1
0
Fork 0
mirror of https://github.com/ossrs/srs.git synced 2025-03-09 15:49:59 +00:00

For #2200, Enable RTC and FLV for GB28181

This commit is contained in:
xialixin@kanzhun.com 2021-02-18 21:51:49 +08:00 committed by winlin
parent 548fcd627b
commit fdaee20b81
12 changed files with 1289 additions and 845 deletions

View file

@ -50,7 +50,7 @@ using namespace std;
#include <srs_app_rtmp_conn.hpp>
#include <srs_protocol_utility.hpp>
#include <srs_protocol_format.hpp>
#include <srs_sip_stack.hpp>
#include <srs_app_sip.hpp>
//#define W_PS_FILE
//#define W_VIDEO_FILE
@ -405,8 +405,7 @@ srs_error_t SrsGb28181PsRtpProcessor::rtmpmuxer_enqueue_data(SrsGb28181RtmpMuxer
srs_error_t SrsGb28181PsRtpProcessor::on_rtp_packet_jitter(const sockaddr* from, const int fromlen, char* buf, int nb_buf)
{
srs_error_t err = srs_success;
bool completed = false;
pprint->elapse();
char address_string[64];
@ -739,13 +738,12 @@ srs_error_t SrsGb28181TcpPsRtpProcessor::rtmpmuxer_enqueue_data(SrsGb28181RtmpMu
srs_error_t SrsGb28181TcpPsRtpProcessor::on_rtp_packet_jitter(char* buf, int nb_buf, std::string ip, int port)
{
srs_error_t err = srs_success;
bool completed = false;
pprint->elapse();
char address_string[64] = {0};
char port_string[16] = {0};
/*if (getnameinfo(from, fromlen,
/*char port_string[16] = {0};
if (getnameinfo(from, fromlen,
(char*)&address_string, sizeof(address_string),
(char*)&port_string, sizeof(port_string),
NI_NUMERICHOST | NI_NUMERICSERV)) {
@ -1109,8 +1107,8 @@ srs_error_t SrsPsStreamDemixer::on_ps_stream(char* ps_data, int ps_size, uint32_
uint8_t p1 = (uint8_t)(next_ps_pack[0]);
uint8_t p2 = (uint8_t)(next_ps_pack[1]);
uint8_t p3 = (uint8_t)(next_ps_pack[2]);
uint8_t p4 = (uint8_t)(next_ps_pack[3]);
//uint8_t p3 = (uint8_t)(next_ps_pack[2]);
//uint8_t p4 = (uint8_t)(next_ps_pack[3]);
if (audio_enable && audio_es_type != STREAM_TYPE_AUDIO_AAC &&
(p1 & 0xFF) == 0xFF && (p2 & 0xF0) == 0xF0) {
@ -1260,8 +1258,8 @@ SrsGb28181RtmpMuxer::SrsGb28181RtmpMuxer(SrsGb28181Manger* c, std::string id, bo
((SrsSTCoroutine*)trd)->set_stack_size(1 << 18);
sdk = NULL;
vjitter = new SrsRtspJitter();
ajitter = new SrsRtspJitter();
vjitter = new SrsRtpTimeJitter();
ajitter = new SrsRtpTimeJitter();
avc = new SrsRawH264Stream();
aac = new SrsRawAacStream();
@ -1284,8 +1282,8 @@ SrsGb28181RtmpMuxer::SrsGb28181RtmpMuxer(SrsGb28181Manger* c, std::string id, bo
source = NULL;
source_publish = true;
jitter_buffer = new SrsPsJitterBuffer(id);
jitter_buffer_audio = new SrsPsJitterBuffer(id);
jitter_buffer = new SrsRtpJitterBuffer(id);
jitter_buffer_audio = new SrsRtpJitterBuffer(id);
ps_buflen = 0;
ps_buffer = NULL;
@ -1414,7 +1412,7 @@ srs_error_t SrsGb28181RtmpMuxer::initialize(SrsServer *s, SrsRequest* r)
srs_error_t err = srs_success;
if (!jitter_buffer) {
jitter_buffer = new SrsPsJitterBuffer(channel_id);
jitter_buffer = new SrsRtpJitterBuffer(channel_id);
}
jitter_buffer->SetDecodeErrorMode(kSelectiveErrors);
@ -1422,7 +1420,7 @@ srs_error_t SrsGb28181RtmpMuxer::initialize(SrsServer *s, SrsRequest* r)
jitter_buffer->SetNackSettings(250, 450, 0);
if (!jitter_buffer_audio) {
jitter_buffer_audio = new SrsPsJitterBuffer(channel_id);
jitter_buffer_audio = new SrsRtpJitterBuffer(channel_id);
}
jitter_buffer_audio->SetDecodeErrorMode(kSelectiveErrors);
@ -1458,6 +1456,7 @@ srs_error_t SrsGb28181RtmpMuxer::do_cycle()
send_rtmp_stream_time = srs_get_system_time();
uint32_t cur_timestamp = 0;
int buffer_size = 0;
bool keyframe = false;
//consume ps stream, and check status
while (true) {
@ -1472,7 +1471,7 @@ srs_error_t SrsGb28181RtmpMuxer::do_cycle()
if (config.jitterbuffer_enable){
if(jitter_buffer->FoundFrame(cur_timestamp)){
jitter_buffer->GetPsFrame(&ps_buffer, ps_buflen, buffer_size, cur_timestamp);
jitter_buffer->GetFrame(&ps_buffer, ps_buflen, buffer_size, keyframe, cur_timestamp);
if (buffer_size > 0){
if ((err = ps_demixer->on_ps_stream(ps_buffer, buffer_size, cur_timestamp, 0)) != srs_success){
@ -1483,7 +1482,7 @@ srs_error_t SrsGb28181RtmpMuxer::do_cycle()
}
if(jitter_buffer_audio->FoundFrame(cur_timestamp)){
jitter_buffer_audio->GetPsFrame(&ps_buffer_audio, ps_buflen_auido, buffer_size, cur_timestamp);
jitter_buffer_audio->GetFrame(&ps_buffer_audio, ps_buflen_auido, buffer_size, keyframe, cur_timestamp);
if (buffer_size > 0){
if ((err = ps_demixer->on_ps_stream(ps_buffer_audio, buffer_size, cur_timestamp, 0)) != srs_success){
@ -1600,10 +1599,12 @@ void SrsGb28181RtmpMuxer::insert_jitterbuffer(SrsPsRtpPacket *pkt)
//otherwise audio uses jitter_buffer_audio, and video uses jitter_buffer
if (av_same_ts){
pkt->marker = false;
jitter_buffer->InsertPacket(*pkt, pkt->payload->bytes(), pkt->payload->length(), NULL);
jitter_buffer->InsertPacket(pkt->sequence_number, pkt->timestamp, pkt->marker,
pkt->payload->bytes(), pkt->payload->length(), NULL);
ps_rtp_video_ts = pkt->timestamp;
}else {
jitter_buffer_audio->InsertPacket(*pkt, pkt->payload->bytes(), pkt->payload->length(), NULL);
jitter_buffer_audio->InsertPacket(pkt->sequence_number, pkt->timestamp, pkt->marker,
pkt->payload->bytes(), pkt->payload->length(), NULL);
}
//srs_cond_signal(wait_ps_queue);
@ -1771,14 +1772,8 @@ srs_error_t SrsGb28181RtmpMuxer::write_h264_ipb_frame2(char *frame, int frame_si
//0001xxxxxxxxxx
//xxxx0001xxxxxxx
uint32_t naluLen = size - cur_pos;
char *p = (char*)&naluLen;
video_data[cur_pos] = p[3];
video_data[cur_pos+1] = p[2];
video_data[cur_pos+2] = p[1];
video_data[cur_pos+3] = p[0];
uint32_t naluLen = size - cur_pos - 4;
char *frame = video_data + cur_pos + 4;
int frame_size = naluLen;
@ -1797,13 +1792,7 @@ srs_error_t SrsGb28181RtmpMuxer::write_h264_ipb_frame2(char *frame, int frame_si
//0001xxxxxxxx0001xxxxxxxx0001xxxxxxxxx
//xxxxxxxxxxxx0001xxxxxxxx0001xxxxxxxxx
uint32_t naluLen = cur_pos - pre_pos - 4;
char *p = (char*)&naluLen;
video_data[pre_pos] = p[3];
video_data[pre_pos+1] = p[2];
video_data[pre_pos+2] = p[1];
video_data[pre_pos+3] = p[0];
char *frame = video_data + pre_pos + 4;
int frame_size = naluLen;
@ -1816,13 +1805,7 @@ srs_error_t SrsGb28181RtmpMuxer::write_h264_ipb_frame2(char *frame, int frame_si
if (first_pos != pre_pos){
uint32_t naluLen = size - pre_pos - 4;
char *p = (char*)&naluLen;
video_data[pre_pos] = p[3];
video_data[pre_pos+1] = p[2];
video_data[pre_pos+2] = p[1];
video_data[pre_pos+3] = p[0];
char *frame = video_data + pre_pos + 4;
int frame_size = naluLen;
@ -2167,6 +2150,9 @@ SrsGb28181StreamChannel::SrsGb28181StreamChannel(){
rtp_peer_port = 0;
rtp_peer_ip = "";
rtmp_url = "";
flv_url = "";
hls_url = "";
webrtc_url = "";
recv_time = 0;
recv_time_str = "";
}
@ -2191,6 +2177,9 @@ void SrsGb28181StreamChannel::copy(const SrsGb28181StreamChannel *s){
rtp_peer_port = s->get_rtp_peer_port();
rtmp_url = s->get_rtmp_url();
flv_url = s->get_flv_url();
hls_url = s->get_hls_url();
webrtc_url = s->get_webrtc_url();
recv_time_str = s->get_recv_time_str();
recv_time = s->get_recv_time();
@ -2205,6 +2194,9 @@ void SrsGb28181StreamChannel::dumps(SrsJsonObject* obj)
obj->set("app", SrsJsonAny::str(app.c_str()));
obj->set("stream", SrsJsonAny::str(stream.c_str()));
obj->set("rtmp_url", SrsJsonAny::str(rtmp_url.c_str()));
obj->set("flv_url", SrsJsonAny::str(flv_url.c_str()));
obj->set("hls_url", SrsJsonAny::str(hls_url.c_str()));
obj->set("webrtc_url", SrsJsonAny::str(webrtc_url.c_str()));
obj->set("ssrc", SrsJsonAny::integer(ssrc));
obj->set("rtp_port", SrsJsonAny::integer(rtp_port));
@ -2298,7 +2290,7 @@ uint32_t SrsGb28181Manger::generate_ssrc(std::string id)
//gb28181 live ssrc max value 0999999999(3B9AC9FF)
//gb28181 vod ssrc max value 1999999999(773593FF)
uint8_t index = uint8_t(rand() % (0x0F - 0x01 + 1) + 0x01);
uint32_t ssrc = 0x2FFFF00 & (hash_code(id) << 8) | index;
uint32_t ssrc = ((0x2FFFF00) & (hash_code(id) << 8)) | index;
//uint32_t ssrc = 0x00FFFFFF & (hash_code(id));
srs_trace("gb28181: generate ssrc id=%s, ssrc=%u", id.c_str(), ssrc);
return ssrc;
@ -2599,7 +2591,21 @@ srs_error_t SrsGb28181Manger::create_stream_channel(SrsGb28181StreamChannel *cha
channel->set_rtmp_port(rtmp_port);
channel->set_ip(config->host);
std::string play_url = srs_generate_rtmp_url(config->host, rtmp_port, "", "", app, stream_name, "");
std::string flv_url = srs_string_replace(play_url, "rtmp://", "http://");
std::stringstream port;
port << ":" << rtmp_port;
flv_url = srs_string_replace(flv_url, port.str(), ":"+_srs_config->get_http_stream_listen());
std::string hls_url = flv_url + ".m3u8";
flv_url = flv_url + ".flv";
std::string webrtc_url = srs_string_replace(play_url, "rtmp://", "webrtc://");
webrtc_url = srs_string_replace(webrtc_url, port.str(), ":"+_srs_config->get_http_api_listen());
channel->set_rtmp_url(play_url);
channel->set_flv_url(flv_url);
channel->set_hls_url(hls_url);
channel->set_webrtc_url(webrtc_url);
request.app = app;
request.stream = stream_name;
@ -2876,7 +2882,6 @@ srs_error_t SrsGb28181Conn::do_cycle()
nb_read = nb_read + leftDataLength;
length;
pp = (char*)&length;
p = &(mbuffer[0]);
pp[1] = *p++;

View file

@ -34,13 +34,12 @@
#include <srs_app_st.hpp>
#include <srs_app_listener.hpp>
#include <srs_rtsp_stack.hpp>
#include <srs_kernel_stream.hpp>
#include <srs_app_log.hpp>
#include <srs_kernel_file.hpp>
#include <srs_protocol_json.hpp>
#include <srs_app_gb28181_sip.hpp>
#include <srs_app_gb28181_jitbuffer.hpp>
#include <srs_app_rtc_jitbuffer.hpp>
#include <srs_rtmp_stack.hpp>
#include <srs_app_source.hpp>
#include <srs_service_conn.hpp>
@ -90,7 +89,7 @@ class SrsPithyPrint;
class SrsSimpleRtmpClient;
class SrsSipStack;
class SrsGb28181Manger;
class SrsRtspJitter;
class SrsRtpTimeJitter;
class SrsSipRequest;
class SrsGb28181RtmpMuxer;
class SrsGb28181Config;
@ -99,7 +98,7 @@ class SrsGb28181TcpPsRtpProcessor;
class SrsGb28181SipService;
class SrsGb28181StreamChannel;
class SrsGb28181SipSession;
class SrsPsJitterBuffer;
class SrsRtpJitterBuffer;
class SrsServer;
class SrsSource;
class SrsRequest;
@ -316,8 +315,8 @@ private:
srs_cond_t wait_ps_queue;
SrsSimpleRtmpClient* sdk;
SrsRtspJitter* vjitter;
SrsRtspJitter* ajitter;
SrsRtpTimeJitter* vjitter;
SrsRtpTimeJitter* ajitter;
SrsRawH264Stream* avc;
std::string h264_sps;
@ -330,8 +329,8 @@ private:
SrsSource* source;
SrsServer* server;
SrsPsJitterBuffer *jitter_buffer;
SrsPsJitterBuffer *jitter_buffer_audio;
SrsRtpJitterBuffer *jitter_buffer;
SrsRtpJitterBuffer *jitter_buffer_audio;
char *ps_buffer;
char *ps_buffer_audio;
@ -340,7 +339,6 @@ private:
int ps_buflen_auido;
uint32_t ps_rtp_video_ts;
uint32_t ps_rtp_audio_ts;
bool source_publish;
@ -444,6 +442,9 @@ private:
std::string app;
std::string stream;
std::string rtmp_url;
std::string flv_url;
std::string hls_url;
std::string webrtc_url;
std::string ip;
int rtp_port;
@ -472,6 +473,9 @@ public:
uint32_t get_rtp_peer_port() const { return rtp_peer_port; }
std::string get_rtp_peer_ip() const { return rtp_peer_ip; }
std::string get_rtmp_url() const { return rtmp_url; }
std::string get_flv_url() const { return flv_url; }
std::string get_hls_url() const { return hls_url; }
std::string get_webrtc_url() const { return webrtc_url; }
srs_utime_t get_recv_time() const { return recv_time; }
std::string get_recv_time_str() const { return recv_time_str; }
@ -486,6 +490,9 @@ public:
void set_rtp_peer_ip( const std::string &p) { rtp_peer_ip = p; }
void set_rtp_peer_port( const int &s) { rtp_peer_port = s;}
void set_rtmp_url( const std::string &u) { rtmp_url = u; }
void set_flv_url( const std::string &u) { flv_url = u; }
void set_hls_url( const std::string &u) { hls_url = u; }
void set_webrtc_url( const std::string &u) { webrtc_url = u; }
void set_recv_time( const srs_utime_t &u) { recv_time = u; }
void set_recv_time_str( const std::string &u) { recv_time_str = u; }

View file

@ -43,7 +43,7 @@ using namespace std;
#include <srs_kernel_utility.hpp>
#include <srs_kernel_codec.hpp>
#include <srs_app_pithy_print.hpp>
#include <srs_sip_stack.hpp>
#include <srs_app_sip.hpp>
#include <srs_app_gb28181.hpp>

View file

@ -31,7 +31,7 @@
#include <map>
#include <srs_app_log.hpp>
#include <srs_sip_stack.hpp>
#include <srs_app_sip.hpp>
#include <srs_app_gb28181.hpp>
#include <srs_app_pithy_print.hpp>
#include <srs_service_conn.hpp>

View file

@ -21,8 +21,8 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
#ifndef SRS_APP_GB28181_JITBUFFER_HPP
#define SRS_APP_GB28181_JITBUFFER_HPP
#ifndef SRS_APP_RTC_RTP_JITBUFFER_HPP
#define SRS_APP_RTC_RTP_JITBUFFER_HPP
#include <srs_core.hpp>
@ -36,13 +36,15 @@
#include <srs_app_log.hpp>
#include <srs_kernel_utility.hpp>
#include <srs_app_gb28181.hpp>
#include <srs_kernel_rtc_rtp.hpp>
#include <srs_kernel_flv.hpp>
class SrsPsRtpPacket;
class SrsPsFrameBuffer;
class PsDecodingState;
class SrsRtpFrameBuffer;
class SrsRtpDecodingState;
class SrsGb28181RtmpMuxer;
class VCMPacket;
class SrsRtpPacket2;
/// Jitter buffer
@ -57,7 +59,7 @@ enum FrameType {
};
// Used to indicate which decode with errors mode should be used.
enum PsDecodeErrorMode {
enum SrsRtpDecodeErrorMode {
kNoErrors, // Never decode with errors. Video will freeze
// if nack is disabled.
kSelectiveErrors, // Frames that are determined decodable in
@ -79,7 +81,7 @@ enum { kMaxVideoDelayMs = 10000 };
enum { kPacketsPerFrameMultiplier = 5 };
enum { kFastConvergeThreshold = 5};
enum PsJitterBufferEnum {
enum SrsRtpJitterBufferEnum {
kMaxConsecutiveOldFrames = 60,
kMaxConsecutiveOldPackets = 300,
kMaxPacketsInSession = 800,
@ -87,7 +89,7 @@ enum PsJitterBufferEnum {
kMaxJBFrameSizeBytes = 4000000 // sanity don't go above 4Mbyte.
};
enum PsFrameBufferEnum {
enum SrsRtpFrameBufferEnum {
kOutOfBoundsPacket = -7,
kNotInitialized = -6,
kOldPacket = -5,
@ -102,18 +104,65 @@ enum PsFrameBufferEnum {
kDuplicatePacket = 5 // We're receiving a duplicate packet.
};
enum PsFrameBufferStateEnum {
enum SrsRtpFrameBufferStateEnum {
kStateEmpty, // frame popped by the RTP receiver
kStateIncomplete, // frame that have one or more packet(s) stored
kStateComplete, // frame that have all packets
kStateDecodable // Hybrid mode - frame can be decoded
};
enum PsNackMode {
enum SrsRtpNackMode {
kNack,
kNoNack
};
// Used to indicate whether a received packet contains a complete NALU (or equivalent)
enum VCMNaluCompleteness {
kNaluUnset = 0, // Packet has not been filled.
kNaluComplete = 1, // Packet can be decoded as is.
kNaluStart, // Packet contain beginning of NALU
kNaluIncomplete, // Packet is not beginning or end of NALU
kNaluEnd, // Packet is the end of a NALU
};
enum RtpVideoCodecTypes {
kRtpVideoNone,
kRtpVideoGeneric,
kRtpVideoVp8,
kRtpVideoVp9,
kRtpVideoH264,
kRtpVideoPS
};
// Video codec types
enum VideoCodecType {
kVideoCodecVP8,
kVideoCodecVP9,
kVideoCodecH264,
kVideoCodecH264SVC,
kVideoCodecI420,
kVideoCodecRED,
kVideoCodecULPFEC,
kVideoCodecGeneric,
kVideoCodecH264PS,
kVideoCodecUnknown
};
// The packetization types that we support: single, aggregated, and fragmented.
enum H264PacketizationTypes {
kH264SingleNalu, // This packet contains a single NAL unit.
kH264StapA, // This packet contains STAP-A (single time
// aggregation) packets. If this packet has an
// associated NAL unit type, it'll be for the
// first such aggregated packet.
kH264FuA, // This packet contains a FU-A (fragmentation
// unit) packet, meaning it is a part of a frame
// that was too large to fit into a single packet.
};
enum { kH264StartCodeLengthBytes = 4};
// Used to pass data from jitter buffer to session info.
// This data is then used in determining whether a frame is decodable.
struct FrameData {
@ -147,7 +196,7 @@ inline uint32_t LatestTimestamp(uint32_t timestamp1, uint32_t timestamp2)
return IsNewerTimestamp(timestamp1, timestamp2) ? timestamp1 : timestamp2;
}
typedef std::list<SrsPsFrameBuffer*> UnorderedFrameList;
typedef std::list<SrsRtpFrameBuffer*> UnorderedFrameList;
class TimestampLessThan {
public:
@ -159,16 +208,16 @@ public:
};
class FrameList
: public std::map<uint32_t, SrsPsFrameBuffer*, TimestampLessThan> {
: public std::map<uint32_t, SrsRtpFrameBuffer*, TimestampLessThan> {
public:
void InsertFrame(SrsPsFrameBuffer* frame);
SrsPsFrameBuffer* PopFrame(uint32_t timestamp);
SrsPsFrameBuffer* Front() const;
SrsPsFrameBuffer* FrontNext() const;
SrsPsFrameBuffer* Back() const;
void InsertFrame(SrsRtpFrameBuffer* frame);
SrsRtpFrameBuffer* PopFrame(uint32_t timestamp);
SrsRtpFrameBuffer* Front() const;
SrsRtpFrameBuffer* FrontNext() const;
SrsRtpFrameBuffer* Back() const;
int RecycleFramesUntilKeyFrame(FrameList::iterator* key_frame_it,
UnorderedFrameList* free_frames);
void CleanUpOldOrEmptyFrames(PsDecodingState* decoding_state, UnorderedFrameList* free_frames);
void CleanUpOldOrEmptyFrames(SrsRtpDecodingState* decoding_state, UnorderedFrameList* free_frames);
void Reset(UnorderedFrameList* free_frames);
};
@ -180,7 +229,13 @@ public:
size_t size,
uint16_t seqNum,
uint32_t timestamp,
bool markerBit);
bool markerBit,
H264PacketizationTypes type,
RtpVideoCodecTypes rtpType,
bool singlenual,
bool isfirst,
FrameType ftype
);
void Reset();
@ -191,27 +246,35 @@ public:
uint16_t seqNum;
const uint8_t* dataPtr;
size_t sizeBytes;
bool markerBit;
bool markerBit;
FrameType frameType;
//cloopenwebrtc::VideoCodecType codec;
VideoCodecType codec;
bool isFirstPacket; // Is this first packet in a frame.
//VCMNaluCompleteness completeNALU; // Default is kNaluIncomplete.
VCMNaluCompleteness completeNALU; // Default is kNaluIncomplete.
bool insertStartCode; // True if a start code should be inserted before this
// packet.
int width;
int height;
//RTPVideoHeader codecSpecificHeader;
//H264 header
H264PacketizationTypes h264packetizationType;
bool h264singleNalu;
public:
void CopyCodecSpecifics(RtpVideoCodecTypes codecType, bool H264single_nalu, bool firstPacket);
};
class SrsPsFrameBuffer {
class SrsRtpFrameBuffer {
public:
SrsPsFrameBuffer();
virtual ~SrsPsFrameBuffer();
SrsRtpFrameBuffer();
virtual ~SrsRtpFrameBuffer();
public:
PsFrameBufferEnum InsertPacket(const VCMPacket& packet, const FrameData& frame_data);
SrsRtpFrameBufferEnum InsertPacket(const VCMPacket& packet, const FrameData& frame_data);
void UpdateCompleteSession();
void UpdateDecodableSession(const FrameData& frame_data);
bool HaveFirstPacket() const;
@ -220,7 +283,7 @@ public:
uint32_t GetTimeStamp() const;
FrameType GetFrameType() const;
PsFrameBufferStateEnum GetState() const;
SrsRtpFrameBufferStateEnum GetState() const;
int32_t GetHighSeqNum() const;
int32_t GetLowSeqNum() const;
@ -233,7 +296,6 @@ public:
bool complete() const;
bool decodable() const;
bool GetPsPlayload(SrsSimpleStream **ps_data, int &count);
bool DeletePacket(int &count);
void PrepareForDecode(bool continuous);
@ -248,7 +310,7 @@ private:
const PacketIterator& prev_packet_it);
size_t InsertBuffer(uint8_t* frame_buffer, PacketIterator packet_it);
size_t Insert(const uint8_t* buffer, size_t length, uint8_t* frame_buffer);
size_t Insert(const uint8_t* buffer, size_t length, bool insert_start_code, uint8_t* frame_buffer);
void ShiftSubsequentPackets(PacketIterator it, int steps_to_shift);
void VerifyAndAllocate(const uint32_t minimumSize);
void UpdateDataPointers(const uint8_t* old_base_ptr, const uint8_t* new_base_ptr);
@ -269,11 +331,11 @@ private:
uint32_t timeStamp_;
FrameType frame_type_;
PsDecodeErrorMode decode_error_mode_;
PsFrameBufferStateEnum state_;
SrsRtpDecodeErrorMode decode_error_mode_;
SrsRtpFrameBufferStateEnum state_;
uint16_t nackCount_;
int64_t latestPacketTimeMs_;
//uint16_t nackCount_;
//int64_t latestPacketTimeMs_;
// The payload.
uint8_t* _buffer;
@ -281,20 +343,20 @@ private:
size_t _length;
};
class PsDecodingState {
class SrsRtpDecodingState {
public:
PsDecodingState();
~PsDecodingState();
SrsRtpDecodingState();
~SrsRtpDecodingState();
// Check for old frame
bool IsOldFrame(const SrsPsFrameBuffer* frame) const;
bool IsOldFrame(const SrsRtpFrameBuffer* frame) const;
// Check for old packet
bool IsOldPacket(const VCMPacket* packet);
// Check for frame continuity based on current decoded state. Use best method
// possible, i.e. temporal info, picture ID or sequence number.
bool ContinuousFrame(const SrsPsFrameBuffer* frame) const;
void SetState(const SrsPsFrameBuffer* frame);
void CopyFrom(const PsDecodingState& state);
bool UpdateEmptyFrame(const SrsPsFrameBuffer* frame);
bool ContinuousFrame(const SrsRtpFrameBuffer* frame) const;
void SetState(const SrsRtpFrameBuffer* frame);
void CopyFrom(const SrsRtpDecodingState& state);
bool UpdateEmptyFrame(const SrsRtpFrameBuffer* frame);
// Update the sequence number if the timestamp matches current state and the
// sequence number is higher than the current one. This accounts for packets
// arriving late.
@ -309,64 +371,81 @@ public:
bool full_sync() const;
private:
void UpdateSyncState(const SrsPsFrameBuffer* frame);
void UpdateSyncState(const SrsRtpFrameBuffer* frame);
// Designated continuity functions
//bool ContinuousPictureId(int picture_id) const;
bool ContinuousSeqNum(uint16_t seq_num) const;
//bool ContinuousLayer(int temporal_id, int tl0_pic_id) const;
//bool UsingPictureId(const SrsPsFrameBuffer* frame) const;
//bool UsingPictureId(const SrsRtpFrameBuffer* frame) const;
// Keep state of last decoded frame.
// TODO(mikhal/stefan): create designated classes to handle these types.
uint16_t sequence_num_;
uint32_t time_stamp_;
int picture_id_;
int temporal_id_;
int tl0_pic_id_;
bool full_sync_; // Sync flag when temporal layers are used.
bool in_initial_state_;
bool m_firstPacket;
};
class SrsPsJitterBuffer
// Corrects timestamp jitter for RTP.
class SrsRtpTimeJitter
{
private:
int64_t previous_timestamp;
int64_t pts;
int delta;
public:
SrsRtpTimeJitter();
virtual ~SrsRtpTimeJitter();
public:
int64_t timestamp();
srs_error_t correct(int64_t& ts);
void reset();
};
class SrsRtpJitterBuffer
{
public:
SrsPsJitterBuffer(std::string key);
virtual ~SrsPsJitterBuffer();
SrsRtpJitterBuffer(std::string key);
virtual ~SrsRtpJitterBuffer();
public:
srs_error_t start();
void Reset();
PsFrameBufferEnum InsertPacket(const SrsPsRtpPacket &packet, char *buf, int size, bool* retransmitted);
void ReleaseFrame(SrsPsFrameBuffer* frame);
SrsRtpFrameBufferEnum InsertPacket2(const SrsRtpPacket2 &pkt, bool* retransmitted);
SrsRtpFrameBufferEnum InsertPacket(uint16_t seq, uint32_t ts, bool maker, char *buf, int size,
bool* retransmitted);
void ReleaseFrame(SrsRtpFrameBuffer* frame);
bool FoundFrame(uint32_t& time_stamp);
bool GetPsFrame(char **buffer, int &buf_len, int &size, const uint32_t time_stamp);
void SetDecodeErrorMode(PsDecodeErrorMode error_mode);
void SetNackMode(PsNackMode mode,int64_t low_rtt_nack_threshold_ms,
bool GetFrame(char **buffer, int &buf_len, int &size, bool &keyframe, const uint32_t time_stamp);
void SetDecodeErrorMode(SrsRtpDecodeErrorMode error_mode);
void SetNackMode(SrsRtpNackMode mode,int64_t low_rtt_nack_threshold_ms,
int64_t high_rtt_nack_threshold_ms);
void SetNackSettings(size_t max_nack_list_size,int max_packet_age_to_nack,
int max_incomplete_time_ms);
uint16_t* GetNackList(uint16_t* nack_list_size, bool* request_key_frame);
void Flush();
void ResetJittter();
bool isFirstKeyFrame;
private:
PsFrameBufferEnum GetFrame(const VCMPacket& packet, SrsPsFrameBuffer** frame,
SrsRtpFrameBufferEnum GetFrameByRtpPacket(const VCMPacket& packet, SrsRtpFrameBuffer** frame,
FrameList** frame_list);
SrsPsFrameBuffer* GetEmptyFrame();
SrsRtpFrameBuffer* GetEmptyFrame();
bool NextCompleteTimestamp(uint32_t max_wait_time_ms, uint32_t* timestamp);
bool NextMaybeIncompleteTimestamp(uint32_t* timestamp);
SrsPsFrameBuffer* ExtractAndSetDecode(uint32_t timestamp);
SrsPsFrameBuffer* NextFrame() const;
SrsRtpFrameBuffer* ExtractAndSetDecode(uint32_t timestamp);
SrsRtpFrameBuffer* NextFrame() const;
bool TryToIncreaseJitterBufferSize();
bool RecycleFramesUntilKeyFrame();
bool IsContinuous(const SrsPsFrameBuffer& frame) const;
bool IsContinuousInState(const SrsPsFrameBuffer& frame,
const PsDecodingState& decoding_state) const;
void FindAndInsertContinuousFrames(const SrsPsFrameBuffer& new_frame);
bool IsContinuous(const SrsRtpFrameBuffer& frame) const;
bool IsContinuousInState(const SrsRtpFrameBuffer& frame,
const SrsRtpDecodingState& decoding_state) const;
void FindAndInsertContinuousFrames(const SrsRtpFrameBuffer& new_frame);
void CleanUpOldOrEmptyFrames();
//nack
@ -376,10 +455,13 @@ private:
bool MissingTooOldPacket(uint16_t latest_sequence_number) const;
bool HandleTooOldPackets(uint16_t latest_sequence_number);
void DropPacketsFromNackList(uint16_t last_decoded_sequence_number);
PsNackMode nack_mode() const;
SrsRtpNackMode nack_mode() const;
int NonContinuousOrIncompleteDuration();
uint16_t EstimatedLowSequenceNumber(const SrsPsFrameBuffer& frame) const;
uint16_t EstimatedLowSequenceNumber(const SrsRtpFrameBuffer& frame) const;
bool WaitForRetransmissions();
bool IsPacketInOrder(uint16_t sequence_number);
bool IsFirstPacketInFrame(uint32_t ts, uint16_t seq);
private:
class SequenceNumberLessThan {
@ -402,7 +484,7 @@ private:
UnorderedFrameList free_frames_;
FrameList decodable_frames_;
FrameList incomplete_frames_;
PsDecodingState last_decoded_state_;
SrsRtpDecodingState last_decoded_state_;
bool first_packet_since_reset_;
// Statistics.
@ -436,9 +518,9 @@ private:
//VCMInterFrameDelay inter_frame_delay_;
//VCMJitterSample waiting_for_completion_;
int64_t rtt_ms_;
// NACK and retransmissions.
PsNackMode nack_mode_;
SrsRtpNackMode nack_mode_;
int64_t low_rtt_nack_threshold_ms_;
int64_t high_rtt_nack_threshold_ms_;
// Holds the internal NACK list (the missing sequence numbers).
@ -449,12 +531,17 @@ private:
int max_packet_age_to_nack_; // Measured in sequence numbers.
int max_incomplete_time_ms_;
PsDecodeErrorMode decode_error_mode_;
SrsRtpDecodeErrorMode decode_error_mode_;
// Estimated rolling average of packets per frame
float average_packets_per_frame_;
// average_packets_per_frame converges fast if we have fewer than this many
// frames.
int frame_counter_;
uint32_t last_received_timestamp_;
uint16_t last_received_sequence_number_;
bool first_packet_;
};
#endif

View file

@ -21,7 +21,7 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
#include <srs_sip_stack.hpp>
#include <srs_app_sip.hpp>
#if !defined(SRS_EXPORT_LIBRTMP)