
improve timestamp type: int -> uint.

Jacob Su 2024-11-13 15:30:13 +08:00
parent 7951bf3bd6
commit 00d14daa16
6 changed files with 15 additions and 15 deletions
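The change widens the timestamp fields and parameters below from signed or 32-bit types to uint64_t. As a rough, standalone illustration of the head-room involved (this is not code from the repository), a 32-bit millisecond counter wraps after about 49.7 days, while a 64-bit one effectively never does:

// Standalone sketch: head-room of 32-bit vs 64-bit millisecond timestamps.
#include <cstdint>
#include <cstdio>

int main() {
    const double ms_per_day = 24.0 * 60 * 60 * 1000;

    // A 32-bit millisecond timestamp wraps after roughly 49.7 days.
    printf("uint32_t wraps after %.1f days\n", (double)UINT32_MAX / ms_per_day);

    // A 64-bit millisecond timestamp lasts for hundreds of millions of years.
    printf("uint64_t wraps after %.1f million years\n",
           (double)UINT64_MAX / ms_per_day / 365.0 / 1e6);
    return 0;
}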

View file

@@ -1489,7 +1489,7 @@ srs_error_t SrsRtcFrameBuilder::transcode_audio(SrsRtpPacket *pkt)
     srs_error_t err = srs_success;
     // to common message.
-    uint32_t ts = pkt->get_avsync_time();
+    uint64_t ts = pkt->get_avsync_time();
     if (is_first_audio_) {
         int header_len = 0;
         uint8_t* header = NULL;
@@ -1543,7 +1543,7 @@ srs_error_t SrsRtcFrameBuilder::transcode_audio(SrsRtpPacket *pkt)
     return err;
 }
-void SrsRtcFrameBuilder::packet_aac(SrsCommonMessage* audio, char* data, int len, uint32_t pts, bool is_header)
+void SrsRtcFrameBuilder::packet_aac(SrsCommonMessage* audio, char* data, int len, uint64_t pts, bool is_header)
 {
     int rtmp_len = len + 2;
     audio->header.initialize_audio(rtmp_len, pts, 1);
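The local `ts` above now matches the 64-bit value returned by get_avsync_time(); with the old uint32_t local, a sync time beyond UINT32_MAX milliseconds would be silently truncated. A standalone sketch of that truncation (not SRS code, values made up):

#include <cstdint>
#include <cstdio>

int main() {
    uint64_t avsync_ms = 5000000000ULL;     // ~57.9 days expressed in milliseconds

    uint32_t narrow = (uint32_t)avsync_ms;  // old pattern: value truncated modulo 2^32
    uint64_t wide = avsync_ms;              // new pattern: value preserved

    printf("narrow=%u wide=%llu\n", narrow, (unsigned long long)wide);
    return 0;
}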

View file

@@ -352,7 +352,7 @@ public:
     virtual srs_error_t on_rtp(SrsRtpPacket *pkt);
 private:
     srs_error_t transcode_audio(SrsRtpPacket *pkt);
-    void packet_aac(SrsCommonMessage* audio, char* data, int len, uint32_t pts, bool is_header);
+    void packet_aac(SrsCommonMessage* audio, char* data, int len, uint64_t pts, bool is_header);
 private:
     srs_error_t packet_video(SrsRtpPacket* pkt);
     srs_error_t packet_video_key_frame(SrsRtpPacket* pkt);

View file

@@ -1259,7 +1259,7 @@ class SrsFrame
 {
 public:
     // The DTS/PTS in milliseconds, which is TBN=1000.
-    int64_t dts;
+    uint64_t dts;
     // PTS = DTS + CTS.
     int32_t cts;
 public:

View file

@@ -109,31 +109,31 @@ void SrsMessageHeader::initialize_amf0_script(int size, int stream)
     message_type = RTMP_MSG_AMF0DataMessage;
     payload_length = (int32_t)size;
     timestamp_delta = (int32_t)0;
-    timestamp = (int64_t)0;
+    timestamp = 0;
     stream_id = (int32_t)stream;
     // amf0 script use connection2 chunk-id
     prefer_cid = RTMP_CID_OverConnection2;
 }
-void SrsMessageHeader::initialize_audio(int size, uint32_t time, int stream)
+void SrsMessageHeader::initialize_audio(int size, uint64_t time, int stream)
 {
     message_type = RTMP_MSG_AudioMessage;
     payload_length = (int32_t)size;
     timestamp_delta = (int32_t)time;
-    timestamp = (int64_t)time;
+    timestamp = time;
     stream_id = (int32_t)stream;
     // audio chunk-id
     prefer_cid = RTMP_CID_Audio;
 }
-void SrsMessageHeader::initialize_video(int size, uint32_t time, int stream)
+void SrsMessageHeader::initialize_video(int size, uint64_t time, int stream)
 {
     message_type = RTMP_MSG_VideoMessage;
     payload_length = (int32_t)size;
     timestamp_delta = (int32_t)time;
-    timestamp = (int64_t)time;
+    timestamp = time;
     stream_id = (int32_t)stream;
     // video chunk-id

View file

@@ -151,7 +151,7 @@ public:
     // The 4 bytes are packed in the big-endian order.
     // @remark, used as calc timestamp when decode and encode time.
    // @remark, we use 64bits for large time for jitter detect and hls.
-    int64_t timestamp;
+    uint64_t timestamp;
 public:
     // Get the prefered cid(chunk stream id) which sendout over.
     // set at decoding, and canbe used for directly send message,
@@ -177,9 +177,9 @@ public:
     // Create a amf0 script header, set the size and stream_id.
     void initialize_amf0_script(int size, int stream);
     // Create a audio header, set the size, timestamp and stream_id.
-    void initialize_audio(int size, uint32_t time, int stream);
+    void initialize_audio(int size, uint64_t time, int stream);
     // Create a video header, set the size, timestamp and stream_id.
-    void initialize_video(int size, uint32_t time, int stream);
+    void initialize_video(int size, uint64_t time, int stream);
 };
 // The message is raw data RTMP message, bytes oriented,

View file

@@ -299,7 +299,7 @@ private:
     // The helper handler for decoder, use RAW payload if NULL.
     ISrsRtspPacketDecodeHandler* decode_handler;
 private:
-    int64_t avsync_time_;
+    uint64_t avsync_time_;
 public:
     SrsRtpPacket();
     virtual ~SrsRtpPacket();
@@ -336,8 +336,8 @@ public:
 public:
     bool is_keyframe();
     // Get and set the packet sync time in milliseconds.
-    void set_avsync_time(int64_t avsync_time) { avsync_time_ = avsync_time; }
-    int64_t get_avsync_time() const { return avsync_time_; }
+    void set_avsync_time(uint64_t avsync_time) { avsync_time_ = avsync_time; }
+    uint64_t get_avsync_time() const { return avsync_time_; }
 };
 // Single payload data.
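One side effect of the signed-to-unsigned switch in the accessors above is that a timestamp difference no longer goes negative; it wraps. A standalone sketch (not SRS code) of how an unsigned delta behaves when the newer value is the smaller one:

#include <cstdint>
#include <cstdio>

int main() {
    uint64_t prev_ms = 2000;
    uint64_t curr_ms = 1500;   // e.g. a packet whose sync time is earlier than the previous one

    uint64_t unsigned_delta = curr_ms - prev_ms;                    // wraps to 2^64 - 500
    int64_t signed_delta = (int64_t)curr_ms - (int64_t)prev_ms;     // -500, as intended

    printf("unsigned_delta=%llu signed_delta=%lld\n",
           (unsigned long long)unsigned_delta, (long long)signed_delta);
    return 0;
}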