mirror of https://github.com/ossrs/srs.git

GB28181: Support GB28181-2016 protocol. v5.0.74 (#3201)

01. Support GB config as a StreamCaster.
02. Support disabling GB via --gb28181=off.
03. Add utests for SIP examples.
04. Add a Wireshark plugin to decode TCP/9000 as rtp.rfc4571.
05. Support the MPEG-PS program stream codec.
06. Add utests for the PS stream codec.
07. Decode the MPEG-PS packet stream.
08. Carry the RTP and PS packets as helpers in the PS message.
09. Support recovering from error mode.
10. Support processing a pack of PS/TS messages.
11. Add statistics for recovered and dropped messages.
12. Recover from the error position quickly.
13. Define a state machine for the GB session.
14. Bind the context to the GB session.
15. Re-invite when the media is disconnected.
16. Update GitHub Actions with GB28181.
17. Support parsing CANDIDATE by env or pip.
18. Support muxing GB28181 to RTMP.
19. Support regression tests via srs-bench.
Winlin 2022-10-06 17:40:58 +08:00 committed by GitHub
parent 9c81a0e1bd
commit 5a420ece3b
298 changed files with 43343 additions and 763 deletions


@@ -0,0 +1,101 @@
package httphead

import "io"

var (
	comma     = []byte{','}
	equality  = []byte{'='}
	semicolon = []byte{';'}
	quote     = []byte{'"'}
	escape    = []byte{'\\'}
)

// WriteOptions writes the options list to dest.
// It uses the same form as the {Scan,Parse}Options functions:
//
//	values = 1#value
//	value  = token *( ";" param )
//	param  = token [ "=" (token | quoted-string) ]
//
// It wraps a value in a quoted-string sequence if the value contains any
// non-token characters.
func WriteOptions(dest io.Writer, options []Option) (n int, err error) {
	w := writer{w: dest}
	for i, opt := range options {
		if i > 0 {
			w.write(comma)
		}
		writeTokenSanitized(&w, opt.Name)
		for _, p := range opt.Parameters.data() {
			w.write(semicolon)
			writeTokenSanitized(&w, p.key)
			if len(p.value) != 0 {
				w.write(equality)
				writeTokenSanitized(&w, p.value)
			}
		}
	}
	return w.result()
}

// writeTokenSanitized writes the token as is, or as a quoted string if it
// contains non-token characters.
//
// Note that it does not expect LWS sequences in bts, because LWS is used
// only as header field continuation:
//
//	"A CRLF is allowed in the definition of TEXT only as part of a header
//	field continuation. It is expected that the folding LWS will be replaced
//	with a single SP before interpretation of the TEXT value."
//	See https://tools.ietf.org/html/rfc2616#section-2
//
// That is, we sanitize bts for writing, so there cannot be any header field
// continuation: any CRLF is escaped like any other control character not
// allowed in TEXT.
func writeTokenSanitized(bw *writer, bts []byte) {
	var qt bool
	var pos int
	for i := 0; i < len(bts); i++ {
		c := bts[i]
		if !OctetTypes[c].IsToken() && !qt {
			qt = true
			bw.write(quote)
		}
		if OctetTypes[c].IsControl() || c == '"' {
			if !qt {
				qt = true
				bw.write(quote)
			}
			bw.write(bts[pos:i])
			bw.write(escape)
			bw.write(bts[i : i+1])
			pos = i + 1
		}
	}
	if !qt {
		bw.write(bts)
	} else {
		bw.write(bts[pos:])
		bw.write(quote)
	}
}

// writer wraps an io.Writer, accumulating the byte count and the first
// error across multiple writes.
type writer struct {
	w   io.Writer
	n   int
	err error
}

func (w *writer) write(p []byte) {
	if w.err != nil {
		return
	}
	var n int
	n, w.err = w.w.Write(p)
	w.n += n
}

func (w *writer) result() (int, error) {
	return w.n, w.err
}
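
For reference, WriteOptions serializes an options list into a single header value: names and values that are pure tokens are written as-is, while anything containing non-token bytes (spaces, control characters, quotes) is wrapped in a quoted-string with the offending bytes escaped. Below is a minimal usage sketch, assuming this is the vendored github.com/gobwas/httphead package, that Option exposes the Name field used in WriteOptions above, and that a zero-value Parameters behaves as an empty parameter list:

package main

import (
	"bytes"
	"fmt"

	"github.com/gobwas/httphead"
)

func main() {
	// One plain token and one name containing a space, which forces
	// the quoted-string form.
	opts := []httphead.Option{
		{Name: []byte("permessage-deflate")},
		{Name: []byte("x custom")},
	}

	var buf bytes.Buffer
	if _, err := httphead.WriteOptions(&buf, opts); err != nil {
		panic(err)
	}

	// Expected to print something like: permessage-deflate,"x custom"
	fmt.Println(buf.String())
}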