
Squash: Support github actions for CI

winlin 2021-08-15 21:51:50 +08:00
parent d08f8fab8c
commit 3d58e98d1c
11 changed files with 243 additions and 186 deletions


@@ -1,18 +1,11 @@
name: "CodeQL"
on:
push:
branches: [ 4.0release, develop ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ 4.0release, develop ]
schedule:
# '20 19 * * 6' means 'At 19:20, only on Saturday' @see https://crontab.cronhub.io/
- cron: '20 19 * * 6'
# @see https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#onpushpull_requestbranchestags
on: [push, pull_request]
jobs:
analyze:
name: codeql-analyze
name: actions-codeql-analyze
runs-on: ubuntu-latest
strategy:


@@ -1,5 +1,6 @@
name: "Release"
# @see https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#onpushpull_requestbranchestags
on:
push:
tags:

.github/workflows/test.yml (new file)

@@ -0,0 +1,138 @@
name: "Test"
# @see https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#onpushpull_requestbranchestags
on: [push, pull_request]
jobs:
build-centos:
name: actions-test-build-centos
runs-on: ubuntu-20.04
steps:
- name: Checkout repository
uses: actions/checkout@v2
# Build for CentOS7
- name: Build on CentOS7, baseline
run: DOCKER_BUILDKIT=1 docker build -f trunk/Dockerfile.builds --target centos7-baseline .
- name: Build on CentOS7, with SRT
run: DOCKER_BUILDKIT=1 docker build -f trunk/Dockerfile.builds --target centos7-srt .
- name: Build on CentOS7, without WebRTC
run: DOCKER_BUILDKIT=1 docker build -f trunk/Dockerfile.builds --target centos7-no-webrtc .
- name: Build on CentOS7, without ASM
run: DOCKER_BUILDKIT=1 docker build -f trunk/Dockerfile.builds --target centos7-no-asm .
- name: Build on CentOS7, C++98, no FFmpeg
run: DOCKER_BUILDKIT=1 docker build -f trunk/Dockerfile.builds --target centos7-ansi-no-ffmpeg .
# Build for CentOS8
- name: Build on CentOS8, baseline
run: DOCKER_BUILDKIT=1 docker build -f trunk/Dockerfile.builds --target centos8-baseline .
- name: Build on CentOS8, with SRT
run: DOCKER_BUILDKIT=1 docker build -f trunk/Dockerfile.builds --target centos8-srt .
build-ubuntu:
name: actions-test-build-ubuntu
runs-on: ubuntu-20.04
steps:
- name: Checkout repository
uses: actions/checkout@v2
# Build for Ubuntu16
- name: Build on Ubuntu16, baseline
run: DOCKER_BUILDKIT=1 docker build -f trunk/Dockerfile.builds --target ubuntu16-baseline .
- name: Build on Ubuntu16, with SRT
run: DOCKER_BUILDKIT=1 docker build -f trunk/Dockerfile.builds --target ubuntu16-srt .
# Build for Ubuntu18
- name: Build on Ubuntu18, baseline
run: DOCKER_BUILDKIT=1 docker build -f trunk/Dockerfile.builds --target ubuntu18-baseline .
- name: Build on Ubuntu18, with SRT
run: DOCKER_BUILDKIT=1 docker build -f trunk/Dockerfile.builds --target ubuntu18-srt .
# Build for Ubuntu20
- name: Build on Ubuntu20, baseline
run: DOCKER_BUILDKIT=1 docker build -f trunk/Dockerfile.builds --target ubuntu20-baseline .
- name: Build on Ubuntu20, with SRT
run: DOCKER_BUILDKIT=1 docker build -f trunk/Dockerfile.builds --target ubuntu20-srt .
build-cross:
name: actions-test-build-cross
runs-on: ubuntu-20.04
steps:
- name: Checkout repository
uses: actions/checkout@v2
# Cross Build for ARMv7
- name: Cross Build for ARMv7
run: DOCKER_BUILDKIT=1 docker build -f trunk/Dockerfile.builds --target ubuntu16-cross-armv7 .
# Cross Build for AARCH64
- name: Cross Build for AARCH64
run: DOCKER_BUILDKIT=1 docker build -f trunk/Dockerfile.builds --target ubuntu16-cross-aarch64 .
build:
name: actions-test-build
needs:
- build-centos
- build-ubuntu
- build-cross
runs-on: ubuntu-20.04
steps:
- run: echo 'Build done'
utest:
name: actions-test-utest
runs-on: ubuntu-20.04
steps:
- name: Checkout repository
uses: actions/checkout@v2
################################################################
# Tests
- name: Build test image
run: docker build --tag srs:test -f trunk/Dockerfile.test .
# For utest
- name: Run SRS utest
run: docker run --rm srs:test bash -c 'make && ./objs/srs_utest'
# For regression-test
- name: Run SRS regression-test
run: docker run --rm srs:test bash -c 'make && ./objs/srs -c conf/regression-test.conf && cd 3rdparty/srs-bench && make && ./objs/srs_test -test.v'
coverage:
name: actions-test-coverage
runs-on: ubuntu-20.04
steps:
- name: Checkout repository
uses: actions/checkout@v2
################################################################
# Tests
- name: Build coverage image
run: docker build --tag srs:cov -f trunk/Dockerfile.cov .
# For coverage
- name: Run SRS coverage
if: ${{ startsWith(github.ref, 'refs/heads/') || startsWith(github.ref, 'refs/pull/') }}
run: |
# The hash of commit.
SRS_SHA=${{ github.sha }}
# Note that the root of SRS must contain .git, for report fixing.
SRS_PROJECT=/srs
# The github.ref is, for example, refs/heads/4.0release
SRS_BRANCH=$(echo ${{ github.ref }}| awk -F 'refs/heads/' '{print $2}'| awk -F '/' '{print $1}')
# The github.ref is, for example, refs/pull/2536/merge
SRS_PR=$(echo ${{ github.ref }}| awk -F 'refs/pull/' '{print $2}'| awk -F '/' '{print $1}')
echo "For ref=${{ github.ref }}, sha=${{ github.sha }}, SRS_BRANCH=$SRS_BRANCH, SRS_PR=$SRS_PR, SRS_SHA=$SRS_SHA, SRS_PROJECT=$SRS_PROJECT"
docker run --rm --env SRS_BRANCH=$SRS_BRANCH --env SRS_PR=$SRS_PR --env SRS_SHA=$SRS_SHA --env SRS_PROJECT=$SRS_PROJECT \
srs:cov bash -c 'make && ./objs/srs_utest && bash auto/codecov.sh'
done:
name: actions-test-done
needs:
- build
- utest
- coverage
runs-on: ubuntu-20.04
steps:
- run: echo 'All done'
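
A side note on the coverage step above: the two awk pipelines split github.ref on the 'refs/heads/' or 'refs/pull/' prefix and keep the first path segment after it, so at most one of SRS_BRANCH and SRS_PR ends up non-empty for a given ref. A minimal sketch that can be run in a local bash shell (the ref values are the ones quoted in the comments above, not taken from a real workflow run):

    # Illustrative refs only; in the workflow, ${{ github.ref }} supplies the value.
    for REF in refs/heads/4.0release refs/pull/2536/merge; do
      SRS_BRANCH=$(echo $REF | awk -F 'refs/heads/' '{print $2}' | awk -F '/' '{print $1}')
      SRS_PR=$(echo $REF | awk -F 'refs/pull/' '{print $2}' | awk -F '/' '{print $1}')
      echo "REF=$REF, SRS_BRANCH=$SRS_BRANCH, SRS_PR=$SRS_PR"
    done
    # Prints:
    #   REF=refs/heads/4.0release, SRS_BRANCH=4.0release, SRS_PR=
    #   REF=refs/pull/2536/merge, SRS_BRANCH=, SRS_PR=2536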


@@ -3,7 +3,7 @@
![](http://ossrs.net/gif/v1/sls.gif?site=github.com&path=/srs/develop)
[![](https://github.com/ossrs/srs/actions/workflows/codeql-analysis.yml/badge.svg?branch=develop)](https://github.com/ossrs/srs/actions?query=workflow%3ACodeQL+branch%3Adevelop)
[![](https://github.com/ossrs/srs/actions/workflows/release.yml/badge.svg)](https://github.com/ossrs/srs/actions/workflows/release.yml?query=workflow%3ARelease)
[![](https://circleci.com/gh/ossrs/srs/tree/develop.svg?style=svg&circle-token=1ef1d5b5b0cde6c8c282ed856a18199f9e8f85a9)](https://circleci.com/gh/ossrs/srs/tree/develop)
[![](https://github.com/ossrs/srs/actions/workflows/test.yml/badge.svg?branch=develop)](https://github.com/ossrs/srs/actions?query=workflow%3ATest+branch%3Adevelop)
[![](https://codecov.io/gh/ossrs/srs/branch/develop/graph/badge.svg)](https://codecov.io/gh/ossrs/srs/branch/develop)
[![](https://gitee.com/winlinvip/srs-wiki/raw/master/images/wechat-badge.png)](../../wikis/Contact#wechat)
[![](https://gitee.com/winlinvip/srs-wiki/raw/master/images/bbs2.png)](http://bbs.ossrs.net)

trunk/.gitignore

@@ -38,6 +38,7 @@
./srs
*.dSYM/
*.gcov
*.gcno
*.ts
*.h264
*.264


@@ -637,155 +637,6 @@ func TestRtcBasic_PublishPlay(t *testing.T) {
}()
}
// When republishing a stream, the player stream SHOULD be continuous.
func TestRtcBasic_Republish(t *testing.T) {
ctx := logger.WithContext(context.Background())
ctx, cancel := context.WithTimeout(ctx, time.Duration(*srsTimeout)*time.Millisecond)
var r0, r1, r2, r3, r4 error
defer func(ctx context.Context) {
if err := filterTestError(ctx.Err(), r0, r1, r2, r3, r4); err != nil {
t.Errorf("Fail for err %+v", err)
} else {
logger.Tf(ctx, "test done with err %+v", err)
}
}(ctx)
var resources []io.Closer
defer func() {
for _, resource := range resources {
_ = resource.Close()
}
}()
var wg sync.WaitGroup
defer wg.Wait()
// The event notify.
var thePublisher, theRepublisher *testPublisher
var thePlayer *testPlayer
mainReady, mainReadyCancel := context.WithCancel(context.Background())
publishReady, publishReadyCancel := context.WithCancel(context.Background())
republishReady, republishReadyCancel := context.WithCancel(context.Background())
// Objects init.
wg.Add(1)
go func() {
defer wg.Done()
defer cancel()
doInit := func() (err error) {
streamSuffix := fmt.Sprintf("basic-publish-play-%v-%v", os.Getpid(), rand.Int())
// Initialize player with private api.
if thePlayer, err = newTestPlayer(registerDefaultCodecs, func(play *testPlayer) error {
play.streamSuffix = streamSuffix
resources = append(resources, play)
var nnPlayReadRTP uint64
return play.Setup(*srsVnetClientIP, func(api *testWebRTCAPI) {
api.registry.Add(newRTPInterceptor(func(i *rtpInterceptor) {
i.rtpReader = func(payload []byte, attributes interceptor.Attributes) (int, interceptor.Attributes, error) {
select {
case <-republishReady.Done():
if nnPlayReadRTP++; nnPlayReadRTP >= uint64(*srsPlayOKPackets) {
cancel() // Completed.
}
logger.Tf(ctx, "Play recv rtp %v packets", nnPlayReadRTP)
default:
logger.Tf(ctx, "Play recv rtp packet before republish")
}
return i.nextRTPReader.Read(payload, attributes)
}
}))
})
}); err != nil {
return err
}
// Initialize publisher with private api.
if thePublisher, err = newTestPublisher(registerDefaultCodecs, func(pub *testPublisher) error {
pub.streamSuffix = streamSuffix
pub.iceReadyCancel = publishReadyCancel
resources = append(resources, pub)
var nnPubReadRTCP uint64
return pub.Setup(*srsVnetClientIP, func(api *testWebRTCAPI) {
api.registry.Add(newRTCPInterceptor(func(i *rtcpInterceptor) {
i.rtcpReader = func(buf []byte, attributes interceptor.Attributes) (int, interceptor.Attributes, error) {
nn, attr, err := i.nextRTCPReader.Read(buf, attributes)
if nnPubReadRTCP++; nnPubReadRTCP > 0 && pub.cancel != nil {
pub.cancel() // We only cancel the publisher itself.
}
logger.Tf(ctx, "Publish recv rtcp %v packets", nnPubReadRTCP)
return nn, attr, err
}
}))
})
}); err != nil {
return err
}
// Initialize re-publisher with private api.
if theRepublisher, err = newTestPublisher(registerDefaultCodecs, func(pub *testPublisher) error {
pub.streamSuffix = streamSuffix
pub.iceReadyCancel = republishReadyCancel
resources = append(resources, pub)
return pub.Setup(*srsVnetClientIP)
}); err != nil {
return err
}
// Init done.
mainReadyCancel()
<-ctx.Done()
return nil
}
if err := doInit(); err != nil {
r1 = err
}
}()
// Run publisher.
wg.Add(1)
go func() {
defer wg.Done()
defer cancel()
select {
case <-ctx.Done():
case <-mainReady.Done():
pubCtx, pubCancel := context.WithCancel(ctx)
r2 = thePublisher.Run(logger.WithContext(pubCtx), pubCancel)
logger.Tf(ctx, "pub done, re-publish again")
// Dispose the stream.
_ = thePublisher.Close()
r4 = theRepublisher.Run(logger.WithContext(ctx), cancel)
logger.Tf(ctx, "re-pub done")
}
}()
// Run player.
wg.Add(1)
go func() {
defer wg.Done()
defer cancel()
select {
case <-ctx.Done():
case <-publishReady.Done():
r3 = thePlayer.Run(logger.WithContext(ctx), cancel)
logger.Tf(ctx, "play done")
}
}()
}
// The srs-server is the DTLS server (passive), and srs-bench is the DTLS client (active mode).
// No.1 srs-bench: ClientHello
// No.2 srs-server: ServerHello, Certificate, ServerKeyExchange, CertificateRequest, ServerHelloDone


@@ -4,8 +4,8 @@ FROM ossrs/srs:dev AS build
RUN yum install -y gcc make gcc-c++ patch unzip perl git
# Build and install SRS.
COPY . /trunk
WORKDIR /trunk
COPY . /srs
WORKDIR /srs/trunk
RUN ./configure --srt=on --jobs=2 && make -j2 && make install
# All config files for SRS.

trunk/Dockerfile.builds (new file)

@@ -0,0 +1,67 @@
########################################################
FROM ossrs/srs:dev AS centos7-baseline
COPY . /srs
RUN cd /srs/trunk && ./configure --jobs=2 && make -j2
FROM ossrs/srs:dev AS centos7-no-webrtc
COPY . /srs
RUN cd /srs/trunk && ./configure --jobs=2 --rtc=off && make -j2
FROM ossrs/srs:dev AS centos7-no-asm
COPY . /srs
RUN cd /srs/trunk && ./configure --jobs=2 --nasm=off --srtp-nasm=off && make -j2
FROM ossrs/srs:dev AS centos7-srt
COPY . /srs
RUN cd /srs/trunk && ./configure --jobs=2 --srt=on && make -j2
FROM ossrs/srs:dev AS centos7-ansi-no-ffmpeg
COPY . /srs
RUN cd /srs/trunk && ./configure --jobs=2 --cxx11=off --cxx14=off --ffmpeg-fit=off && make -j2
########################################################
FROM ossrs/srs:dev8 AS centos8-baseline
COPY . /srs
RUN cd /srs/trunk && ./configure --jobs=2 && make -j2
FROM ossrs/srs:dev8 AS centos8-srt
COPY . /srs
RUN cd /srs/trunk && ./configure --jobs=2 --srt=on && make -j2
########################################################
FROM ossrs/srs:ubuntu16 AS ubuntu16-baseline
COPY . /srs
RUN cd /srs/trunk && ./configure --jobs=2 && make -j2
FROM ossrs/srs:ubuntu16 AS ubuntu16-srt
COPY . /srs
RUN cd /srs/trunk && ./configure --jobs=2 --srt=on && make -j2
########################################################
FROM ossrs/srs:ubuntu18 AS ubuntu18-baseline
COPY . /srs
RUN cd /srs/trunk && ./configure --jobs=2 && make -j2
FROM ossrs/srs:ubuntu18 AS ubuntu18-srt
COPY . /srs
RUN cd /srs/trunk && ./configure --jobs=2 --srt=on && make -j2
########################################################
FROM ossrs/srs:ubuntu20 AS ubuntu20-baseline
COPY . /srs
RUN cd /srs/trunk && ./configure --jobs=2 && make -j2
FROM ossrs/srs:ubuntu20 AS ubuntu20-srt
COPY . /srs
RUN cd /srs/trunk && ./configure --jobs=2 --srt=on && make -j2
########################################################
FROM ossrs/srs:ubuntu16 AS ubuntu16-cross-armv7
COPY . /srs
RUN cd /srs/trunk && ./configure --jobs=2 --cross-build --cc=arm-linux-gnueabihf-gcc --cxx=arm-linux-gnueabihf-g++ \
--ar=arm-linux-gnueabihf-ar --ld=arm-linux-gnueabihf-ld --randlib=arm-linux-gnueabihf-randlib && make -j2
FROM ossrs/srs:ubuntu16 AS ubuntu16-cross-aarch64
COPY . /srs
RUN cd /srs/trunk && ./configure --jobs=2 --cross-build --cc=aarch64-linux-gnu-gcc --cxx=aarch64-linux-gnu-g++ \
--ar=aarch64-linux-gnu-ar --ld=aarch64-linux-gnu-ld --randlib=aarch64-linux-gnu-randlib && make -j2

trunk/Dockerfile.cov (new file)

@@ -0,0 +1,9 @@
FROM ossrs/srs:dev
# Install depends tools.
RUN yum install -y gcc make gcc-c++ patch unzip perl git
# Build and install SRS.
COPY . /srs
WORKDIR /srs/trunk
RUN ./configure --with-utest --gcov --jobs=2 && make -j2


@@ -4,8 +4,8 @@ FROM ossrs/srs:dev
RUN yum install -y gcc make gcc-c++ patch unzip perl git
# Build and install SRS.
COPY . /trunk
WORKDIR /trunk
COPY . /srs
WORKDIR /srs/trunk
RUN ./configure --srt=on --utest=on --jobs=2 && make -j2
RUN cd 3rdparty/srs-bench && make


@@ -8,29 +8,28 @@
# Workdir is objs/cover.
workdir=`pwd`/objs/cover
# Tool git is required to map the right path.
git --version >/dev/null 2>&1
ret=$?; if [[ $ret -ne 0 ]]; then echo "Tool git is required, ret=$ret"; exit $ret; fi
# Create trunk under workdir.
mkdir -p $workdir && cd $workdir
ret=$?; if [[ $ret -ne 0 ]]; then echo "Enter workdir failed, ret=$ret"; exit $ret; fi
# Collect all *.gcno and *.gcda to objs/cover.
cd $workdir && (rm -rf src && cp -R ../../src . && cp -R ../src/* src/)
ret=$?; if [[ $ret -ne 0 ]]; then echo "Collect *.gcno and *.gcda failed, ret=$ret"; exit $ret; fi
# Generate *.gcov for coverage.
cd $workdir &&
for file in `find src -name "*.cpp"|grep -v utest`; do
gcov $file -o `dirname $file`
ret=$?; if [[ $ret -ne 0 ]]; then echo "Collect $file failed, ret=$ret"; exit $ret; fi
done
# Cook the gcov files.
cd $workdir &&
find . -name "*.gcov"|grep -v srs|xargs rm -f
ret=$?; if [[ $ret -ne 0 ]]; then echo "Cook gcov files failed, ret=$ret"; exit $ret; fi
CODECOV_ARGS=""
if [[ $SRS_PROJECT != '' ]]; then
# -R root dir Used when not in git/hg project to identify project root directory
# -p dir Project root directory. Also used when preparing gcov
CODECOV_ARGS="$CODECOV_ARGS -R $SRS_PROJECT -p $SRS_PROJECT"
fi
if [[ $SRS_BRANCH != '' ]]; then
# -B branch Specify the branch name
CODECOV_ARGS="$CODECOV_ARGS -B $SRS_BRANCH"
fi
if [[ $SRS_SHA != '' ]]; then
# -C sha Specify the commit sha
CODECOV_ARGS="$CODECOV_ARGS -C $SRS_SHA"
fi
if [[ $SRS_PR != '' ]]; then
# -P pr Specify the pull request number
CODECOV_ARGS="$CODECOV_ARGS -P $SRS_PR"
fi
# Upload report with *.gcov
# Remark: The file codecov.yml is not necessary. The upload only depends on git.
@@ -39,9 +38,7 @@ ret=$?; if [[ $ret -ne 0 ]]; then echo "Cook gcov files failed, ret=$ret"; exit
# https://codecov.io/gh/ossrs/srs/src/20fbb4466fdc8ba5d810b8570df6004063212838/trunk/src/protocol/srs_rtmp_stack.cpp
# Remark: It takes a few minutes to sync with github, so it might not be available when CircleCI is done.
# https://circleci.com/gh/ossrs/srs/tree/3.0release
#
# Note: Use '-X gcov' to avoid generating the gcov files again.
cd $workdir &&
export CODECOV_TOKEN="493bba46-c468-4e73-8b45-8cdd8ff62d96" &&
bash <(curl -s https://codecov.io/bash) -X gcov &&
bash <(curl -s https://codecov.io/bash) $CODECOV_ARGS &&
echo "Done" && exit 0