package webrtc

import (
	"bufio"
	"fmt"
	"io"
	"log"
	"math/rand"
	"net"
	"os"
	"os/exec"
	"time"

	"github.com/pion/rtp"
	"github.com/pion/webrtc/v3"
	"github.com/pion/webrtc/v3/pkg/media"
	"github.com/pion/webrtc/v3/pkg/media/ivfreader"
	"github.com/pion/webrtc/v3/pkg/media/oggreader"

	"gitlab.crans.org/nounous/ghostream/internal/monitoring"
	"gitlab.crans.org/nounous/ghostream/stream/srt"
)

// Options holds the WebRTC package configuration
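//
// An illustrative configuration (example values, not defaults):
//
//	cfg := &Options{
//		MinPortUDP:  10000,
//		MaxPortUDP:  10005,
//		STUNServers: []string{"stun:stun.l.google.com:19302"},
//	}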
type Options struct {
	MinPortUDP  uint16
	MaxPortUDP  uint16
	STUNServers []string
}

// SessionDescription contains SDP data
// used to initiate a WebRTC connection between one client and this app
type SessionDescription = webrtc.SessionDescription

const (
	audioFileName = "output.ogg"
	videoFileName = "toto.ivf"
)

var (
	videoTracks []*webrtc.Track
	audioTracks []*webrtc.Track
)

// removeTrack removes a track from a track slice and returns the resliced slice.
// If the track is not in the slice, it returns nil.
func removeTrack(tracks []*webrtc.Track, track *webrtc.Track) []*webrtc.Track {
	for i, t := range tracks {
		if t == track {
			return append(tracks[:i], tracks[i+1:]...)
		}
	}
	return nil
}

// GetNumberConnectedSessions returns the number of currently connected clients
func GetNumberConnectedSessions() int {
	return len(videoTracks)
}

// newPeerHandler is called when the server receives a new session description.
// It initiates a WebRTC connection and returns the server session description.
func newPeerHandler(remoteSdp webrtc.SessionDescription, cfg *Options) webrtc.SessionDescription {
	// Create media engine using client SDP
	mediaEngine := webrtc.MediaEngine{}
	if err := mediaEngine.PopulateFromSDP(remoteSdp); err != nil {
		log.Println("Failed to create new media engine", err)
		return webrtc.SessionDescription{}
	}

	// Create a new PeerConnection
	settingsEngine := webrtc.SettingEngine{}
	if err := settingsEngine.SetEphemeralUDPPortRange(cfg.MinPortUDP, cfg.MaxPortUDP); err != nil {
		log.Println("Failed to set min/max UDP ports", err)
		return webrtc.SessionDescription{}
	}
	api := webrtc.NewAPI(
		webrtc.WithMediaEngine(mediaEngine),
		webrtc.WithSettingEngine(settingsEngine),
	)
	peerConnection, err := api.NewPeerConnection(webrtc.Configuration{
		ICEServers: []webrtc.ICEServer{{URLs: cfg.STUNServers}},
	})
	if err != nil {
		log.Println("Failed to initiate peer connection", err)
		return webrtc.SessionDescription{}
	}

	// Create video track
	codec, payloadType := getPayloadType(mediaEngine, webrtc.RTPCodecTypeVideo, "VP8")
	videoTrack, err := webrtc.NewTrack(payloadType, rand.Uint32(), "video", "pion", codec)
	if err != nil {
		log.Println("Failed to create new video track", err)
		return webrtc.SessionDescription{}
	}
	if _, err = peerConnection.AddTrack(videoTrack); err != nil {
		log.Println("Failed to add video track", err)
		return webrtc.SessionDescription{}
	}

	// Create audio track
	codec, payloadType = getPayloadType(mediaEngine, webrtc.RTPCodecTypeAudio, "opus")
	audioTrack, err := webrtc.NewTrack(payloadType, rand.Uint32(), "audio", "pion", codec)
	if err != nil {
		log.Println("Failed to create new audio track", err)
		return webrtc.SessionDescription{}
	}
	if _, err = peerConnection.AddTrack(audioTrack); err != nil {
		log.Println("Failed to add audio track", err)
		return webrtc.SessionDescription{}
	}

	// Set the remote SessionDescription
	if err = peerConnection.SetRemoteDescription(remoteSdp); err != nil {
		log.Println("Failed to set remote description", err)
		return webrtc.SessionDescription{}
	}

	// Create answer
	answer, err := peerConnection.CreateAnswer(nil)
	if err != nil {
		log.Println("Failed to create answer", err)
		return webrtc.SessionDescription{}
	}

	// Create channel that is blocked until ICE gathering is complete
	gatherComplete := webrtc.GatheringCompletePromise(peerConnection)

	// Set the LocalDescription and start our UDP listeners
	if err = peerConnection.SetLocalDescription(answer); err != nil {
		log.Println("Failed to set local description", err)
		return webrtc.SessionDescription{}
	}

	// Set the handler for ICE connection state
	// This will notify you when the peer has connected/disconnected
	peerConnection.OnICEConnectionStateChange(func(connectionState webrtc.ICEConnectionState) {
		log.Printf("Connection state has changed: %s\n", connectionState.String())
		if connectionState == webrtc.ICEConnectionStateConnected {
			// Register tracks
			videoTracks = append(videoTracks, videoTrack)
			audioTracks = append(audioTracks, audioTrack)
			monitoring.WebRTCConnectedSessions.Inc()
		} else if connectionState == webrtc.ICEConnectionStateDisconnected {
			// Unregister tracks
			videoTracks = removeTrack(videoTracks, videoTrack)
			audioTracks = removeTrack(audioTracks, audioTrack)
			monitoring.WebRTCConnectedSessions.Dec()
		}
	})

	// Block until ICE gathering is complete, disabling trickle ICE.
	// We do this because we can only exchange one signaling message;
	// in a production application you should exchange ICE candidates via OnICECandidate.
	<-gatherComplete

	// Output the local description and send it to the browser
	return *peerConnection.LocalDescription()
}
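
// Note: a trickle ICE variant (not used here) would forward candidates to the
// client as they are gathered instead of blocking on GatheringCompletePromise.
// Minimal sketch, assuming a hypothetical sendCandidateToClient helper:
//
//	peerConnection.OnICECandidate(func(c *webrtc.ICECandidate) {
//		if c != nil {
//			sendCandidateToClient(c.ToJSON())
//		}
//	})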
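
// playVideo streams the local IVF test file to all registered video tracks,
// pacing frames according to the IVF timebase.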
func playVideo() {
	// Open an IVF file and start reading using our IVFReader
	file, ivfErr := os.Open(videoFileName)
	if ivfErr != nil {
		panic(ivfErr)
	}

	ivf, header, ivfErr := ivfreader.NewWith(file)
	if ivfErr != nil {
		panic(ivfErr)
	}

	// Send our video file frame by frame. Pace our sending so we send it at the same speed it should be played back.
	// This isn't required since the video is timestamped, but we will see much higher loss if we send it all at once.
	sleepTime := time.Millisecond * time.Duration((float32(header.TimebaseNumerator)/float32(header.TimebaseDenominator))*1000)
	for {
		// Need at least one client
		frame, _, ivfErr := ivf.ParseNextFrame()
		if ivfErr == io.EOF {
			fmt.Printf("All video frames parsed and sent")
			os.Exit(0)
		}
		if ivfErr != nil {
			panic(ivfErr)
		}

		time.Sleep(sleepTime)
		for _, t := range videoTracks {
			if ivfErr = t.WriteSample(media.Sample{Data: frame, Samples: 90000}); ivfErr != nil {
				log.Fatalln("Failed to write video stream:", ivfErr)
			}
		}
	}
}
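
// playAudio streams the local Ogg/Opus test file to all registered audio tracks,
// pacing pages according to their granule positions.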
func playAudio() {
	// Open the Ogg file and start reading using our OggReader
	file, oggErr := os.Open(audioFileName)
	if oggErr != nil {
		panic(oggErr)
	}

	// Open the Ogg file in non-checksum mode.
	ogg, _, oggErr := oggreader.NewWith(file)
	if oggErr != nil {
		panic(oggErr)
	}

	// Keep track of the last granule, the difference is the amount of samples in the buffer
	var lastGranule uint64
	for {
		// Need at least one client
		pageData, pageHeader, oggErr := ogg.ParseNextPage()
		if oggErr == io.EOF {
			fmt.Printf("All audio pages parsed and sent")
			os.Exit(0)
		}
		if oggErr != nil {
			panic(oggErr)
		}

		// The amount of samples is the difference between the last and current timestamp
		sampleCount := float64(pageHeader.GranulePosition - lastGranule)
		lastGranule = pageHeader.GranulePosition

		for _, t := range audioTracks {
			if oggErr = t.WriteSample(media.Sample{Data: pageData, Samples: uint32(sampleCount)}); oggErr != nil {
				log.Fatalln("Failed to write audio stream:", oggErr)
			}
		}

		// Convert seconds to milliseconds, Sleep doesn't accept floats
		time.Sleep(time.Duration((sampleCount/48000)*1000) * time.Millisecond)
	}
}

// getPayloadType searches the media engine for the given codec and returns it
// along with its payload type.
//
// Since we are answering, we need to match the remote peer's PayloadType.
func getPayloadType(m webrtc.MediaEngine, codecType webrtc.RTPCodecType, codecName string) (*webrtc.RTPCodec, uint8) {
	for _, codec := range m.GetCodecsByKind(codecType) {
		if codec.Name == codecName {
			return codec, codec.PayloadType
		}
	}
	panic(fmt.Sprintf("Remote peer does not support %s", codecName))
}
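
// waitForPackets consumes SRT packets from inputChannel. On a "register"
// packet it spawns an ffmpeg process that transcodes the incoming stream to
// VP8/Opus RTP on localhost ports 5004/5005, and forwards the resulting RTP
// packets to every registered WebRTC track. "sendData" packets are piped to
// ffmpeg's standard input.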
func waitForPackets(inputChannel chan srt.Packet) {
	// FIXME Clean code
	var ffmpeg *exec.Cmd
	var ffmpegInput io.WriteCloser
	for {
		var err error
		packet := <-inputChannel
		switch packet.PacketType {
		case "register":
			log.Printf("WebRTC RegisterStream %s", packet.StreamName)

			// Copied from https://github.com/pion/webrtc/blob/master/examples/rtp-to-webrtc/main.go

			// Open UDP listeners for RTP packets: video on port 5004, audio on port 5005
			videoListener, err := net.ListenUDP("udp", &net.UDPAddr{IP: net.ParseIP("127.0.0.1"), Port: 5004})
			if err != nil {
				panic(err)
			}
			audioListener, err := net.ListenUDP("udp", &net.UDPAddr{IP: net.ParseIP("127.0.0.1"), Port: 5005})
			if err != nil {
				panic(err)
			}
			defer func() {
				if err = videoListener.Close(); err != nil {
					panic(err)
				}
				if err = audioListener.Close(); err != nil {
					panic(err)
				}
			}()
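
			// Transcode whatever arrives on stdin into two RTP streams:
			// VP8 video towards 127.0.0.1:5004 and Opus audio towards 127.0.0.1:5005,
			// matching the UDP listeners opened above.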
			ffmpeg = exec.Command("ffmpeg", "-re", "-i", "pipe:0",
				"-an", "-vcodec", "libvpx", //"-cpu-used", "5", "-deadline", "1", "-g", "10", "-error-resilient", "1", "-auto-alt-ref", "1",
				"-f", "rtp", "rtp://127.0.0.1:5004",
				"-vn", "-acodec", "libopus", //"-cpu-used", "5", "-deadline", "1", "-g", "10", "-error-resilient", "1", "-auto-alt-ref", "1",
				"-f", "rtp", "rtp://127.0.0.1:5005")

			fmt.Println("Waiting for RTP packets from ffmpeg")

			input, err := ffmpeg.StdinPipe()
			if err != nil {
				panic(err)
			}
			ffmpegInput = input
			errOutput, err := ffmpeg.StderrPipe()
			if err != nil {
				panic(err)
			}

			if err := ffmpeg.Start(); err != nil {
				panic(err)
			}

			// Receive video
			go func() {
				for {
					// Listen for a single RTP packet, we need this to determine the SSRC
					inboundRTPPacket := make([]byte, 1500) // UDP MTU
					n, _, err := videoListener.ReadFromUDP(inboundRTPPacket)
					if err != nil {
						panic(err)
					}
					packet := &rtp.Packet{}
					if err := packet.Unmarshal(inboundRTPPacket[:n]); err != nil {
						panic(err)
					}
					log.Printf("[Video] %s", packet)
					for _, videoTrack := range videoTracks {
						if writeErr := videoTrack.WriteRTP(packet); writeErr != nil {
							panic(writeErr)
						}
					}
				}
			}()

			// Receive audio
			go func() {
				for {
					// Listen for a single RTP packet, we need this to determine the SSRC
					inboundRTPPacket := make([]byte, 1500) // UDP MTU
					n, _, err := audioListener.ReadFromUDP(inboundRTPPacket)
					if err != nil {
						panic(err)
					}
					packet := &rtp.Packet{}
					if err := packet.Unmarshal(inboundRTPPacket[:n]); err != nil {
						panic(err)
					}
					log.Printf("[Audio] %s", packet)
					for _, audioTrack := range audioTracks {
						if writeErr := audioTrack.WriteRTP(packet); writeErr != nil {
							panic(writeErr)
						}
					}
				}
			}()
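
			// Forward ffmpeg's stderr output to the application log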
			go func() {
				scanner := bufio.NewScanner(errOutput)
				for scanner.Scan() {
					log.Printf("[WEBRTC FFMPEG %s] %s", "demo", scanner.Text())
				}
			}()
		case "sendData":
			// log.Printf("WebRTC SendPacket %s", packet.StreamName)
			_, err := ffmpegInput.Write(packet.Data)
			if err != nil {
				panic(err)
			}
		case "close":
			log.Printf("WebRTC CloseConnection %s", packet.StreamName)
		default:
			log.Println("Unknown SRT packet type:", packet.PacketType)
		}
		if err != nil {
			log.Printf("Error occurred while receiving SRT packet of type %s: %s", packet.PacketType, err)
		}
	}
}

// Serve starts the WebRTC signaling and media streaming server.
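//
// Illustrative wiring from the caller's side (channel names are examples):
//
//	remoteSdp := make(chan webrtc.SessionDescription)
//	localSdp := make(chan webrtc.SessionDescription)
//	srtInput := make(chan srt.Packet)
//	go Serve(remoteSdp, localSdp, srtInput, &Options{MinPortUDP: 10000, MaxPortUDP: 10005})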
func Serve(remoteSdpChan, localSdpChan chan webrtc.SessionDescription, inputChannel chan srt.Packet, cfg *Options) {
	log.Printf("WebRTC server using UDP ports %d to %d", cfg.MinPortUDP, cfg.MaxPortUDP)

	// FIXME: use data from inputChannel
	go waitForPackets(inputChannel)
	// go playVideo()
	// go playAudio()

	// Handle new connections
	for {
		// Wait for an incoming session description,
		// then send the local description to the browser
		localSdpChan <- newPeerHandler(<-remoteSdpChan, cfg)
	}
}