
Commit 56530ab

nikismarkandrus authored and committed

New examples (#12)

1 parent 5919dcd, commit 56530ab

9 files changed: 339 additions & 0 deletions

README.md

Lines changed: 2 additions & 0 deletions

@@ -15,6 +15,8 @@ This project presents a few example applications using node-webrtc.
 - [video-compositing](examples/video-compositing): uses RTCVideoSink,
   [node-canvas](https://github.com/Automattic/node-canvas), and RTCVideoSource
   to draw spinning text on top of an incoming video.
+- [Stream recording](examples/stream-record) using ffmpeg and RTCVideoSink
+- SFU example with one [broadcaster](examples/sfu-broadcast) which forwards its stream to many [clients](examples/sfu-watch)
 
 Usage
 -----
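The SFU example listed above rests on one node-webrtc primitive: an RTCVideoSink attached to the broadcaster's incoming track feeds an RTCVideoSource, whose outgoing track can be handed to any number of watchers. A condensed sketch of that bridge, assuming the nonstandard wrtc API used throughout this commit (forwardVideo and the commented usage line are illustrative placeholders, not part of the commit):

const { RTCVideoSink, RTCVideoSource } = require('wrtc').nonstandard;

// Bridge one incoming video track to a locally generated track that can be
// attached to every watcher's sender.
function forwardVideo(incomingTrack) {
  const sink = new RTCVideoSink(incomingTrack);   // raw frames from the broadcaster
  const source = new RTCVideoSource();            // produces the forwarded track
  sink.addEventListener('frame', ({ frame }) => source.onFrame(frame));
  return { track: source.createTrack(), stop: () => sink.stop() };
}

// Usage sketch:
// watcherPc.addTransceiver('video').sender.replaceTrack(forwardVideo(track).track);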

examples/sfu-broadcast/client.js

Lines changed: 40 additions & 0 deletions

@@ -0,0 +1,40 @@
'use strict';

const createExample = require('../../lib/browser/example');

const description = 'Example of an <a href="https://webrtcglossary.com/sfu/" target="_blank">SFU</a>.<br/><br/>\
Start broadcasting and your stream will be forwarded using RTCAudioSink, RTCVideoSink, RTCAudioSource and RTCVideoSource.';

const localVideo = document.createElement('video');
localVideo.autoplay = true;
localVideo.muted = true;

async function beforeAnswer(peerConnection) {
  const localStream = await window.navigator.mediaDevices.getUserMedia({
    audio: true,
    video: true
  });

  localStream.getTracks().forEach(track => peerConnection.addTrack(track, localStream));

  localVideo.srcObject = localStream;

  // NOTE(mroberts): This is a hack so that we can get a callback when the
  // RTCPeerConnection is closed. In the future, we can subscribe to
  // "connectionstatechange" events.
  const { close } = peerConnection;
  peerConnection.close = function() {
    localVideo.srcObject = null;

    localStream.getTracks().forEach(track => track.stop());

    return close.apply(this, arguments);
  };
}

createExample('sfu-broadcast', description, { beforeAnswer });

const videos = document.createElement('div');
videos.className = 'grid';
videos.appendChild(localVideo);
document.body.appendChild(videos);
examples/sfu-broadcast/server.js

Lines changed: 61 additions & 0 deletions

@@ -0,0 +1,61 @@
'use strict';

const { EventEmitter } = require('events');

const { RTCAudioSource, RTCVideoSource, RTCAudioSink, RTCVideoSink } = require('wrtc').nonstandard;

let audioTrack, videoTrack;

const broadcastEvent = new EventEmitter();
const { on } = broadcastEvent;
broadcastEvent.on = function(event, callback) {
  // Fire the callback immediately in case a broadcaster is already connected,
  // so late subscribers still receive the current tracks.
  if (event === 'newBroadcast' && audioTrack && videoTrack) {
    callback.apply(this, [{
      audioTrack,
      videoTrack
    }]);
  }
  return on.apply(this, arguments);
};

function beforeOffer(peerConnection) {
  const videoSource = new RTCVideoSource();
  const audioSource = new RTCAudioSource();

  videoTrack = videoSource.createTrack();
  audioTrack = audioSource.createTrack();

  broadcastEvent.emit('newBroadcast', {
    audioTrack,
    videoTrack
  });

  const audioSink = new RTCAudioSink(peerConnection.addTransceiver('audio').receiver.track);
  const videoSink = new RTCVideoSink(peerConnection.addTransceiver('video').receiver.track);

  audioSink.addEventListener('data', function(data) {
    audioSource.onData(data);
  });

  videoSink.addEventListener('frame', ({ frame }) => {
    videoSource.onFrame(frame);
  });

  const { close } = peerConnection;
  peerConnection.close = function() {
    audioSink.stop();
    videoSink.stop();
    videoTrack.stop();
    audioTrack.stop();
    return close.apply(this, arguments);
  };
}

module.exports = {
  beforeOffer,
  event: broadcastEvent
};
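The patched broadcastEvent.on above is a small replay trick: a watcher that subscribes after the broadcaster has already connected still receives the current tracks immediately, and then keeps listening for future broadcasts. A self-contained sketch of the same pattern with an arbitrary payload (replayingEmitter and the usage lines are hypothetical, for illustration only):

const { EventEmitter } = require('events');

// EventEmitter whose latest payload for `eventName` is replayed to late subscribers.
function replayingEmitter(eventName) {
  const emitter = new EventEmitter();
  let last;
  emitter.on(eventName, payload => { last = payload; });   // remember the most recent payload
  const { on } = emitter;
  emitter.on = function(event, listener) {
    if (event === eventName && last !== undefined) {
      listener.call(this, last);                            // replay it to the new listener
    }
    return on.apply(this, arguments);
  };
  return emitter;
}

// Usage sketch:
// const bus = replayingEmitter('newBroadcast');
// bus.emit('newBroadcast', { audioTrack, videoTrack });
// bus.on('newBroadcast', tracks => { /* fires immediately, then on every later emit */ });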

examples/sfu-watch/client.js

Lines changed: 30 additions & 0 deletions

@@ -0,0 +1,30 @@
'use strict';

const createExample = require('../../lib/browser/example');

const description = 'Example of an <a href="https://webrtcglossary.com/sfu/" target="_blank">SFU</a>.<br/><br/>\
Start receiving the sfu-broadcast stream.';

const remoteVideo = document.createElement('video');
remoteVideo.autoplay = true;

async function beforeAnswer(peerConnection) {
  const remoteStream = new MediaStream(peerConnection.getReceivers().map(receiver => receiver.track));
  remoteVideo.srcObject = remoteStream;

  // NOTE(mroberts): This is a hack so that we can get a callback when the
  // RTCPeerConnection is closed. In the future, we can subscribe to
  // "connectionstatechange" events.
  const { close } = peerConnection;
  peerConnection.close = function() {
    remoteVideo.srcObject = null;
    return close.apply(this, arguments);
  };
}

createExample('sfu-watch', description, { beforeAnswer });

const videos = document.createElement('div');
videos.className = 'grid';
videos.appendChild(remoteVideo);
document.body.appendChild(videos);

examples/sfu-watch/server.js

Lines changed: 23 additions & 0 deletions

@@ -0,0 +1,23 @@
'use strict';

const { event } = require('../sfu-broadcast/server');

function beforeOffer(peerConnection) {
  const audioTransceiver = peerConnection.addTransceiver('audio');
  const videoTransceiver = peerConnection.addTransceiver('video');

  const onNewBroadcast = ({ audioTrack, videoTrack }) => {
    audioTransceiver.sender.replaceTrack(audioTrack);
    videoTransceiver.sender.replaceTrack(videoTrack);
  };

  event.on('newBroadcast', onNewBroadcast);

  const { close } = peerConnection;
  peerConnection.close = function() {
    event.removeListener('newBroadcast', onNewBroadcast);
    return close.apply(this, arguments);
  };
}

module.exports = { beforeOffer };
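One note on the replaceTrack calls above: RTCRtpSender.replaceTrack() returns a promise, so a rejection (for example, a track of the wrong kind) would otherwise surface as an unhandled rejection. A guarded variant of the handler might look like this (a sketch, not part of the commit):

const onNewBroadcast = ({ audioTrack, videoTrack }) => {
  Promise.all([
    audioTransceiver.sender.replaceTrack(audioTrack),
    videoTransceiver.sender.replaceTrack(videoTrack)
  ]).catch(err => console.error('replaceTrack failed:', err));  // log instead of crashing
};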

examples/stream-record/client.js

Lines changed: 39 additions & 0 deletions

@@ -0,0 +1,39 @@
'use strict';

const createExample = require('../../lib/browser/example');

const description = 'Transcode and record audio and video into different video resolutions, then merge them into a single file.';

const localVideo = document.createElement('video');
localVideo.autoplay = true;
localVideo.muted = true;

async function beforeAnswer(peerConnection) {
  const localStream = await window.navigator.mediaDevices.getUserMedia({
    audio: true,
    video: true
  });

  localStream.getTracks().forEach(track => peerConnection.addTrack(track, localStream));

  localVideo.srcObject = localStream;

  // NOTE(mroberts): This is a hack so that we can get a callback when the
  // RTCPeerConnection is closed. In the future, we can subscribe to
  // "connectionstatechange" events.
  const { close } = peerConnection;
  peerConnection.close = function() {
    localVideo.srcObject = null;

    localStream.getTracks().forEach(track => track.stop());

    return close.apply(this, arguments);
  };
}

createExample('stream-record', description, { beforeAnswer });

const videos = document.createElement('div');
videos.className = 'grid';
videos.appendChild(localVideo);
document.body.appendChild(videos);

examples/stream-record/server.js

Lines changed: 140 additions & 0 deletions

@@ -0,0 +1,140 @@
'use strict';

const { PassThrough } = require('stream');

const { RTCAudioSink, RTCVideoSink } = require('wrtc').nonstandard;

const ffmpeg = require('fluent-ffmpeg');
const { StreamInput } = require('fluent-ffmpeg-multistream');
const fs = require('fs');

const VIDEO_OUTPUT_SIZE = '320x240';
const VIDEO_OUTPUT_FILE = './recording.mp4';

let UID = 0;

function beforeOffer(peerConnection) {
  const audioTransceiver = peerConnection.addTransceiver('audio');
  const videoTransceiver = peerConnection.addTransceiver('video');

  const audioSink = new RTCAudioSink(audioTransceiver.receiver.track);
  const videoSink = new RTCVideoSink(videoTransceiver.receiver.track);

  const streams = [];

  videoSink.addEventListener('frame', function({ frame: { width, height, data } }) {
    const size = width + 'x' + height;
    // Start a new recording segment whenever the incoming resolution changes.
    if (!streams[0] || streams[0].size !== size) {
      UID++;

      const stream = {
        recordPath: './recording-' + size + '-' + UID + '.mp4',
        size,
        video: new PassThrough(),
        audio: new PassThrough()
      };

      const onAudioData = function({ samples: { buffer } }) {
        if (!stream.end) {
          stream.audio.push(Buffer.from(buffer));
        }
      };

      audioSink.addEventListener('data', onAudioData);

      stream.audio.on('end', () => {
        audioSink.removeEventListener('data', onAudioData);
      });

      streams.unshift(stream);

      // Close out any previous segments.
      streams.forEach(item => {
        if (item !== stream && !item.end) {
          item.end = true;
          if (item.audio) {
            item.audio.end();
          }
          item.video.end();
        }
      });

      stream.proc = ffmpeg()
        .addInput((new StreamInput(stream.video)).url)
        .addInputOptions([
          '-f', 'rawvideo',
          '-pix_fmt', 'yuv420p',
          '-s', stream.size,
          '-r', '30',
        ])
        .addInput((new StreamInput(stream.audio)).url)
        .addInputOptions([
          '-f s16le',
          '-ar 48k',
          '-ac 1',
        ])
        .on('start', () => {
          console.log('Start recording >> ', stream.recordPath);
        })
        .on('end', () => {
          stream.recordEnd = true;
          console.log('Stop recording >> ', stream.recordPath);
        })
        .size(VIDEO_OUTPUT_SIZE)
        .output(stream.recordPath);

      stream.proc.run();
    }

    streams[0].video.push(Buffer.from(data));
  });

  const { close } = peerConnection;
  peerConnection.close = function() {
    audioSink.stop();
    videoSink.stop();

    streams.forEach(({ audio, video, end }) => {
      if (!end) {
        if (audio) {
          audio.end();
        }
        video.end();
      }
    });

    // Poll until every segment has finished recording, then merge them.
    const timer = setInterval(() => {
      const recordedCount = streams.filter(stream => stream.recordEnd).length;
      if (recordedCount === streams.length) {
        clearInterval(timer);

        const mergeProc = ffmpeg()
          .on('start', () => {
            console.log('Start merging into ' + VIDEO_OUTPUT_FILE);
          })
          .on('end', () => {
            streams.forEach(({ recordPath }) => {
              fs.unlinkSync(recordPath);
            });
            console.log('Merge end. You can play ' + VIDEO_OUTPUT_FILE);
          });

        streams.forEach(({ recordPath }) => {
          mergeProc.addInput(recordPath);
        });

        mergeProc
          .output(VIDEO_OUTPUT_FILE)
          .run();
      }
    }, 1000);

    return close.apply(this, arguments);
  };
}

module.exports = { beforeOffer };
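When the incoming resolution changes mid-call, the recorder above produces one file per resolution and then merges them with a final ffmpeg pass built from addInput/output. fluent-ffmpeg also documents a dedicated concatenation helper, mergeToFile, which could serve the same purpose; a hedged sketch (mergeRecordings and the './tmp' scratch directory are assumptions, not what this commit uses):

const ffmpeg = require('fluent-ffmpeg');

// Concatenate the per-resolution recordings into a single output file.
function mergeRecordings(recordPaths, outputFile) {
  const proc = ffmpeg();
  recordPaths.forEach(recordPath => proc.addInput(recordPath));
  proc
    .on('end', () => console.log('Merged into ' + outputFile))
    .mergeToFile(outputFile, './tmp');   // second argument is a directory for intermediate files
}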

html/index.html

Lines changed: 2 additions & 0 deletions

@@ -47,4 +47,6 @@ <h1>node-webrtc examples</h1>
 <li><a href="/sine-wave/index.html">sine-wave
 <li><a href="/sine-wave-stereo/index.html">sine-wave-stereo
 <li><a href="/video-compositing/index.html">video-compositing
+<li><a href="/stream-record/index.html">record-audio-video-stream
+<li><a href="/sfu-broadcast/index.html">sfu-broadcast</a> & <a href="/sfu-watch/index.html">sfu-watch
 <script src="index.js"></script>

package.json

Lines changed: 2 additions & 0 deletions

@@ -24,6 +24,8 @@
     "canvas": "^2.4.1",
     "color-space": "^1.16.0",
     "express": "^4.16.4",
+    "fluent-ffmpeg": "^2.1.2",
+    "fluent-ffmpeg-multistream": "^1.0.0",
     "node-fetch": "^2.3.0",
     "uuid": "^3.3.2",
     "wrtc": "^0.4.1"
