A first taste of WebRTC: showing video in the browser
We have previously covered WebRTC's JavaScript source layout and its APIs.
So what is the simplest way to actually try WebRTC out? Here is a small, complete example.
If you know Python or Go: pick aiortc for Python, or pion-webrtc for Go.
This article uses Python's aiortc.
pip install aiortc
pip install opencv-python
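The server script shown later also uses aiohttp for the HTTP side; if it is not already present in your environment, install it as well:
pip install aiohttp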
Once the packages are installed, create the front-end page, index.html:
<html>
<head>
<meta charset="UTF-8"/>
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>WebRTC demo</title>
<style>
button {
padding: 8px 16px;
}
pre {
overflow-x: hidden;
overflow-y: auto;
}
video {
width: 100%;
}
.option {
margin-bottom: 8px;
}
#media {
max-width: 1280px;
}
</style>
</head>
<body>
<h2>Options</h2>
<div class="option">
<input id="use-datachannel" checked="checked" type="checkbox"/>
<label for="use-datachannel">Use datachannel</label>
<select id="datachannel-parameters">
<option value='{"ordered": true}'>Ordered, reliable</option>
<option value='{"ordered": false, "maxRetransmits": 0}'>Unordered, no retransmissions</option>
<option value='{"ordered": false, "maxPacketLifetime": 500}'>Unordered, 500ms lifetime</option>
</select>
</div>
<div class="option">
<input id="use-audio" checked="checked" type="checkbox"/>
<label for="use-audio">Use audio</label>
<select id="audio-codec">
<option value="default" selected>Default codecs</option>
<option value="opus/48000/2">Opus</option>
<option value="PCMU/8000">PCMU</option>
<option value="PCMA/8000">PCMA</option>
</select>
</div>
<div class="option">
<input id="use-video" type="checkbox"/>
<label for="use-video">Use video</label>
<select id="video-resolution">
<option value="" selected>Default resolution</option>
<option value="320x240">320x240</option>
<option value="640x480">640x480</option>
<option value="960x540">960x540</option>
<option value="1280x720">1280x720</option>
</select>
<select id="video-transform">
<option value="none" selected>No transform</option>
<option value="edges">Edge detection</option>
<option value="cartoon">Cartoon effect</option>
<option value="rotate">Rotate</option>
</select>
<select id="video-codec">
<option value="default" selected>Default codecs</option>
<option value="VP8/90000">VP8</option>
<option value="H264/90000">H264</option>
</select>
</div>
<div class="option">
<input id="use-stun" type="checkbox"/>
<label for="use-stun">Use STUN server</label>
</div>
<button id="start" onclick="start()">Start</button>
<button id="stop" style="display: none" onclick="stop()">Stop</button>
<h2>State</h2>
<p>
ICE gathering state: <span id="ice-gathering-state"></span>
</p>
<p>
ICE connection state: <span id="ice-connection-state"></span>
</p>
<p>
Signaling state: <span id="signaling-state"></span>
</p>
<div id="media" style="display: none">
<h2>Media</h2>
<audio id="audio" autoplay="true"></audio>
<video id="video" autoplay="true" playsinline="true"></video>
</div>
<h2>Data channel</h2>
<pre id="data-channel" style="height: 200px;"></pre>
<h2>SDP</h2>
<h3>Offer</h3>
<pre id="offer-sdp"></pre>
<h3>Answer</h3>
<pre id="answer-sdp"></pre>
<script src="client.js"></script>
</body>
</html>
Next, the client.js file:
// get DOM elements
var dataChannelLog = document.getElementById('data-channel'),
iceConnectionLog = document.getElementById('ice-connection-state'),
iceGatheringLog = document.getElementById('ice-gathering-state'),
signalingLog = document.getElementById('signaling-state');
// peer connection
var pc = null;
// data channel
var dc = null, dcInterval = null;
function createPeerConnection() {
var config = {
sdpSemantics: 'unified-plan'
};
if (document.getElementById('use-stun').checked) {
config.iceServers = [{urls: ['stun:stun.l.google.com:19302']}];
}
pc = new RTCPeerConnection(config);
// register some listeners to help debugging
pc.addEventListener('icegatheringstatechange', function() {
iceGatheringLog.textContent += ' -> ' + pc.iceGatheringState;
}, false);
iceGatheringLog.textContent = pc.iceGatheringState;
pc.addEventListener('iceconnectionstatechange', function() {
iceConnectionLog.textContent += ' -> ' + pc.iceConnectionState;
}, false);
iceConnectionLog.textContent = pc.iceConnectionState;
pc.addEventListener('signalingstatechange', function() {
signalingLog.textContent += ' -> ' + pc.signalingState;
}, false);
signalingLog.textContent = pc.signalingState;
// connect audio / video
pc.addEventListener('track', function(evt) {
if (evt.track.kind == 'video')
document.getElementById('video').srcObject = evt.streams[0];
else
document.getElementById('audio').srcObject = evt.streams[0];
});
return pc;
}
function negotiate() {
return pc.createOffer().then(function(offer) {
return pc.setLocalDescription(offer);
}).then(function() {
// wait for ICE gathering to complete
return new Promise(function(resolve) {
if (pc.iceGatheringState === 'complete') {
resolve();
} else {
function checkState() {
if (pc.iceGatheringState === 'complete') {
pc.removeEventListener('icegatheringstatechange', checkState);
resolve();
}
}
pc.addEventListener('icegatheringstatechange', checkState);
}
});
}).then(function() {
var offer = pc.localDescription;
var codec;
codec = document.getElementById('audio-codec').value;
if (codec !== 'default') {
offer.sdp = sdpFilterCodec('audio', codec, offer.sdp);
}
codec = document.getElementById('video-codec').value;
if (codec !== 'default') {
offer.sdp = sdpFilterCodec('video', codec, offer.sdp);
}
document.getElementById('offer-sdp').textContent = offer.sdp;
return fetch('/offer', {
body: JSON.stringify({
sdp: offer.sdp,
type: offer.type,
video_transform: document.getElementById('video-transform').value
}),
headers: {
'Content-Type': 'application/json'
},
method: 'POST'
});
}).then(function(response) {
return response.json();
}).then(function(answer) {
document.getElementById('answer-sdp').textContent = answer.sdp;
return pc.setRemoteDescription(answer);
}).catch(function(e) {
alert(e);
});
}
function start() {
document.getElementById('start').style.display = 'none';
pc = createPeerConnection();
var time_start = null;
function current_stamp() {
if (time_start === null) {
time_start = new Date().getTime();
return 0;
} else {
return new Date().getTime() - time_start;
}
}
if (document.getElementById('use-datachannel').checked) {
var parameters = JSON.parse(document.getElementById('datachannel-parameters').value);
dc = pc.createDataChannel('chat', parameters);
dc.onclose = function() {
clearInterval(dcInterval);
dataChannelLog.textContent += '- close\n';
};
dc.onopen = function() {
dataChannelLog.textContent += '- open\n';
dcInterval = setInterval(function() {
var message = 'ping ' + current_stamp();
dataChannelLog.textContent += '> ' + message + '\n';
dc.send(message);
}, 1000);
};
dc.onmessage = function(evt) {
dataChannelLog.textContent += '< ' + evt.data + '\n';
if (evt.data.substring(0, 4) === 'pong') {
var elapsed_ms = current_stamp() - parseInt(evt.data.substring(5), 10);
dataChannelLog.textContent += ' RTT ' + elapsed_ms + ' ms\n';
}
};
}
var constraints = {
audio: document.getElementById('use-audio').checked,
video: false
};
if (document.getElementById('use-video').checked) {
var resolution = document.getElementById('video-resolution').value;
if (resolution) {
resolution = resolution.split('x');
constraints.video = {
width: parseInt(resolution[0], 10),
height: parseInt(resolution[1], 10)
};
} else {
constraints.video = true;
}
}
if (constraints.audio || constraints.video) {
if (constraints.video) {
document.getElementById('media').style.display = 'block';
}
navigator.mediaDevices.getUserMedia(constraints).then(function(stream) {
stream.getTracks().forEach(function(track) {
pc.addTrack(track, stream);
});
return negotiate();
}, function(err) {
alert('Could not acquire media: ' + err);
});
} else {
negotiate();
}
document.getElementById('stop').style.display = 'inline-block';
}
function stop() {
document.getElementById('stop').style.display = 'none';
// close data channel
if (dc) {
dc.close();
}
// close transceivers
if (pc.getTransceivers) {
pc.getTransceivers().forEach(function(transceiver) {
if (transceiver.stop) {
transceiver.stop();
}
});
}
// close local audio / video
pc.getSenders().forEach(function(sender) {
sender.track.stop();
});
// close peer connection
setTimeout(function() {
pc.close();
}, 500);
}
function sdpFilterCodec(kind, codec, realSdp) {
var allowed = []
var rtxRegex = new RegExp('a=fmtp:(\\d+) apt=(\\d+)\r$');
var codecRegex = new RegExp('a=rtpmap:([0-9]+) ' + escapeRegExp(codec))
var videoRegex = new RegExp('(m=' + kind + ' .*?)( ([0-9]+))*\\s*$')
var lines = realSdp.split('\n');
var isKind = false;
for (var i = 0; i < lines.length; i++) {
if (lines[i].startsWith('m=' + kind + ' ')) {
isKind = true;
} else if (lines[i].startsWith('m=')) {
isKind = false;
}
if (isKind) {
var match = lines[i].match(codecRegex);
if (match) {
allowed.push(parseInt(match[1]));
}
match = lines[i].match(rtxRegex);
if (match && allowed.includes(parseInt(match[2]))) {
allowed.push(parseInt(match[1]));
}
}
}
var skipRegex = 'a=(fmtp|rtcp-fb|rtpmap):([0-9]+)';
var sdp = '';
isKind = false;
for (var i = 0; i < lines.length; i++) {
if (lines[i].startsWith('m=' + kind + ' ')) {
isKind = true;
} else if (lines[i].startsWith('m=')) {
isKind = false;
}
if (isKind) {
var skipMatch = lines[i].match(skipRegex);
if (skipMatch && !allowed.includes(parseInt(skipMatch[2]))) {
continue;
} else if (lines[i].match(videoRegex)) {
sdp += lines[i].replace(videoRegex, '$1 ' + allowed.join(' ')) + '\n';
} else {
sdp += lines[i] + '\n';
}
} else {
sdp += lines[i] + '\n';
}
}
return sdp;
}
function escapeRegExp(string) {
return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string
}
Finally, the Python server code:
import argparse
import asyncio
import json
import logging
import os
import ssl
import uuid
import cv2
from aiohttp import web
from aiortc import MediaStreamTrack, RTCPeerConnection, RTCSessionDescription
from aiortc.contrib.media import MediaBlackhole, MediaPlayer, MediaRecorder, MediaRelay
from av import VideoFrame
ROOT = os.path.dirname(__file__)
logger = logging.getLogger("pc")
pcs = set()
relay = MediaRelay()
class VideoTransformTrack(MediaStreamTrack):
    """
    A video stream track that transforms frames from another track.
    """

    kind = "video"

    def __init__(self, track, transform):
        super().__init__()  # don't forget this!
        self.track = track
        self.transform = transform

    async def recv(self):
        frame = await self.track.recv()

        if self.transform == "cartoon":
            img = frame.to_ndarray(format="bgr24")

            # prepare color
            img_color = cv2.pyrDown(cv2.pyrDown(img))
            for _ in range(6):
                img_color = cv2.bilateralFilter(img_color, 9, 9, 7)
            img_color = cv2.pyrUp(cv2.pyrUp(img_color))

            # prepare edges
            img_edges = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
            img_edges = cv2.adaptiveThreshold(
                cv2.medianBlur(img_edges, 7),
                255,
                cv2.ADAPTIVE_THRESH_MEAN_C,
                cv2.THRESH_BINARY,
                9,
                2,
            )
            img_edges = cv2.cvtColor(img_edges, cv2.COLOR_GRAY2RGB)

            # combine color and edges
            img = cv2.bitwise_and(img_color, img_edges)

            # rebuild a VideoFrame, preserving timing information
            new_frame = VideoFrame.from_ndarray(img, format="bgr24")
            new_frame.pts = frame.pts
            new_frame.time_base = frame.time_base
            return new_frame
        elif self.transform == "edges":
            # perform edge detection
            img = frame.to_ndarray(format="bgr24")
            img = cv2.cvtColor(cv2.Canny(img, 100, 200), cv2.COLOR_GRAY2BGR)

            # rebuild a VideoFrame, preserving timing information
            new_frame = VideoFrame.from_ndarray(img, format="bgr24")
            new_frame.pts = frame.pts
            new_frame.time_base = frame.time_base
            return new_frame
        elif self.transform == "rotate":
            # rotate image
            img = frame.to_ndarray(format="bgr24")
            rows, cols, _ = img.shape
            M = cv2.getRotationMatrix2D((cols / 2, rows / 2), frame.time * 45, 1)
            img = cv2.warpAffine(img, M, (cols, rows))

            # rebuild a VideoFrame, preserving timing information
            new_frame = VideoFrame.from_ndarray(img, format="bgr24")
            new_frame.pts = frame.pts
            new_frame.time_base = frame.time_base
            return new_frame
        else:
            return frame
async def index(request):
    content = open(os.path.join(ROOT, "index.html"), "r").read()
    return web.Response(content_type="text/html", text=content)


async def javascript(request):
    content = open(os.path.join(ROOT, "client.js"), "r").read()
    return web.Response(content_type="application/javascript", text=content)
async def offer(request):
    params = await request.json()
    print(params)
    offer = RTCSessionDescription(sdp=params["sdp"], type=params["type"])

    pc = RTCPeerConnection()
    pc_id = "PeerConnection(%s)" % uuid.uuid4()
    pcs.add(pc)

    def log_info(msg, *args):
        logger.info(pc_id + " " + msg, *args)

    log_info("Created for %s", request.remote)

    # prepare local media
    player = MediaPlayer(os.path.join(ROOT, "demo-instruct.wav"))
    if args.record_to:
        recorder = MediaRecorder(args.record_to)
    else:
        recorder = MediaBlackhole()

    @pc.on("datachannel")
    def on_datachannel(channel):
        @channel.on("message")
        def on_message(message):
            if isinstance(message, str) and message.startswith("ping"):
                channel.send("pong" + message[4:])

    @pc.on("connectionstatechange")
    async def on_connectionstatechange():
        log_info("Connection state is %s", pc.connectionState)
        if pc.connectionState == "failed":
            await pc.close()
            pcs.discard(pc)

    @pc.on("track")
    def on_track(track):
        log_info("Track %s received", track.kind)

        if track.kind == "audio":
            pc.addTrack(player.audio)
            recorder.addTrack(track)
        elif track.kind == "video":
            pc.addTrack(
                VideoTransformTrack(
                    relay.subscribe(track), transform=params["video_transform"]
                )
            )
            if args.record_to:
                recorder.addTrack(relay.subscribe(track))

        @track.on("ended")
        async def on_ended():
            log_info("Track %s ended", track.kind)
            await recorder.stop()

    # handle offer
    await pc.setRemoteDescription(offer)
    await recorder.start()

    # send answer
    answer = await pc.createAnswer()
    await pc.setLocalDescription(answer)

    return web.Response(
        content_type="application/json",
        text=json.dumps(
            {"sdp": pc.localDescription.sdp, "type": pc.localDescription.type}
        ),
    )
async def on_shutdown(app):
    # close peer connections
    coros = [pc.close() for pc in pcs]
    await asyncio.gather(*coros)
    pcs.clear()
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="WebRTC audio / video / data-channels demo"
    )
    parser.add_argument("--cert-file", help="SSL certificate file (for HTTPS)")
    parser.add_argument("--key-file", help="SSL key file (for HTTPS)")
    parser.add_argument(
        "--host", default="0.0.0.0", help="Host for HTTP server (default: 0.0.0.0)"
    )
    parser.add_argument(
        "--port", type=int, default=8080, help="Port for HTTP server (default: 8080)"
    )
    parser.add_argument("--record-to", help="Write received media to a file")
    parser.add_argument("--verbose", "-v", action="count")
    args = parser.parse_args()

    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)

    if args.cert_file:
        ssl_context = ssl.SSLContext()
        ssl_context.load_cert_chain(args.cert_file, args.key_file)
    else:
        ssl_context = None

    app = web.Application()
    app.on_shutdown.append(on_shutdown)
    app.router.add_get("/", index)
    app.router.add_get("/client.js", javascript)
    app.router.add_post("/offer", offer)
    web.run_app(
        app, access_log=None, host=args.host, port=args.port, ssl_context=ssl_context
    )
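Save the three listings as index.html, client.js and server.py in one directory (server.py is my own choice of filename; the other two names are what the server looks up). Also note that the offer handler opens demo-instruct.wav from that same directory to play audio back to the browser; the file ships with the aiortc examples, so copy it over or adjust that MediaPlayer line. A minimal way to launch the server is then:

python server.py
# or, to serve the page over HTTPS with your own certificate and key:
python server.py --cert-file cert.pem --key-file key.pem --port 8080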
WebRTC requires a secure context, which in practice means HTTPS, although plain HTTP on 127.0.0.1 / localhost is fine. The Python code above already serves the page, so remember not to open the HTML file directly from disk; request
http://127.0.0.1:8080/
instead, and you can enjoy the video experience WebRTC brings to the web.
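As a side note, the /offer signalling endpoint can also be exercised without a browser. The sketch below is only an illustration under my own assumptions (the script name, the hard-coded http://127.0.0.1:8080 address and the use of aiohttp's client are not part of the original demo): it opens a data channel, posts the local offer as JSON, applies the returned answer and prints the pong replies.

# headless_check.py - hypothetical helper, not part of the demo above
import asyncio

import aiohttp
from aiortc import RTCPeerConnection, RTCSessionDescription


async def run():
    pc = RTCPeerConnection()
    channel = pc.createDataChannel("chat")  # data channel only, no camera needed

    @channel.on("open")
    def on_open():
        channel.send("ping 0")

    @channel.on("message")
    def on_message(message):
        print("received:", message)

    # aiortc completes ICE gathering inside setLocalDescription(),
    # so the SDP below already carries candidates
    await pc.setLocalDescription(await pc.createOffer())

    async with aiohttp.ClientSession() as session:
        async with session.post(
            "http://127.0.0.1:8080/offer",
            json={
                "sdp": pc.localDescription.sdp,
                "type": pc.localDescription.type,
                "video_transform": "none",
            },
        ) as response:
            answer = await response.json()

    await pc.setRemoteDescription(
        RTCSessionDescription(sdp=answer["sdp"], type=answer["type"])
    )
    await asyncio.sleep(5)  # give the data channel time to open and exchange pings
    await pc.close()


asyncio.run(run())

Run it in a second terminal while the server is up; a pong line should appear within a second or two.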