
With all the recent Goruck-ness it’s time to throw out a tech post. I’ve been prepping for a NodeJS presentation recently and wanted a unique demo. I had also come across the experimental build of Opera which supports accessing the native webcam of a mobile device. So I threw these two technologies together and came up with a JavaScript-powered way to stream the video camera of a mobile device to a bunch of desktop (or mobile) clients.
HTML5 had originally laid out support for a new element (the “device” element), but that element has now been scrapped in favour of the getUserMedia API. So far I have only seen the experimental Opera Mobile build support it, but hopefully it’ll make its way into Dolphin and other mobile browsers soon.
http://my.opera.com/core/blog/2011/03/23/webcam-orientation-preview
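For reference, the call that experimental build exposes is a simple callback-based one. Here’s a minimal sketch of the pattern (the same one the full capture page below uses; the inline callbacks are just for illustration):

// Minimal sketch of the experimental, callback-based getUserMedia call.
// Same pattern as the capture page further down; callbacks here are illustrative only.
if (navigator.getUserMedia) {
  navigator.getUserMedia('video', function (stream) {
    // wire the camera stream straight into a <video> element
    document.getElementsByTagName('video')[0].src = stream;
  }, function (error) {
    console.log('getUserMedia failed: [CODE ' + error.code + ']');
  });
} else {
  console.log('Native web camera streaming is not supported in this browser');
}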
If you just want to cut to the chase, here’s a video of the final result:
There are three moving parts here:
- a) the capture page, which grabs the video from the phone’s camera
- b) the server, which is a simple Node broadcast server
- c) the viewer, which is just a web page that renders the output
The capture page is just a copy-paste from Opera’s website. I take the video stream, render it to a canvas, then grab the canvas as a base64-encoded image, one image per frame. I could not find a way to limit the video’s size, so the resulting image is fairly large (240×400 on an HTC Incredible). I down-sampled by halving the canvas dimensions to reduce the traffic over the wire. Frame rates are still pretty poor, but this is just an experiment.
The server is a generic WebSocket server. It receives each message, which is a raw base64 data URL, and broadcasts it down to all connected clients.
The viewer is the simplest of all: it just connects to the socket server and renders whatever it receives into an img tag.
It’s all JavaScript.
The capture page (this is a Jade view).
h1 Remote Webcam using NodeJS, Opera, Web Sockets and HTML5/Canvas
video(autoplay=true,id="sourcevid")
canvas(id="output")
div(id="log")
script
  var log = function(msg) {
    document.getElementById('log').innerHTML = document.getElementById('log').innerHTML + msg + "<br/>";
  };

  var video = document.getElementsByTagName('video')[0],
      heading = document.getElementsByTagName('h1')[0];

  if (navigator.getUserMedia) {
    navigator.getUserMedia('video', successCallback, errorCallback);
    function successCallback(stream) {
      video.src = stream;
    };
    function errorCallback(error) {
      heading.textContent = "An error occurred: [CODE " + error.code + "]";
    };
  } else {
    heading.textContent = "Native web camera streaming is not supported in this browser!";
  };

  var back = document.createElement('canvas');
  var backcontext = back.getContext('2d');

  var ws;
  if ('WebSocket' in window) {
    connect('ws://192.168.2.100:8080/');
  } else {
    log('web sockets not supported');
  }

  function connect(host) {
    ws = new WebSocket(host);
    ws.onopen = function () { log('connected'); };
    ws.onclose = function () { log('socket closed'); };
    ws.onerror = function (evt) { log('<span style="color: red;">ERROR:</span> ' + evt.data); };
  };

  function send(msg) {
    if (ws != null) {
      if (ws.readyState === 1) {
        ws.send(msg);
      }
    } else {
      //log('not ready yet');
    }
  }

  var cw = 120; //240; //video.clientWidth;
  var ch = 200; //400; //video.clientHeight;
  log('height = ' + ch);
  back.width = cw;
  back.height = ch;

  draw(video, backcontext, cw, ch);

  function draw(v, bc, w, h) {
    // First, draw the current video frame into the backing canvas
    bc.drawImage(v, 0, 0, w, h);
    // Grab the pixel data from the backing canvas as a data URL
    var stringData = back.toDataURL();
    // Send it on the wire
    send(stringData);
    // Start over! 10 frames a second = 100 milliseconds
    setTimeout(function() { draw(v, bc, w, h); }, 100);
  }
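One lever the code above doesn’t pull: toDataURL also accepts a MIME type and, for JPEG, a quality hint, so the frame grab in draw() could be made a fair bit cheaper than the default PNG. A rough sketch, assuming the browser can export JPEG from a canvas:

// Hypothetical tweak to draw(): grab a lower-quality JPEG instead of the default PNG
// so each frame sent over the socket is smaller.
var stringData = back.toDataURL('image/jpeg', 0.5); // 0.5 is the quality hint
send(stringData);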
The server: a simple broadcast app.
var sys = require("sys"),
    ws = require("./ws.js");

var clients = [];

ws.createServer(function (websocket) {
  clients.push(websocket);
  websocket.addListener("connect", function (resource) {
    // emitted after handshake
    sys.debug("connect: " + resource);
  }).addListener("data", function (data) {
    // handle incoming data:
    // send it to ALL clients whenever ANY client sends a frame up
    for (var i = 0; i < clients.length; i++) {
      clients[i].write(data);
    }
  }).addListener("close", function () {
    // emitted when server or client closes the connection
    sys.debug("close");
  });
}).listen(8080);

sys.debug("Listening on port 8080");
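The ./ws.js required above is a local WebSocket helper module. If you want to reproduce the same broadcast with the npm “ws” package instead (an assumption on my part, not what I ran), a rough equivalent looks like this; it also sidesteps the fact that the version above never removes closed sockets from the clients array:

// Rough equivalent of the broadcast server using the npm "ws" package (assumed, not the module used above)
var WebSocket = require('ws');
var wss = new WebSocket.Server({ port: 8080 });

wss.on('connection', function (socket) {
  socket.on('message', function (data) {
    // relay the incoming frame to every connected client
    wss.clients.forEach(function (client) {
      if (client.readyState === WebSocket.OPEN) {
        client.send(data);
      }
    });
  });
});

console.log('Listening on port 8080');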
The viewer page.
<img src="" id="frame" style="width:240px;height:400px"/>
<div id="log"></div>
<script type="text/javascript">
  var img;
  function Init() {
    img = document.getElementById("frame");
  }
  // the page includes jQuery, hence $(document).ready
  $(document).ready(function () {
    Init();
  });

  // Web socket connection stuff is next...
  if ('WebSocket' in window) {
    connect('ws://localhost:8080/');
  } else {
    log('web sockets not supported');
  }

  var ws;
  function connect(host) {
    ws = new WebSocket(host);
    ws.onopen = function () { log('connected'); };
    ws.onmessage = function (evt) {
      if (evt.data != null) {
        // frames arrive as data URLs, so check for the "da" prefix before rendering
        if ((evt.data[0] === "d") && (evt.data[1] === "a")) {
          img.src = evt.data;
        }
        //log('got' + evt.data);
      }
    };
    ws.onclose = function () { log('socket closed'); };
    ws.onerror = function (evt) { log('<span style="color: red;">ERROR:</span> ' + evt.data); };
  };
  function log(msg) {
    document.getElementById('log').innerHTML = msg + "<br/>" + document.getElementById('log').innerHTML;
  }
</script>
Instead of broadcasting, you could store these frames in MongoDB for assembly later. This would make the ultimate storage-independent video camera. Never run out of storage again!
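Just to sketch that idea (connection string, database and collection names are all made up, and this assumes the official “mongodb” driver), the “data” listener could write each frame to a collection instead of looping over clients:

// Hypothetical sketch: persist each frame with the official "mongodb" driver
// instead of (or as well as) broadcasting it. All names here are made up.
var MongoClient = require('mongodb').MongoClient;
var frames; // set once the connection is up

MongoClient.connect('mongodb://localhost:27017', function (err, client) {
  if (err) throw err;
  frames = client.db('webcam').collection('frames');
});

// call this from the websocket "data" listener
function storeFrame(dataUrl) {
  if (!frames) return; // connection not ready yet
  frames.insertOne({ ts: new Date(), frame: dataUrl }, function (err) {
    if (err) console.error('failed to store frame', err);
  });
}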
I really need to start storing these snippets on GitHub…
P.S. Everyone knows insects are clockwork, which is why god made so many of them. Today’s image is from http://www.insectlabstudio.com/