HTML5 Tutorial
Michel Buffa
April 2012
buffa@unice.fr
Lab exercises:
This presentation focuses on some aspects of HTML5; see, for example, http://html5rocks.com for a more complete overview.
<!-- Wow! -->
<!doctype html>
<html lang="fr">
  <head>
    <meta charset="utf-8">
    <title>Page title</title>
    <!-- Notice the rel attribute! -->
    <link rel="stylesheet" href="style.css">
    <!-- Notice: no type attribute needed -->
    <script src="script.js"></script>
  </head>
  <body>
    <!-- Some content -->
  </body>
</html>

HTML5 adds 13 new form input types: email, tel, color, url, date, datetime, datetime-local, month, week, time, range, number, search.
Built-in validation system
New elements: <datalist> for autocompletion, <output> for feedback, etc. (see the example below)
Other goodies...
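A minimal sketch of these two elements (field names and option values are illustrative):

<!-- <datalist> provides autocompletion suggestions for an <input> -->
<input type="text" name="browser" list="browsers">
<datalist id="browsers">
  <option value="Firefox">
  <option value="Chrome">
  <option value="Opera">
</datalist>

<!-- <output> displays the result of a calculation as feedback -->
<form oninput="result.value = parseInt(a.value) + parseInt(b.value)">
  <input type="number" name="a" value="1"> +
  <input type="number" name="b" value="2"> =
  <output name="result" for="a b">3</output>
</form>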
In my browser:
How it shows in Opera:
Interactive example for Opera users!
Polyfill: use the jsColor JavaScript library.
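The markup for this color picker is simply (the field name is illustrative):

<input type="color" name="favoriteColor"/>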
In my browser:
How it shows in Opera:
Pops up a contextual keyboard on smartphones
Other date types: datetime, datetime-local, time, week, month...
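A typical date field looks like this (the name and min/max values are illustrative):

<input type="date" name="birthday" min="1900-01-01" max="2012-12-31"/>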
<input type="email"/> <input type="email" multiple value="foo@bar.org, bob@sponge.org" required/>
In my browser:
Works with the CSS pseudo-classes :required, :optional and :invalid
Default styling for valid/invalid input in some web browsers.
<input type="email" required/>
Invalid input means the :invalid pseudo-class matches, so it can be styled from CSS, as sketched below.
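A minimal styling sketch (the colors are illustrative):

input:required { border: 1px solid orange; }
input:valid    { border: 2px solid green; }
input:invalid  { border: 2px solid red; }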
Keyboard input:
Submission time:
<input type="number" name="quantity" min="1" max="5" />
In my browser:
<input type="range" name="n" min="1" max="10"/>
In my browser:
<input type="tel" placeholder="(555) 555-5555"
pattern="^\(?\d{3}\)?[-\s]\d{3}[-\s]\d{4}.*?$"/>
In my browser:
<input type="url" name="homepage"/>
In my browser:
<input id="mysearch2" type="search" placeholder="search"/>
In my browser:
<input type="text" x-webkit-speech name="search_value"/>
In my browser:
Grades:
<meter value=75 min=0 max=100 low=20 high=98>75%</meter>
Blood Pressure:
<meter min=50 max=200 low=90 high=119 optimum=100></meter>
var displayCoords = document.getElementById("msg");

function getLocation() {
  if (navigator.geolocation) {
    navigator.geolocation.getCurrentPosition(showPosition);
  } else {
    displayCoords.innerHTML = "Geolocation API not supported!";
  }
}

function showPosition(position) {
  displayCoords.innerHTML = "Latitude: " + position.coords.latitude +
    "<br />Longitude: " + position.coords.longitude;
}
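The script assumes a container with id "msg" and something that calls getLocation(), for instance (the markup is illustrative):

<p id="msg"></p>
<button onclick="getLocation()">Where am I?</button>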
<canvas id="myCanvas">Canvas not supported.</canvas>
<script type="text/javascript">
var canvas = document.querySelector('#myCanvas');
var ctx = canvas.getContext('2d');
ctx.fillStyle = '#FF0000';
ctx.fillRect(0, 0, 80, 100);
</script>
// Create a pixel Array
var imageData = context.createImageData(width, height);
// Or grab canvas content as pixels
var imageData2 = context.getImageData(x, y, width, height);
// do something with the data
// modify canvas content
context.putImageData(imageData, 0, 0);
Good usage: the Pixastic JavaScript library...
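As a sketch of what can be done with the pixel array (assuming a canvas and its 2D context are available; the weights are the usual luma coefficients):

// Convert the canvas content to grayscale
var imageData = context.getImageData(0, 0, canvas.width, canvas.height);
var pixels = imageData.data; // RGBA values, 4 bytes per pixel
for (var i = 0; i < pixels.length; i += 4) {
  var gray = 0.3 * pixels[i] + 0.59 * pixels[i + 1] + 0.11 * pixels[i + 2];
  pixels[i] = pixels[i + 1] = pixels[i + 2] = gray; // alpha left untouched
}
context.putImageData(imageData, 0, 0);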
var imageObj = new Image();

imageObj.onload = function() {
  context.drawImage(imageObj, destX, destY);
  // draw full image but change size
  context.drawImage(imageObj, destX, destY, width, height);
  // draw a subpart of the source + resize
  context.drawImage(imageObj, sourceX, sourceY, sourceW, sourceH,
                    destX, destY, destW, destH);
};

imageObj.src = "darth-vader.jpg";
<object width="425" height="344">
<param name="movie"
value="http://www.youtube.com/v/9sEI1AUFJKw&hl=en_GB&fs=1&">
</param>
<param name="allowFullScreen" value="true"></param>
<param name="allowscriptaccess" value="always"></param>
<embed src="http://www.youtube.com/v/9sEI1AUFJKw&hl=en_GB&fs=1&"
type="application/x-shockwave-flash" allowscriptaccess="always"
allowfullscreen="true"
width="425" height="344">
</embed>
</object>
The HTML5 <video> tag!
<video controls autoplay>
<source src=video.webm type=video/webm>
<source src=video.ogg type=video/ogg>
<source src=video.mp4 type=video/mp4>
</video>
context.drawImage() can take another canvas as its first parameter!
It can also take a video element! Demo!
<video id="videoTest" width="400" controls>
<source src="video.webm" type="video/webm"/>
</video>
<canvas width="400" height="300"></canvas>
<script>
var video = document.getElementById('videoTest');
var canvas = document.querySelector('canvas');
var ctx = canvas.getContext('2d');
// Draws current image from the video element into the canvas
ctx.drawImage(video, 0,0, canvas.width, canvas.height);
</script>
<style>
video {
width: 100px;
transition: all 0.5s ease-in-out;
}
video:hover, video:focus {
width: 600px;
transform: rotate(45deg);
}
</style>
Principle:
Requirement: a web browser that supports the getUserMedia API.
<video id="output" autoplay>Fallback message here.</video>
<script>
function onSuccess(stream) {
  document.getElementById('output').src = stream;
}

function onError() {
  // getUserMedia not supported, permission denied, or another application is using the webcam!
}

if (navigator.getUserMedia) {
  // The spec takes a constraints object describing the requested media
  navigator.getUserMedia({ video: true }, onSuccess, onError);
}
</script>
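Note that current implementations prefix the API; a common feature-detection shim looks something like this (a sketch, not exhaustive):

// Map the vendor-prefixed versions onto navigator.getUserMedia if needed
navigator.getUserMedia = navigator.getUserMedia ||
                         navigator.webkitGetUserMedia ||
                         navigator.mozGetUserMedia;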
<audio controls="controls">
<source src="http://www.w3schools.com/html5/song.ogg"
type="audio/ogg" />
<source src="http://www.w3schools.com/html5/song.mp3"
type="audio/mp3" />
Your browser does not support the audio element.
</audio>
try {
var context = new webkitAudioContext();
...
}
catch(e) {
alert('Web Audio API is not supported in this browser');
}
Sound samples must be loaded in RAM before use,
Games, music editing, sequencers, real-time processing, synthesizers,
Graph model for processing sound in real time, connect filters, etc.
var dogBarkingBuffer = null;
var context = new webkitAudioContext();

function loadDogSound(url) {
  var request = new XMLHttpRequest();
  request.open('GET', url, true);
  request.responseType = 'arraybuffer';

  // Decode asynchronously once the file has been downloaded
  request.onload = function() {
    context.decodeAudioData(request.response, function(buffer) {
      dogBarkingBuffer = buffer;
    }, onError); // onError: an error callback you provide
  };
  request.send();
}
var context = new webkitAudioContext();

function playSound(buffer) {
  // Create the sound source
  var source = context.createBufferSource();
  // What to play: an audio buffer
  source.buffer = buffer;
  // Connect the source to the destination (the speakers in this case)
  source.connect(context.destination);
  // Play the sound
  source.noteOn(0);
}
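Putting the two together (the file name is illustrative; in real code, wait for the decode callback before playing):

loadDogSound('dogbark.ogg');
// ... later, e.g. on a button click, once the buffer has been decoded:
playSound(dogBarkingBuffer);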
// Create the filter
var filter = context.createBiquadFilter();
// Create the audio graph.
source.connect(filter);
filter.connect(context.destination);
// Create and specify parameters for the low-pass filter.
filter.type = 0; // Low-pass filter. See BiquadFilterNode docs
filter.frequency.value = 440; // Set the cutoff to 440 Hz
// Playback the sound.
source.noteOn(0);
var context = new webkitAudioContext();

// Create a JavaScriptNode: 1024 is the number of samples to generate on each call,
// 0 means the node has no inputs (we only want to generate sound), 2 means stereo output
var node = context.createJavaScriptNode(1024, 0, 2);
// Specify the audio generation function
node.onaudioprocess = generateAudio;
// Connect the node to a destination, i.e. the audio output
node.connect(context.destination);
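generateAudio is a callback you write yourself; a minimal sketch that fills both stereo channels with quiet white noise (names and values are illustrative):

function generateAudio(event) {
  var left = event.outputBuffer.getChannelData(0);
  var right = event.outputBuffer.getChannelData(1);
  for (var i = 0; i < left.length; i++) {
    // Random values in [-1, 1], scaled down to keep the volume low
    left[i] = right[i] = (Math.random() * 2 - 1) * 0.1;
  }
}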
Ajax / Long Polling (Comet)
HTTP overhead: headers sent for each request/response
Full duplex, bidirectional realtime communication channel,
Shares the HTTP ports used by HTTP servers
Traverse firewalls
Keep the connection open
Adds only 2 bytes of overhead per frame (data framed between 0x00 and 0xFF)
Data encoded in UTF-8 (no binary!)
Bye bye Flash Sockets!

Check at http://websocket.org
Must be activated in Opera, supported by IE 10
var ws = new WebSocket("ws://host:port");
ws.onopen = function(evt) { alert("Connection open!"); };
ws.onmessage = function(evt) { alert( "Received message: " + evt.data); };
ws.onclose = function(evt) { alert("Connection closed."); };
ws.onerror = function(evt) { alert("WebSocket error: " + evt.data); };
ws.send(myMessage); // myMessage: a string (UTF-8 text)
ws.close();
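For a quick test, the echo service from websocket.org can be used (the exact URL is assumed here, check the site):

var echo = new WebSocket("ws://echo.websocket.org");
echo.onopen = function(evt) { echo.send("Hello WebSocket!"); };
echo.onmessage = function(evt) { alert("Echoed back: " + evt.data); };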
Many popular servers support WebSockets
socket.io (JavaScript)
nowJS
Client code (jQuery and the now.js client script are assumed to be included in the page):

now.receiveMessage = function(name, message) {
  alert("Message from " + name + ": " + message);
};

$("#send-button").click(function() {
  now.distributeMessage($("#text-input").val());
});

now.name = prompt("What's your name?", "");
Server code (Node.js):

var nowjs = require("now");
var everyone = nowjs.initialize(httpServer); // httpServer: your Node.js HTTP server

everyone.now.distributeMessage = function(message) {
  everyone.now.receiveMessage(this.now.name, message);
};
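For comparison, the same broadcast pattern with socket.io looks roughly like this (server side, Node.js; the event names and port are illustrative):

var io = require('socket.io').listen(8080);

io.sockets.on('connection', function(socket) {
  socket.on('distributeMessage', function(name, message) {
    // Relay the message to every connected client
    io.sockets.emit('receiveMessage', name, message);
  });
});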