I have a question about using TensorFlow.js to detect objects with a webcam. Currently I am using the pre-trained coco-ssd model.
index.html:
<html lang="en">
<head>
<title>Multiple object detection using pre-trained model in TensorFlow.js</title>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- Import the webpage's stylesheet -->
<link rel="stylesheet" href="style.css">
</head>
<body>
<h1>Multiple object detection using pre-trained model in TensorFlow.js</h1>
<p>Wait for the model to load before clicking the button to enable the webcam - at which point it will become visible to use.</p>
<section id="demos" class="invisible">
<p>Hold some objects up close to your webcam to get a real-time classification! When ready click "enable webcam" below and accept access to the webcam when the browser asks (check the top left of your window)</p>
<div id="liveView" class="camView">
<button id="webcamButton">Enable Webcam</button>
<video id="webcam" autoplay width="640" height="480"></video>
</div>
</section>
<!-- Import TensorFlow.js library -->
<script src="https://cdn.jsdelivr.net/npm/#tensorflow/tfjs/dist/tf.min.js" type="text/javascript"></script>
<!-- Load the coco-ssd model to use to recognize things in images -->
<script src="https://cdn.jsdelivr.net/npm/#tensorflow-models/coco-ssd"></script>
<!-- Import the page's JavaScript to do some stuff -->
<script src="script.js" defer></script>
</body>
</html>
script.js:
const video = document.getElementById('webcam');
const liveView = document.getElementById('liveView');
const demosSection = document.getElementById('demos');
const enableWebcamButton = document.getElementById('webcamButton');
// Check if webcam access is supported.
function getUserMediaSupported() {
return !!(navigator.mediaDevices &&
navigator.mediaDevices.getUserMedia);
}
// If webcam supported, add event listener to button for when user
// wants to activate it to call enableCam function which we will
// define in the next step.
if (getUserMediaSupported()) {
enableWebcamButton.addEventListener('click', enableCam);
} else {
console.warn('getUserMedia() is not supported by your browser');
}
// Enable the live webcam view and start classification.
function enableCam(event) {
// Only continue if the COCO-SSD has finished loading.
if (!model) {
return;
}
// Hide the button once clicked.
event.target.classList.add('removed');
// getUserMedia parameters to force video but not audio.
const constraints = {
video: true
};
// Activate the webcam stream.
navigator.mediaDevices.getUserMedia(constraints).then(function(stream) {
video.srcObject = stream;
video.addEventListener('loadeddata', predictWebcam);
});
}
// Store the resulting model in the global scope of our app.
var model = undefined;
// Before we can use COCO-SSD class we must wait for it to finish
// loading. Machine Learning models can be large and take a moment
// to get everything needed to run.
// Note: cocoSsd is an external object loaded from our index.html
// script tag import so ignore any warning in Glitch.
cocoSsd.load().then(function (loadedModel) {
model = loadedModel;
// Show demo section now model is ready to use.
demosSection.classList.remove('invisible');
});
var children = [];
function predictWebcam() {
// Now let's start classifying a frame in the stream.
model.detect(video).then(function (predictions) {
// Remove any highlighting we did on the previous frame.
for (let i = 0; i < children.length; i++) {
liveView.removeChild(children[i]);
}
children.splice(0);
// Now let's loop through predictions and draw them to the live view if
// they have a high confidence score.
for (let n = 0; n < predictions.length; n++) {
// If we are over 66% sure we classified it right, draw it!
if (predictions[n].score > 0.66) {
const p = document.createElement('p');
p.innerText = predictions[n].class + ' - with '
+ Math.round(parseFloat(predictions[n].score) * 100)
+ '% confidence.';
p.style = 'margin-left: ' + predictions[n].bbox[0] + 'px; margin-top: '
+ (predictions[n].bbox[1] - 10) + 'px; width: '
+ (predictions[n].bbox[2] - 10) + 'px; top: 0; left: 0;';
const highlighter = document.createElement('div');
highlighter.setAttribute('class', 'highlighter');
highlighter.style = 'left: ' + predictions[n].bbox[0] + 'px; top: '
+ predictions[n].bbox[1] + 'px; width: '
+ predictions[n].bbox[2] + 'px; height: '
+ predictions[n].bbox[3] + 'px;';
liveView.appendChild(highlighter);
liveView.appendChild(p);
children.push(highlighter);
children.push(p);
}
}
// Call this function again to keep predicting when the browser is ready.
window.requestAnimationFrame(predictWebcam);
});
}
Now I would like to customize the script to use my own model, which I previously created and trained with TensorFlow for Python. I have already converted it with the tensorflowjs_converter tool into the TensorFlow.js format (a model.json file plus binary weight shards).
(Screenshot: the files of my own model.)
How do I modify my code so that my own model is now used? I have already tried a few things, but unfortunately have not made any progress.
You can use loadGraphModel from @tensorflow/tfjs-converter (also available as tf.loadGraphModel in the main @tensorflow/tfjs bundle) to load a model from its model.json.
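A minimal sketch of the change, assuming your converted files are served at models/my_model/model.json and were exported as a graph model (the path, the 300x300 input size, and the output decoding are assumptions that depend on how your model was trained and exported):

// Load your own graph model instead of coco-ssd.
tf.loadGraphModel('models/my_model/model.json').then(function (loadedModel) {
  model = loadedModel;
  // Show the demo section now the model is ready to use.
  demosSection.classList.remove('invisible');
});

async function predictWebcam() {
  // Turn the current video frame into a batched float tensor.
  const input = tf.tidy(function () {
    const frame = tf.browser.fromPixels(video).toFloat();
    // 300x300 is a placeholder; use your model's expected input size.
    const resized = tf.image.resizeBilinear(frame, [300, 300]);
    return resized.expandDims(0);
  });
  // Converted graph models often contain control flow, so use
  // executeAsync() instead of predict().
  const output = await model.executeAsync(input);
  input.dispose();
  // Decode boxes/scores/classes from `output` according to your model's
  // output signature, draw them as before, then free the output tensors.
  window.requestAnimationFrame(predictWebcam);
}

Note that model.detect() is specific to the coco-ssd wrapper; a plain graph model returns raw tensors, so the post-processing coco-ssd did for you has to be reimplemented for your model's outputs.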
Background
I have this code to visualize audio with HTML canvas. It works fine on Safari:
window.MyNamespace = {} // Easy access to some vars by namespace.
// Some browsers require a user gesture before creating Web Audio API context.
MyNamespace.firstUserGesture = true
MyNamespace.audio = {}
MyNamespace.audio.el = document.getElementById("aud")
MyNamespace.audio.canvasEl = document.getElementById("canvasEl")
function setupCanvas() {
MyNamespace.audio.canvasEl.style.width = "88%"
MyNamespace.audio.canvasEl.style.height = "100px"
// High dpi stuff.
// Controls the chart line stroke thickness.
MyNamespace.audio.canvasEl.width = 500 * 2
MyNamespace.audio.canvasEl.height = 100 * 2
// Get canvas context.
MyNamespace.audio.canvasCtx = MyNamespace.audio.canvasEl.getContext("2d")
// Set stroke color.
MyNamespace.audio.canvasCtx.strokeStyle = "#4285F4"
// Draw twice as thick lines due to high dpi scaling.
MyNamespace.audio.canvasCtx.lineWidth = 2
// Animation reference.
MyNamespace.audio.canvasAnimation = {}
MyNamespace.audio.el.addEventListener('play', () => drawWave())
MyNamespace.audio.el.addEventListener('pause', () => cancelAnimationFrame(MyNamespace.audio.canvasAnimation))
MyNamespace.audio.el.addEventListener('ended', () => cancelAnimationFrame(MyNamespace.audio.canvasAnimation))
let N = 0 // Data count to show.
N = MyNamespace.audio.analyzer.fftSize
MyNamespace.audio.data = new Uint8Array(N)
// Our drawing method
function drawWave() {
MyNamespace.audio.analyzer.getByteTimeDomainData(MyNamespace.audio.data)
const WIDTH = MyNamespace.audio.canvasEl.width
const HEIGHT = MyNamespace.audio.canvasEl.height
const bufferLength = MyNamespace.audio.data.length
const dataArray = MyNamespace.audio.data
// https://github.com/mdn/voice-change-o-matic-float-data/blob/c745ba8b48d7a9b93661ac43da2886633c06f2a7/scripts/app.js#L190
MyNamespace.audio.canvasCtx.fillStyle = 'rgb(255, 255, 255)'
MyNamespace.audio.canvasCtx.fillRect(0, 0, WIDTH, HEIGHT)
var barWidth = (WIDTH / bufferLength) * 2.5
var barHeight
var x = 0
for (var j = 0; j < bufferLength; j++) {
// 128 means no sound? 128 corresponds to 0? Log indicates so!
// Multiply by 2 to make it more noticable.
barHeight = Math.abs(dataArray[j] - 128) * 2
//console.log('bar height', barHeight)
MyNamespace.audio.canvasCtx.fillStyle = 'rgb(' + Math.floor(barHeight + 66) + ',133,244)'
MyNamespace.audio.canvasCtx.fillRect(x, Math.abs(HEIGHT - barHeight) / 2, barWidth, barHeight)
x += barWidth + 1
}
// Animate
MyNamespace.audio.canvasAnimation = requestAnimationFrame(drawWave)
}
}
function createAudio() {
if (MyNamespace.firstUserGesture) {
// Note: As a consequence of calling createMediaElementSource(),
// audio playback from the HTMLMediaElement will be re-routed into the processing graph of the AudioContext.
// So playing/pausing the media can still be done through the media element API and the player controls.
if ('webkitAudioContext' in window) {
MyNamespace.ctx = new webkitAudioContext()
console.log('webkitAudioContext() is available.')
} else if ('AudioContext' in window) {
MyNamespace.ctx = new AudioContext()
console.log('AudioContext() is available.')
} else {
console.log('Web Audio API is not available.')
}
MyNamespace.audio.source = MyNamespace.ctx.createMediaElementSource(MyNamespace.audio.el)
MyNamespace.audio.analyzer = MyNamespace.ctx.createAnalyser()
MyNamespace.audio.source.connect(MyNamespace.audio.analyzer)
MyNamespace.audio.source.connect(MyNamespace.ctx.destination)
console.log('Web Audio API is all set')
// We are sure Web Audio API context is ready.
setupCanvas()
}
MyNamespace.firstUserGesture = false
}
function handleClick() {
// The AudioContext is not allowed to start before user gesture.
// It must be resumed (or created) after a user gesture on the page.
createAudio()
// Play/pause the media
if (MyNamespace.audio.el.paused) {
MyNamespace.audio.el.play()
} else {
MyNamespace.audio.el.pause()
}
}
MyNamespace.audio.btnEl = document.getElementById("btn")
MyNamespace.audio.btnEl.addEventListener('click', handleClick)
The HTML:
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width" />
<title>Micrufun</title>
<link rel="shortcut icon" type="image/x-icon" href="logo.ico" />
</head>
<body>
<div style="width: 100%;">
<button id="btn">Play/pause</button>
</div>
<div style="width: 100%;">
<audio id="aud" style="width: 50%;" preload="metadata" crossorigin="anonymous">
<!-- This source won't show the canvas visualization: -->
<!--source src="media/file_example_MP3_2MG.mp3" /-->
<!-- But this one shows: -->
<source src="https://file-examples-com.github.io/uploads/2017/11/file_example_MP3_2MG.mp3">
Your browser does not support the audio element.
</audio>
</div>
<div style="width: 100%;">
<canvas id="canvasEl" style="width: 88%; height: 100px;"></canvas>
</div>
</body>
</html>
Problem:
Safari shows canvas visualization with this audio source:
<source src="https://file-examples-com.github.io/uploads/2017/11/file_example_MP3_2MG.mp3">
But when I place the exact same audio file in a public location and serve it statically, with this as the audio source:
<source src="media/file_example_MP3_2MG.mp3">
...the audio is played but the canvas visualization is not shown, because the Web Audio API analyzer node output is all 128s and never changes. That means the analyzer node is not outputting anything.
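For reference, 128 is the midpoint of getByteTimeDomainData's 0-255 range, i.e. silence, so a quick check against the objects already set up above can confirm this (a diagnostic sketch, not part of the original code):

// A buffer that is all 128s means the analyzer sees only silence.
MyNamespace.audio.analyzer.getByteTimeDomainData(MyNamespace.audio.data)
const silent = MyNamespace.audio.data.every(v => v === 128)
console.log(silent ? 'analyzer receiving no signal' : 'analyzer receiving audio')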
Note
I serve the web page statically by python3 -m http.server on my machine.
Question
Why does only the Safari browser show different behavior for these two sources?
<!-- This source works fine: -->
<source src="https://file-examples-com.github.io/uploads/2017/11/file_example_MP3_2MG.mp3">
<!-- This source doesn't show the canvas visualization but the audio sound is played: -->
<source src="media/file_example_MP3_2MG.mp3">
Occasional observation
In the case of <source src="media/file_example_MP3_2MG.mp3">, the play/pause button occasionally starts the audio from the beginning. I mean, it doesn't resume the sound where it left off; it just restarts the sound.
But with the other source, the play/pause behaves as expected.
The occasional restart described above is accompanied by distinctive network traffic (screenshot omitted).
I am attempting to make a Spotify visualizer as a personal project, but can't seem to find a way to analyze the sound before it is played through the browser. I have the Spotify player playing music through the browser, and the visualizer is working with mp3 files stored on my machine. I just don't have the slightest clue on how to connect the two. Additionally, I looked through the Spotify API and couldn't seem to find anything there either.
If there is no way to analyze a Spotify track directly, is there any way to capture the sound played through the browser first, then play it through the p5.js loadSound() function?
Here are the code snippets for reference:
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<title>Document</title>
<script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/0.7.2/p5.js">
</script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/0.7.2/addons/p5.dom.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/0.7.2/addons/p5.sound.js"></script>
</head>
<body>
<script src="https://sdk.scdn.co/spotify-player.js"></script>
<script>
window.onSpotifyWebPlaybackSDKReady = () => {
const token =
'redacted';
const player = new Spotify.Player({
name: 'Web Playback SDK Quick Start Player',
getOAuthToken: cb => {
cb(token);
}
});
// Error handling
player.addListener('initialization_error', ({
message
}) => {
console.error(message);
});
player.addListener('authentication_error', ({
message
}) => {
console.error(message);
});
player.addListener('account_error', ({
message
}) => {
console.error(message);
});
player.addListener('playback_error', ({
message
}) => {
console.error(message);
});
// Playback status updates
player.addListener('player_state_changed', state => {
console.log(state);
});
// Ready
player.addListener('ready', ({
device_id
}) => {
console.log('Ready with Device ID', device_id);
});
// Not Ready
player.addListener('not_ready', ({
device_id
}) => {
console.log('Device ID has gone offline', device_id);
});
// Connect to the player!
player.connect();
};
</script>
<script src="visualizer.js"></script>
</body>
</html>
visualizer.js:
var spotifyToken = ""
//["streaming", "user-read-birthdate", "user-read-email", "user-read-private"]
var song;
var amp;
var button;
var volHistory = [];
function toggleSong(){
if (song.isPlaying()){
song.pause();
} else {
song.play();
}
}
function preload(){
soundFormats('mp3');
song = loadSound('backgroundsong.mp3')
}
function setup(){
var canvas = createCanvas(window.innerWidth/2, 100);
//canvas.parent('div id')
canvas.position(0, (windowHeight - height) / 2)
//var canvasLeft = createCanvas(window.innerWidth/2, 100);
//canvasLeft.position(windowWidth/2, (windowHeight - height) / 2)
//createCanvas(window.innerWidth, window.innerHeight);
masterVolume(0.002,0,0);
button = createButton('toggle');
button.mousePressed(toggleSong);
song.loop();
amp = new p5.Amplitude();
}
function draw(){
background(0);
var volume = amp.getLevel();
volHistory.push(volume * 400);
beginShape();
stroke(0, 255, 0);
strokeWeight(3);
strokeCap(ROUND);
strokeJoin(ROUND);
noFill();
for (var i = 0; i < volHistory.length; i++) {
var y = map(volHistory[i], 0, 1, height, 0);
vertex(i*1.5, y);
if (i*1.5 > width) {
volHistory.splice(0, 1);
}
}
endShape();
//ellipse(400, 400, volume * 800, volume * 800);
}
Processing raw data samples is not possible using Spotify's Web Playback SDK. As seen in the following quote, Spotify uses Encrypted Media Extensions in order to make playback possible within the browser.
The Web Playback SDK is client-side JavaScript library which allows you to create a new player in Spotify Connect and play any audio track from Spotify in the browser via Encrypted Media Extensions.
You could use this Spotify Web API endpoint and try to visualize one of those properties. There are no actual samples (which would be the amplitude you asked for) but maybe something those data sets include will fit your needs.
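For illustration, here is a minimal sketch of querying the audio-features endpoint (assuming that is the endpoint meant; the track ID and OAuth token are placeholders):

// Fetch high-level audio features for a track: tempo, energy,
// danceability, valence, etc. No raw samples, but enough to drive visuals.
const trackId = 'TRACK_ID'; // placeholder
const accessToken = 'OAUTH_TOKEN'; // placeholder
fetch('https://api.spotify.com/v1/audio-features/' + trackId, {
  headers: { 'Authorization': 'Bearer ' + accessToken }
})
  .then(function (res) { return res.json(); })
  .then(function (features) {
    console.log(features.tempo, features.energy, features.danceability);
  });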
I found a reddit thread where some people showed off some visualizations, mostly using the aforementioned API endpoint.
Another option is to use Spotify's Android or iOS SDK, where you have access to raw data samples, though I am not sure whether Spotify's terms of service allow processing those samples. But since you are looking for something client-side within the browser, this won't fit your needs.
So I am embedding a video into a custom iframe, and I'm not using YouTube, Vimeo or any of those, so I can't use their APIs. I am making an idle-timer for it, so when the user hasn't acted in X amount of time, it will bring up a confirm window asking if they want to keep watching or restart. However, while this window is up, I want the video to pause, which is proving surprisingly difficult. It also pretty much needs to work cross-domain, as I will be serving the videos from an S3 bucket.
I have seen many threads saying this is basically not possible, but I find that hard to believe. Is it true?
Here's my code (the main part I need help with is pauseVideo() near the bottom):
<!DOCTYPE html>
<html lang="en" dir="ltr">
<head>
<meta charset="utf-8">
<title>HRMSC</title>
</head>
<body>
<iframe class="iframe" id="primaryVideo" src="amazon-s3-video-link.mp4"
width="1000"
height="562.5">
<p> Your browser does not support iframes. </p>
</iframe>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js">
</script>
<script type="text/javascript" src="./IdleScript.js">
</script>
</body>
</html>
IdleScript.js:
var idleTime = 0;
var clickIframe = window.setInterval(checkFocus, 100);
var idleInterval = setInterval(timerIncrement, 600); // 600 ms per tick
var i = 0;
function checkFocus() {
if(document.activeElement == document.getElementById("primaryVideo")) {
idleTime = 0;
console.log("clicked "+(i++));
$('#primaryVideo').blur();
}
}
function timerIncrement() {
idleTime = idleTime + 1;
if (idleTime > 5) { // 5 ticks of 600 ms, roughly 3 seconds
console.log("restart?");
if (this.resetInterstitial()){
idleTime = 0;
window.location.reload();
}
else{
idleTime = 0;
console.log("keep watching");
}
}
}
var pauseVideo = function ( element ) {
// WHAT CAN I DO HERE?
console.log("pause!");
// WHAT CAN I DO HERE?
};
function resetInterstitial(){
pauseVideo(primaryVideo);
return confirm("You haven't tapped anything in a while. Do you want to keep watching or start from the beginning?");
}
Use a <video> tag: https://www.w3schools.com/tags/tag_video.asp
and use the built-in JavaScript functions: https://www.w3schools.com/tags/av_met_pause.asp
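A minimal sketch of that approach, reusing the placeholder S3 URL from the question:

<video id="primaryVideo" src="amazon-s3-video-link.mp4" width="1000" height="562.5" controls>
  <p>Your browser does not support the video tag.</p>
</video>

With a native <video> element, pauseVideo() becomes trivial, because HTMLMediaElement exposes pause() directly and no cross-document scripting is needed:

var pauseVideo = function (element) {
  // The media element API is available directly on the <video> node.
  element.pause();
  console.log("pause!");
};
pauseVideo(document.getElementById("primaryVideo"));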
I'm having a problem with a JavaScript file, possibly failing to load or containing errors.
HTML Code:
<head>
<title>TEMPLATE</title>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width">
<meta name="author" content="Adam Bullock">
<link rel="stylesheet" type="text/css" href="stylesheet.css">
<script src="js/main.js"></script>
</head>
Javascript Code:
/*
* navBar_arrowSlider moves an arrow depending on the user's mouse
* selection on the navigation bar.
*
* The script takes a position and moves the slider image to a set margin.
*
* navElement - number of the list item
* mouseEvent - if the user moves out of the nav bar the icon will move back to
* its default position
* defaultPosition - number of the list item
*/
function navBar_arrowSlider(navElement, mouseEvent, defaultPosition) {
var arrow = document.getElementById(navBarSlider);
var navElementPosition = ["90px","295px","493px","695px"];
// DEBUG
alert("Positon:" + navElement + " Move Event:" + mouseEvent + " default position:" + defaultPositon);
// END OF DEBUG
if (mouseEvent === "mouseOver") {
var position = navElementPosition[navElement];
arrow.style.marginLeft=navElementPosition;
} else {
var position = navElementPosition[defaultPosition];
arrow.style.marginLeft=navElementPosition;
}
}
I'm at a loss as to why this is not working, because I can't seem to see any errors at the moment. Bear in mind I have been working solidly for the past 14 hours :/ life of a programmer.
Any help would be appreciated.
Thanks
Bull
You cannot put inline JavaScript code inside a <script> tag that loads external JavaScript. You need to break them up:
<script type="text/javascript" src="js/main.js"></script>
<script type="text/javascript">navBar_arrowSlider(0,'moveOver',0)</script>
A <script> element can have either a "src" attribute or JavaScript content, but not both.
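Separately from the script-tag issue, the posted function has a few bugs of its own: navBarSlider is passed to getElementById without quotes, the debug alert references a misspelled defaultPositon, and the looked-up position is computed but never applied. A corrected sketch, keeping the original structure:

function navBar_arrowSlider(navElement, mouseEvent, defaultPosition) {
  // The element ID must be a string literal.
  var arrow = document.getElementById("navBarSlider");
  var navElementPosition = ["90px", "295px", "493px", "695px"];
  var position;
  if (mouseEvent === "mouseOver") {
    position = navElementPosition[navElement];
  } else {
    position = navElementPosition[defaultPosition];
  }
  // Apply the looked-up offset, not the whole array.
  arrow.style.marginLeft = position;
}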
How can I get the length of a route?
I have been looking at this basic code for a route from A to B:
// Get the DOM node to which we will append the map
var mapContainer = document.getElementById("mapContainer");
// Create a map inside the map container DOM node
var map = new nokia.maps.map.Display(mapContainer, {
// Initial center and zoom level of the map
center: [52.51, 13.4],
zoomLevel: 7,
// We add the behavior component to allow panning / zooming of the map
components:[new nokia.maps.map.component.Behavior()]
}),
router = new nokia.maps.routing.Manager(); // create a route manager;
// The function onRouteCalculated will be called when a route was calculated
var onRouteCalculated = function (observedRouter, key, value) {
if (value == "finished") {
var routes = observedRouter.getRoutes();
//create the default map representation of a route
var mapRoute = new nokia.maps.routing.component.RouteResultSet(routes[0]).container;
map.objects.add(mapRoute);
//Zoom to the bounding box of the route
map.zoomTo(mapRoute.getBoundingBox(), false, "default");
} else if (value == "failed") {
alert("The routing request failed.");
}
};
/* We create an observer on the router's "state" property so that the
* onRouteCalculated function created above will be called once the route
* is calculated
*/
router.addObserver("state", onRouteCalculated);
// Create waypoints
var waypoints = new nokia.maps.routing.WaypointParameterList();
waypoints.addCoordinate(new nokia.maps.geo.Coordinate(52.51652540955727, 13.380154923889933));
waypoints.addCoordinate(new nokia.maps.geo.Coordinate(52.52114106145058,13.40921934080231));
/* Properties such as type, transportModes, options, trafficMode can be
* specified as second parameter in performing the routing request.
* See for the mode options the "nokia.maps.routing.Mode" section in the developer's guide
*/
var modes = [{
type: "shortest",
transportModes: ["car"],
options: "avoidTollroad",
trafficMode: "default"
}];
// Calculate the route (and call onRouteCalculated afterwards)
router.calculateRoute(waypoints, modes);
When the route is successfully calculated, the callback function holds an array of one or more routes.
Code:
var routes = observedRouter.getRoutes();
Each of these holds a route summary, where you can obtain useful info about the route.
Code:
alert ("Route Length = " + routes[0].totalLength + " m.");
alert ("As the crow flies = " + routes[0].waypoints[0].mappedPosition.distance(routes[0].waypoints[1].mappedPosition) + " m.");
(Obviously you'll need to loop up to waypoints.length - 1 for a calculation with stop-overs, as sketched below.)
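A sketch of that multi-stop case, using the same routes array (assuming every waypoint exposes mappedPosition as above):

// Sum the straight-line distances between consecutive waypoints.
var crowFlies = 0;
var wps = routes[0].waypoints;
for (var i = 0; i < wps.length - 1; i++) {
  crowFlies += wps[i].mappedPosition.distance(wps[i + 1].mappedPosition);
}
alert("Total as the crow flies = " + crowFlies + " m.");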
Here is your code example with the extra two lines added. You need to use your own app ID and token to get it to work.
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<!--
Example from Nokia Maps API Playground, for more information visit http://api.maps.nokia.com
-->
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="X-UA-Compatible" content="IE=7; IE=EmulateIE9"/>
<base href="http://developer.here.net/apiexplorer/examples/api-for-js/routing/map-with-route-from-a-to-b.html" />
<meta http-equiv="content-type" content="text/html; charset=UTF-8"/>
<title>Nokia Maps API Example: Add route from A to B</title>
<meta name="description" content="Routing Manager offers the ability to request a route with various modes between two points"/>
<meta name="keywords" content="routing, services, a to b, route, direction, navigation"/>
<!-- For scaling content for mobile devices, setting the viewport to the width of the device-->
<meta name=viewport content="width=device-width, height=device-height, initial-scale=1.0, maximum-scale=1.0, user-scalable=no"/>
<!-- Styling for example container (NoteContainer & Logger) -->
<link rel="stylesheet" type="text/css" href="http://developer.here.net/apiexplorer/examples/templates/js/exampleHelpers.css"/>
<!-- By default we add ?with=all to load every package available, it's better to change this parameter to your use case. Options ?with=maps|positioning|places|placesdata|directions|datarendering|all -->
<script type="text/javascript" charset="UTF-8" src="http://api.maps.nokia.com/2.2.3/jsl.js?with=all"></script>
<style type="text/css">
html {
overflow:hidden;
}
body {
margin: 0;
padding: 0;
overflow: hidden;
width: 100%;
height: 100%;
position: absolute;
}
#mapContainer {
width: 100%;
height: 100%;
left: 0;
top: 0;
position: absolute;
}
</style>
</head>
<body>
<div id="mapContainer"></div>
<script type="text/javascript" id="exampleJsSource">
/* Set authentication token and appid
* WARNING: this is a demo-only key
* please register on http://api.developer.nokia.com/
* and obtain your own developer's API key
*/
nokia.Settings.set("appId", "YOUR APP ID");
nokia.Settings.set("authenticationToken", "YOUR TOKEN");
// Get the DOM node to which we will append the map
var mapContainer = document.getElementById("mapContainer");
// Create a map inside the map container DOM node
var map = new nokia.maps.map.Display(mapContainer, {
// Initial center and zoom level of the map
center: [52.51, 13.4],
zoomLevel: 7,
// We add the behavior component to allow panning / zooming of the map
components:[new nokia.maps.map.component.Behavior()]
}),
router = new nokia.maps.routing.Manager(); // create a route manager;
// The function onRouteCalculated will be called when a route was calculated
var onRouteCalculated = function (observedRouter, key, value) {
if (value == "finished") {
var routes = observedRouter.getRoutes();
//create the default map representation of a route
var mapRoute = new nokia.maps.routing.component.RouteResultSet(routes[0]).container;
map.objects.add(mapRoute);
//Zoom to the bounding box of the route
map.zoomTo(mapRoute.getBoundingBox(), false, "default");
alert ("Route Length = " + routes[0].totalLength + " m.");
alert ("As the crow flies = "
routes[0].waypoints[0].mappedPosition.distance(
routes[0].waypoints[1].mappedPosition) + " m.");
} else if (value == "failed") {
alert("The routing request failed.");
}
};
/* We create an observer on the router's "state" property so that the
* onRouteCalculated function created above will be called once the route
* is calculated
*/
router.addObserver("state", onRouteCalculated);
// Create waypoints
var waypoints = new nokia.maps.routing.WaypointParameterList();
waypoints.addCoordinate(new nokia.maps.geo.Coordinate(52.51652540955727, 13.380154923889933));
waypoints.addCoordinate(new nokia.maps.geo.Coordinate(52.52114106145058, 13.40921934080231));
/* Properties such as type, transportModes, options, trafficMode can be
* specified as second parameter in performing the routing request.
*
* See for the mode options the "nokia.maps.routing.Mode" section in the developer's guide
*/
var modes = [{
type: "shortest",
transportModes: ["car"],
options: "avoidTollroad",
trafficMode: "default"
}];
// Calculate the route (and call onRouteCalculated afterwards)
router.calculateRoute(waypoints, modes);
</script>
</body>
</html>