As you can see below, the only method I can think of to get the channel count of an audio file (including WAV and MP3) is decodeAudioData(). However, I want to get the channel count without decoding the whole audio file, which uses a lot of CPU (I think it's CPU).
Is there any other method to achieve this? If not, is there a way to decode only a section of the file?
Thanks in advance!
audio_file.onchange = function() {
  let files = this.files;
  let file = URL.createObjectURL(files[0]);

  let xhr = new XMLHttpRequest();
  xhr.open('GET', file, true);
  xhr.responseType = "arraybuffer";
  xhr.onload = () => {
    audioContext.decodeAudioData(xhr.response).then((buffer) => {
      CHANNEL_COUNT = buffer.numberOfChannels;
      if (CHANNEL_COUNT === 4) {
        document.querySelector('.status').innerHTML = 'FOA file detected';
      } else {
        document.querySelector('.status').innerHTML = "Audio file with " + CHANNEL_COUNT + " channels detected";
      }
    });
  };
  xhr.send(null);

  audioElement.src = file;
  audioElement.load();
};
The channel information is usually at the beginning of the encoded file, so you could just download the first few thousand bytes and use decodeAudioData on it. However, I think many audio containers can support changing the number of channels in the middle of the file. You'll lose that information, but then so would decodeAudioData.
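For example, a minimal sketch of that idea, assuming the server honors HTTP Range requests; some decoders may still reject a truncated file, so keep the full download as a fallback:

// Fetch only the first 64 KB and try to decode just that slice.
// The byte count is illustrative; adjust it for your files.
async function probeChannelCount(url, audioContext) {
  const response = await fetch(url, { headers: { Range: 'bytes=0-65535' } });
  const partialData = await response.arrayBuffer();
  // decodeAudioData may reject truncated data in some browsers;
  // on error, fall back to downloading and decoding the whole file.
  const buffer = await audioContext.decodeAudioData(partialData);
  return buffer.numberOfChannels;
}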
I managed to get a video from PHP using this code:
var some_video_element = document.querySelector('video');
var req = new XMLHttpRequest();
req.onload = function () {
  var blob_uri = URL.createObjectURL(this.response);
  some_video_element.src = blob_uri;
  some_video_element.addEventListener('canplaythrough', (e) => {
    URL.revokeObjectURL(blob_uri);
  });
};
req.open("get", "vid.php", true);
req.responseType = 'blob'; // responseType, not overrideMimeType, makes this.response a Blob
req.send(null);
However, the loading takes a long time, so I want to show the data as soon as I get it. MDN indicates that we can use text/plain or "" as the MIME type to read the response text while it is still in progress. However, I can't manage to convert the text/plain response to video/mp4 using a Blob. Below is the code that doesn't work: I'm trying to play the video once some part of it is available, while the rest is still downloading.
var some_video_element = document.querySelector('video');
var req = new XMLHttpRequest();
req.onprogress = function () {
  var text = b64toBlob(Base64.encode(this.response), "video/mp4");
  var blob_uri = URL.createObjectURL(text);
  some_video_element.src = blob_uri;
  some_video_element.addEventListener('oncanplaythrough', (e) => {
    URL.revokeObjectURL(blob_uri);
  });
};
req.onload = function () {
  var text = b64toBlob(this.response, "video/mp4");
  var blob_uri = URL.createObjectURL(text);
  some_video_element.src = blob_uri;
  some_video_element.addEventListener('oncanplaythrough', (e) => {
    URL.revokeObjectURL(blob_uri);
  });
};
req.open("get", "vid.php", true);
req.overrideMimeType('text/plain');
req.send(null);
Thanks.
NB: This JavaScript fetches from this PHP code: https://codesamplez.com/programming/php-html5-video-streaming-tutorial
but echo data has been changed to echo base64_encode(data);
If you use the Fetch API instead of XMLHttpRequest you can consume the response as a ReadableStream which can be fed into a SourceBuffer. This will allow the video to be playable as soon as it starts to load instead of waiting for the full file to download. This does not require any special video container formats, back-end processing or third-party libraries.
const vid = document.getElementById('vid');
const format = 'video/webm; codecs="vp8,vorbis"';
const mediaSource = new MediaSource();
let sourceBuffer = null;

mediaSource.addEventListener('sourceopen', event => {
  sourceBuffer = mediaSource.addSourceBuffer(format);
  fetch('https://bes.works/dev/samples/test.webm')
    .then(response => process(response.body.getReader()))
    .catch(err => console.error(err));
});

vid.src = URL.createObjectURL(mediaSource);

function process(stream) {
  return new Response(
    new ReadableStream({
      start(controller) {
        async function read() {
          let { done, value } = await stream.read();
          if (done) { controller.close(); return; }
          sourceBuffer.appendBuffer(value);
          sourceBuffer.addEventListener(
            'updateend', event => read(),
            { once: true }
          );
        }
        read();
      }
    })
  );
}
video { width: 300px; }
<video id="vid" controls></video>
As indicated in the comments, you are missing some key components.
You can implement what you are asking for, but you need to make some changes. Building on the HTML5 streaming API (MediaSource), you can create a stream that assembles the video from segments you fetch from the server.
Something else to keep in mind: the existing HLS and DASH protocols already solve this. HLS in particular is simple to use; it is built around segments that the player fetches from your server, which could decode your base64'd feed before serving them.
https://videojs.github.io/videojs-contrib-hls/
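For instance, a minimal sketch with video.js and that plugin (the playlist URL is hypothetical; your server would have to expose the video as HLS segments, decoding the base64'd feed before segmenting):

<video id="vid" class="video-js" controls></video>
<script>
  // video.js with videojs-contrib-hls loaded
  var player = videojs('vid');
  player.src({
    src: 'https://example.com/stream/playlist.m3u8', // hypothetical playlist
    type: 'application/x-mpegURL'
  });
</script>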
I am trying to retrieve a file from the user's file system and upload it to AWS S3, but I have had no success so far. To be more specific, I have been trying to upload images, and so far the images won't render properly after I upload them. I am really only familiar with uploading images as Blobs, but since the SHA-256 function can't read a Blob, I'm unsure what to do. Below is my code:
var grabFile = new XMLHttpRequest();
grabFile.open("GET", 'https://s3.amazonaws.com/' + bucketName + '/asd.jpeg', true);
grabFile.responseType = "arraybuffer";
grabFile.onload = function(e) {
  var grabbedFile = this.response;
  var arrayBufferView = new Uint8Array(this.response);
  var blob = new Blob([arrayBufferView], { type: "image/jpeg" });
  var base64data = '';
  var reader = new FileReader();
  reader.readAsDataURL(blob);
  reader.onloadend = function() {
    //var readData = reader.result;
    var readData = blob;
    //readData = readData.split(',').pop();
    console.log(readData);
    console.log(readData.toString());
    var shaString = CryptoJS.SHA256(readData.toString()).toString();
    var request = new XMLHttpRequest();
    var signingKey = getSigningKey(dateStamp, secretKey, regionName, serviceName);
    var headersList = "content-type;host;x-amz-acl;x-amz-content-sha256;x-amz-date";
    var time = new Date();
    time = time.toISOString();
    time = time.replace(/:/g, '').replace(/-/g, '');
    time = time.substring(0, time.indexOf('.')) + "Z";
    var canonString = "PUT\n" +
        "/asd5.jpeg\n" +
        "\n" +
        //"content-encoding:base64\n"+
        "content-type:image/jpeg\n" +
        "host:" + bucketName + ".s3.amazonaws.com\n" +
        'x-amz-acl:public-read\n' +
        'x-amz-content-sha256:' + shaString + "\n" +
        'x-amz-date:' + time + '\n' +
        '\n' +
        headersList + '\n' +
        shaString;
    var stringToSign = "AWS4-HMAC-SHA256\n" +
        time + "\n" +
        dateStamp + "/us-east-1/s3/aws4_request\n" +
        CryptoJS.SHA256(canonString);
    var authString = CryptoJS.HmacSHA256(stringToSign, signingKey).toString();
    var auth = "AWS4-HMAC-SHA256 " +
        "Credential=" + accessKey + "/" + dateStamp + "/" + regionName + "/" + serviceName + "/aws4_request, " +
        "SignedHeaders=" + headersList + ", " +
        "Signature=" + authString;
    request.open("PUT", "https://" + bucketName + ".s3.amazonaws.com/asd5.jpeg", true);
    request.setRequestHeader("Authorization", auth);
    //request.setRequestHeader("content-encoding", "base64");
    request.setRequestHeader("content-type", "image/jpeg");
    request.setRequestHeader('x-amz-acl', 'public-read');
    request.setRequestHeader("x-amz-content-sha256", shaString);
    request.setRequestHeader("x-amz-date", time);
    request.send(readData.toString());
    console.log(request);
  };
};
grabFile.send(null);
How do I go about doing this? The code above uploads something that's only a few bytes, because blob.toString() comes out as "[object Blob]" and that string is what gets uploaded. If I don't call toString(), I get an error from my SHA-256 function.
As you can see, I tried reading it as Base64 before uploading, but that did not resolve my problem either. This problem has been bugging me for close to a week now and I would love to get it solved. I've tried changing the content-type, changing the body of what I'm uploading, etc., but nothing has worked.
EDIT: Forgot to mention (even though the title should imply it) that I cannot use the SDK for this. I was using it at one point, and with it I was able to upload images as Blobs, so I know this is possible; I just don't know what crafty thing the SDK is doing to upload them.
EDIT2: I found the solution, in case someone stumbles across this in the future. Replace every place I have shaString with 'UNSIGNED-PAYLOAD', send the blob itself, and it will work! Here's where I found it: https://docs.aws.amazon.com/AmazonS3/latest/API/sig-v4-header-based-auth.html
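In other words, a sketch of that change against the code above (same variables as before; the body is no longer hashed at all):

// Sign with the literal "UNSIGNED-PAYLOAD" instead of a hash of the body,
// then send the raw Blob rather than readData.toString().
var shaString = 'UNSIGNED-PAYLOAD'; // used in canonString, stringToSign and the header
// ... build canonString, stringToSign and auth exactly as before ...
request.setRequestHeader("x-amz-content-sha256", shaString);
request.send(blob); // the Blob built from the ArrayBuffer, not a string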
I've noticed that recent versions of Mozilla Firefox throw an allocation size overflow (on FileReader.readAsBinaryString()) when the file is bigger than roughly 200 MB.
Here's some of my test code for the client web browser:
function upload(fileInputId, fileIndex) {
  var file = document.getElementById(fileInputId).files[fileIndex];
  var blob;
  var reader = new FileReader();
  reader.readAsBinaryString(file);
  reader.onloadend = function(evt) {
    xhr = new XMLHttpRequest();
    xhr.open("POST", "upload.php", true);
    XMLHttpRequest.prototype.mySendAsBinary = function(text) {
      var data = new ArrayBuffer(text.length);
      var ui8a = new Uint8Array(data, 0);
      for (var i = 0; i < text.length; i++) {
        ui8a[i] = (text.charCodeAt(i) & 0xff);
      }
      if (typeof window.Blob == "function") {
        blob = new Blob([data]);
      } else {
        var bb = new (window.MozBlobBuilder || window.WebKitBlobBuilder || window.BlobBuilder)();
        bb.append(data);
        blob = bb.getBlob();
      }
      this.send(blob);
    };
    var eventSource = xhr.upload || xhr;
    eventSource.addEventListener("progress", function(e) {
      var position = e.position || e.loaded;
      var total = e.totalSize || e.total;
      var percentage = Math.round((position / total) * 100);
    });
    xhr.onreadystatechange = function() {
      if (xhr.readyState == 4) {
        if (xhr.status == 200) {
          console.log("Done");
        } else {
          console.log("Fail");
        }
      }
    };
    xhr.mySendAsBinary(evt.target.result);
  };
}
So I tried changing it to FileReader.readAsArrayBuffer(); the error no longer shows up, but the data is not the same (as it's not read as a binary string).
Does anyone have a solution to this problem? Is there any way to upload a bigger file from JS to a web server in raw/string form, other than the FileReader implementation?
I read in the Mozilla JS documentation, which says:
This feature is non-standard and is not on a standards track. Do not
use it on production sites facing the Web: it will not work for every
user. There may also be large incompatibilities between
implementations and the behavior may change in the future. - Mozilla
If not readAsBinaryString, then how should readAsArrayBuffer or readAsText be implemented?
To send Files to a web-server, you simply don't need js. HTML alone is well able to do this with the <form> element.
Now if you want to go through js, e.g. to catch the different ProgressEvents, then you can send your File directly; there is no need to read it whatsoever on your side.
To do this, you've got two (or three) solutions.
If your server is able to handle PUT requests, you can simply xhr.send(file);.
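A minimal sketch of that variant (the endpoint name is illustrative):

var xhr = new XMLHttpRequest();
xhr.open('PUT', 'your_server.page'); // illustrative endpoint
xhr.send(file); // the File from the <input type="file">, sent as the raw request body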
Otherwise, you'd have to go through a FormData.
// if you really want to go the XHR way
document.forms[0].onsubmit = function handleSubmit(evt) {
  if (!window.FormData) { // old browser use the <form>
    return;
  }
  // now we handle the submit through js
  evt.preventDefault();
  var fD = new FormData(this);
  var xhr = new XMLHttpRequest();
  xhr.onprogress = function handleProgress(evt) {};
  xhr.onload = function handleLoad(evt) {};
  xhr.onerror = function handleError(evt) {};
  xhr.open(this.method, this.action);
  // xhr.send(fD); // won't work in StackSnippet
  log(fD, this.method, this.action); // so we just log its content
};

function log(formData, method, action) {
  console.log('would have sent');
  for (let [key, val] of formData.entries())
    console.log(key, val);
  console.log('through', method);
  console.log('to', action);
}
<!-- this in itself is enough -->
<form method="POST" action="your_server.page">
  <input type="file" name="file_upload">
  <input type="submit">
</form>
Now, you sent a comment saying that you can't upload Files bigger than 1GB to your server.
This limitation is only due to your server's config, so the best if you want to accept such big files is to configure it correctly.
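For a PHP back-end, for instance, that usually means raising the relevant limits in php.ini (the values here are illustrative):

; illustrative php.ini settings for accepting large uploads
upload_max_filesize = 4G
post_max_size = 4G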
But if you really want to send your File by chunks, even then don't get off of the Blob interface.
Indeed Blobs have a slice() method, so use it.
document.forms[0].onsubmit = function handleSubmit(evt) {
  evt.preventDefault();
  var file = this.elements[0].files[0];
  var processed = 0;
  if (file) {
    // var MAX_CHUNK_SIZE = Math.min(file.size, server_max_size);
    // for demo we just split in 10 chunks
    var MAX_CHUNK_SIZE = file.size > 10 ? (file.size / 10) | 0 : 1;
    loadChunk(0);
  }
  function loadChunk(start) {
    var fD = new FormData();
    var sliced = file.slice(start, start + MAX_CHUNK_SIZE);
    processed += sliced.size; // only for demo
    fD.append('file_upload', sliced, file.name);
    fD.append('starting_index', start);
    if (start + MAX_CHUNK_SIZE >= file.size) {
      fD.append('last_chunk', true);
    }
    var xhr = new XMLHttpRequest();
    xhr.open('POST', 'your_server.page');
    xhr.onload = function onchunkposted(evt) {
      if (start + MAX_CHUNK_SIZE >= file.size) {
        console.log('All done. Original file size: %s, total of chunks sizes %s', file.size, processed);
        return;
      }
      loadChunk(start + MAX_CHUNK_SIZE);
    };
    // xhr.send(fD);
    log(fD);
    setTimeout(xhr.onload, 200); // fake XHR onload
  }
};

function log(formData, method, action) {
  console.log('would have sent');
  for (let [key, val] of formData.entries())
    console.log(key, val);
}
<form method="POST" action="your_server.page">
  <input type="file" name="file_upload">
  <input type="submit">
</form>
But you absolutely don't need to go through a FileReader for this operation.
Actually the only case where it could make sense to use a FileReader here would be for some Android browsers that don't support passing Blob into a FormData, even though they don't give a single clue about it.
So in this case, you'd have to set up your server to let you know the request was empty, and only then read the File as a dataURI that you would send in place of the original File.
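A rough sketch of that fallback, reusing sliced, start and xhr from the chunked snippet above (the field name is made up):

// For browsers that silently drop Blobs from FormData:
// read the chunk as a data URL and send that string instead.
var reader = new FileReader();
reader.onload = function() {
  var fD = new FormData();
  fD.append('file_upload_b64', reader.result); // "data:...;base64,..." (hypothetical field name)
  fD.append('starting_index', start);
  xhr.send(fD);
};
reader.readAsDataURL(sliced);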
After a long week of research and sleepless nights: you can't upload binary strings without breaking them, and base64 doesn't work for all files, only images; the journey from the client side to the server breaks the bytes being sent.
Kaiido's statement is correct:
To send Files to a web-server, you simply don't need js
But that doesn't answer my question. A simple XMLHttpRequest() can upload the file and track its progress as well, but still, it's not it. A direct upload, whether from the <form> or via XMLHttpRequest(), requires increasing the upload limit in the PHP settings, which is not convenient for me. What if a client uploads a 4 GB file? Then I need to increase the limit to 4 GB. The next time a client uploads a 6 GB file, I have to increase it to 6 GB.
Using the slice() method makes sense for bigger files, as we can send them to the server part by part, but I am not using it yet.
Here's some of my test code that works the way I want. I hope some expert can correct me if I am wrong.
My Upload.js
function upload(fileInputId, fileIndex) {
  var file = document.getElementById(fileInputId).files[fileIndex];
  var blob;
  var reader = new FileReader();
  reader.readAsArrayBuffer(file);
  reader.onloadend = function(evt) {
    xhr = new XMLHttpRequest();
    xhr.open("POST", "upload.php?name=" + base64_encode(file.name), true);
    XMLHttpRequest.prototype.mySendAsBinary = function(text) {
      var ui8a = new Uint8Array(new Int8Array(text));
      if (typeof window.Blob == "function") {
        blob = new Blob([ui8a]);
      } else {
        var bb = new (window.MozBlobBuilder || window.WebKitBlobBuilder || window.BlobBuilder)();
        bb.append(ui8a);
        blob = bb.getBlob();
      }
      this.send(blob);
    };
    var eventSource = xhr.upload || xhr;
    eventSource.addEventListener("progress", function(e) {
      var position = e.position || e.loaded;
      var total = e.totalSize || e.total;
      var percentage = Math.round((position / total) * 100);
      console.log(percentage);
    });
    xhr.onreadystatechange = function() {
      if (xhr.readyState == 4) {
        if (xhr.status == 200) {
          console.log("Done");
        } else {
          console.log("Fail");
        }
      }
    };
    xhr.mySendAsBinary(evt.target.result);
  };
}
Below is how the PHP server listens for the ArrayBuffer from JS:
if (isset($_GET["name"])) {
    $name = base64_decode($_GET["name"]);
    $loc = $name;
    $inputHandler = fopen('php://input', "r");
    $fileHandler = fopen($loc, "w+");
    while (true) {
        //$buffer = fgets($inputHandler, 1024);
        $buffer = fread($inputHandler, 1000000);
        if (strlen($buffer) == 0) {
            fclose($inputHandler);
            fclose($fileHandler);
            return true;
        }
        //$b = base64_encode($buffer);
        fwrite($fileHandler, $buffer);
    }
}
The above method works well. The FileReader reads the file as an ArrayBuffer, which is then uploaded to the server. For me, migrating from readAsBinaryString() to readAsArrayBuffer() is important, and readAsArrayBuffer() performs better than readAsBinaryString().
Here are some reasons why some developers rely on the FileReader API:
Streaming. Using this method, the file is streamed, so we can avoid adjusting the PHP upload settings again and again.
Easy encryption. As the file is sent as an ArrayBuffer, it is easy to encrypt the file while the upload is in progress; see the sketch after this list.
Any file type. This method also supports uploading any type of file. I've done some tests and found that readAsArrayBuffer() is faster than readAsBinaryString() and a direct form upload. You may try it.
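A rough sketch of that encryption idea using the Web Crypto API (the function name and endpoint are made up, and key management is out of scope here):

// Encrypt the ArrayBuffer with AES-GCM before POSTing it.
async function encryptAndSend(arrayBuffer, cryptoKey, url) {
  var iv = crypto.getRandomValues(new Uint8Array(12));
  var ciphertext = await crypto.subtle.encrypt(
    { name: 'AES-GCM', iv: iv },
    cryptoKey, // a CryptoKey from crypto.subtle.generateKey/importKey
    arrayBuffer
  );
  var xhr = new XMLHttpRequest();
  xhr.open('POST', url, true);
  xhr.send(new Blob([iv, ciphertext])); // prepend the IV so the server can decrypt
}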
Security Notice
The above is test code only; to use it in production, you have to send the data (whether GET or POST) over HTTPS.
I'm trying to create a copy of an image (which is located at a URL) and save it to Firebase's storage facility. I want to store the actual image file and not just the URL. If I understand correctly, I first need to convert the image at the URL to a blob or file and then upload the data to Firebase storage.
This is my current attempt in JavaScript:
function savePicture() {
  var url = ["http://carltonvet.com.au/sites/default/files/styles/large/public/images/article/cats.jpg"];
  var blobpic = new Blob(url, {type: 'file'});
  console.log(blobpic);
  var user = firebase.auth().currentUser;
  if (user != null) {
    var userid = user.uid;
    var ref = firebase.storage().ref(userid + "profilePhoto");
    ref.put(blobpic).then(function(snapshot) {
      console.log('Picture is uploaded!');
      console.log(snapshot);
      var filePath = snapshot.metadata.fullPath;
      document.getElementById('picTestAddress').innerHTML = "" + filePath;
      document.getElementById('picTestImage').src = "" + filePath;
    });
  } else {
    console.log("Something went wrong, user is null.");
  }
}
I have two HTML tags like this:
<div id="picTestAddress"></div>
<img id="picTestImage" />
I'm pretty sure this is only saving the url and not the physical image.
The "picTestAddress" gets filled in with "qAjnfi387DHhd389D9j3r/profilePhoto", and the console shows the following error for "picTestImage": GET file:///android_asset/www/qAjnfi387DHhd389D9j3r/profilePhoto net::ERR_FILE_NOT_FOUND
I'm using Firebase for Web and Cordova. And I'm testing the app on my android phone.
I understand that the error is because it's looking for the image on my phone's local file system. This makes sense to me, so I thought I could fix this by appending my app's address to the beginning of the filePath (eg: document.getElementById('picTestImage').src = "https://firebasestorage.googleapis.com/v0/b/MY_APP.appspot.com/o/"+filePath;).
To find the correct path, I navigated to the file's location in the Firebase console and copied the "Download url" address - but when I checked this (by entering it into my web browser) it loaded a white page which contained one line of text, which was the original url: "http://carltonvet.com.au/sites/default/files/styles/large/public/images/article/cats.jpg"
So now I think I've just saved the url to the storage instead of the actual image file.
I've been following the Firebase docs, and I think I have the uploading part working correctly, but I think I'm failing when it comes to converting the URL to the blob/file with JavaScript.
I've looked through some other questions, such as this one: How to store and view images on firebase? and was going to follow the example here: https://github.com/firebase/firepano but it says that it's now a legacy example and I can't seem to find an updated version in Firebase's samples section.
Any advice or help with how to do this would be really appreciated.
Thank you in advance!
Looks good, though I'd also consider a promisified version:
function getBlob(url) {
  return new Promise(function(resolve, reject) {
    var xhr = new XMLHttpRequest();
    xhr.responseType = 'blob';
    xhr.onload = function(event) {
      var blob = xhr.response;
      resolve(blob);
    };
    xhr.onerror = reject;
    xhr.open('GET', url);
    xhr.send();
  });
}

function storageURLForPhoto(oldURL, newName) {
  return getBlob(oldURL)
    .then(function(blob) {
      var picRef = firebase.storage().ref().child(newName);
      return picRef.put(blob);
    })
    .then(function(snapshot) {
      return snapshot.downloadURL;
    })
    .catch(function(error) {
      // handle any errors
    });
}
Little easier to reason about :)
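Example use (the URL and name are made up):

storageURLForPhoto('http://example.com/photos/cat.jpg', 'profilePhoto')
  .then(function(downloadURL) {
    console.log('Stored at: ' + downloadURL);
  });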
The following works:
function savePhoto() {
  var url = "http://www.planetware.com/photos-large/F/france-paris-eiffel-tower.jpg";
  // First, download the file:
  var xhr = new XMLHttpRequest();
  xhr.responseType = 'blob';
  xhr.onload = function(event) {
    var blob = xhr.response;
    // Get the current user:
    var user = firebase.auth().currentUser;
    if (user != null) {
      var userid = user.uid;
      // Define where to store the picture:
      var picRef = firebase.storage().ref(userid + "/profilePhoto");
      // Store the picture:
      picRef.put(blob).then(function(snapshot) {
        console.log('Picture uploaded!');
        // Now get image from storage and display in div...
        picRef.getDownloadURL().then(function(pic) {
          var userspic = pic;
          document.getElementById('picTestImage').src = userspic;
        }).catch(function(error) {
          console.log("There was an error: " + error);
        });
      });
    } else {
      console.log("We weren't able to confirm there is a current user, something went wrong.");
    }
  };
  xhr.open('GET', url);
  xhr.send();
}
For example, say the user loads some very large images or media files into your web app. When they return, you want your app to show what they've previously loaded, but you can't keep the actual file data in localStorage because the data is too large.
This is NOT possible with localStorage. Data stored in localStorage needs to be serializable to one of the supported primitive types (ultimately a string), and that does not include the File object.
For example, this will not work as you'd expect:
var el = document.createElement('input');
el.type = 'file';
el.onchange = function(e) {
  // Serializing a File object only yields "{}"; none of the file data survives.
  localStorage.file = JSON.stringify(this.files[0]);

  // LATER ON...
  var reader = new FileReader();
  reader.onload = function(e) {
    var result = this.result; // never reaches here.
  };
  reader.readAsText(JSON.parse(localStorage.file));
};
document.body.appendChild(el);
The solution is to use a more powerful storage option like writing the file contents to the HTML5 Filesystem or stashing it in IndexedDB.
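For instance, a minimal IndexedDB sketch (the database and store names are made up, and someFile stands for a File obtained from an <input>); unlike localStorage, IndexedDB can store Blob and File objects directly:

var open = indexedDB.open('fileStore', 1);
open.onupgradeneeded = function() {
  open.result.createObjectStore('files');
};
open.onsuccess = function() {
  var db = open.result;
  var tx = db.transaction('files', 'readwrite');
  tx.objectStore('files').put(someFile, 'savedFile'); // someFile: a File from an <input>
  tx.oncomplete = function() { db.close(); };
};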
Technically you can, if you just need to save small files in localStorage.
Just base64 that ish, and since it's a string... it's localStorage-friendly.
localStorage has a quota of roughly 5 MB in most browsers. Small images produce fairly short base64 strings (though base64 is about a third larger than the raw bytes), so this is a feasible way to store small images. If you use this lazy man's way, the downside is you'll have to mind that ~5 MB limit. I think it could definitely be a solution depending on your needs.
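A minimal sketch of that approach, assuming fileInput is an <input type="file"> element (the storage key is made up):

fileInput.onchange = function() {
  var reader = new FileReader();
  reader.onload = function() {
    try {
      // reader.result is a data URL: "data:image/png;base64,..."
      localStorage.setItem('smallImage', reader.result);
    } catch (e) {
      console.log('Storage failed (quota?): ' + e);
    }
  };
  reader.readAsDataURL(this.files[0]);
};
// Later: document.getElementById('img').src = localStorage.getItem('smallImage');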
Yes, this is possible. You can insert whatever information about the file you want into LocalStorage, provided you serialize it to one of the primitive types supported. You can also serialize the whole file into LocalStorage and retrieve that later if you want, but there are limitations on the size of the file depending on browser.
The following shows how to achieve this using two different approaches:
(function () {
  // localStorage with image
  var storageFiles = JSON.parse(localStorage.getItem("storageFiles")) || {},
      elephant = document.getElementById("elephant"),
      storageFilesDate = storageFiles.date,
      date = new Date(),
      todaysDate = (date.getMonth() + 1).toString() + date.getDate().toString();

  // Compare date and create localStorage if it's not existing/too old
  if (typeof storageFilesDate === "undefined" || storageFilesDate < todaysDate) {
    // Take action when the image has loaded
    elephant.addEventListener("load", function () {
      var imgCanvas = document.createElement("canvas"),
          imgContext = imgCanvas.getContext("2d");
      // Make sure canvas is as big as the picture
      imgCanvas.width = elephant.width;
      imgCanvas.height = elephant.height;
      // Draw image into canvas element
      imgContext.drawImage(elephant, 0, 0, elephant.width, elephant.height);
      // Save image as a data URL
      storageFiles.elephant = imgCanvas.toDataURL("image/png");
      // Set date for localStorage
      storageFiles.date = todaysDate;
      // Save as JSON in localStorage
      try {
        localStorage.setItem("storageFiles", JSON.stringify(storageFiles));
      } catch (e) {
        console.log("Storage failed: " + e);
      }
    }, false);
    // Set initial image src
    elephant.setAttribute("src", "elephant.png");
  } else {
    // Use image from localStorage
    elephant.setAttribute("src", storageFiles.elephant);
  }

  // Getting a file through XMLHttpRequest as an arraybuffer and creating a Blob
  var rhinoStorage = localStorage.getItem("rhino"),
      rhino = document.getElementById("rhino");
  if (rhinoStorage) {
    // Reuse existing Data URL from localStorage
    rhino.setAttribute("src", rhinoStorage);
  } else {
    // Create XHR and FileReader objects
    var xhr = new XMLHttpRequest(),
        blob,
        fileReader = new FileReader();
    xhr.open("GET", "rhino.png", true);
    // Set the responseType to arraybuffer. "blob" is an option too, rendering BlobBuilder unnecessary, but the support for "blob" is not widespread enough yet
    xhr.responseType = "arraybuffer";
    xhr.addEventListener("load", function () {
      if (xhr.status === 200) {
        // Create a blob from the response
        blob = new Blob([xhr.response], {type: "image/png"});
        // onload needed since Google Chrome doesn't support addEventListener for FileReader
        fileReader.onload = function (evt) {
          // Read out file contents as a Data URL
          var result = evt.target.result;
          // Set image src to Data URL
          rhino.setAttribute("src", result);
          // Store Data URL in localStorage
          try {
            localStorage.setItem("rhino", result);
          } catch (e) {
            console.log("Storage failed: " + e);
          }
        };
        // Load blob as Data URL
        fileReader.readAsDataURL(blob);
      }
    }, false);
    // Send XHR
    xhr.send();
  }
})();