I am coding a YouTube video downloader Chrome extension, but YouTube serves the video and audio as separate streams. How can I combine the audio file and the video file I received as Blobs and turn them into a single video with sound?
/**
 * Download a URL as a Blob.
 *
 * @param {string} urlToSend - URL of the resource to fetch.
 * @returns {Promise<Blob>} Resolves with the response body as a Blob;
 *   rejects on network failure or a non-2xx HTTP status.
 */
async function downloadFile(urlToSend) {
  return new Promise((resolve, reject) => {
    var req = new XMLHttpRequest();
    req.open("GET", urlToSend, true);
    req.responseType = "blob";
    req.onload = function () {
      // Resolve only on success: the original resolved even for 404/500
      // responses, which fed an error page into the "video" Blob downstream.
      if (req.status >= 200 && req.status < 300) {
        resolve(req.response);
      } else {
        reject(new Error("HTTP " + req.status + " for " + urlToSend));
      }
    };
    // The original promise never settled on a network error, leaving the
    // caller's `await` hanging forever.
    req.onerror = function () {
      reject(new Error("Network error while fetching " + urlToSend));
    };
    req.send();
  });
}
// NOTE(review): this is the root cause of the missing audio. A container
// file (mp4/webm) has internal structure; `new Blob([v, a])` merely places
// the audio bytes after the video bytes -- it does NOT multiplex the two
// streams. Players parse the leading mp4 and ignore the appended bytes, so
// the result plays silently. Proper fixes: mux with a real muxer (e.g.
// ffmpeg.wasm), or play both streams through MediaSource with one
// SourceBuffer per stream.
async function zfc() {
var v = await downloadFile('/videoplayback.mp4')
var a = await downloadFile('/videoplayback.weba')
// Byte concatenation, not muxing -- see NOTE above.
let newBlob = new Blob([v, a], { type: 'video/mp4' })
var as = document.createElement('a')
as.href = window.URL.createObjectURL(newBlob)
// NOTE(review): `download` should be a plain filename (e.g. "video.mp4"),
// not a second object URL -- as written the saved file gets a URL-like name.
as.download = window.URL.createObjectURL(newBlob)
console.log(as)
console.log(newBlob)
// as.click()
// Attach a <video> for a quick visual check of the combined Blob.
var c = document.createElement('video')
c.src = window.URL.createObjectURL(newBlob)
document.body.appendChild(c)
}
zfc()
I tried merging with new blob but the video still has no sound. Can you please help?
Example video link:
https://rr7---sn-u0g3uxax3-xncs.googlevideo.com/videoplayback?expire=1641956798&ei=XvHdYbG8MI2qx_AP14yRoAQ&ip=95.2.13.77&id=o-APHbyEMFJZdr7FwyLDOkQWqycmDmo9oy8bSvx7qP4z-P&itag=313&aitags=133%2C134%2C135%2C136%2C137%2C160%2C242%2C243%2C244%2C247%2C248%2C271%2C278%2C313&source=youtube&requiressl=yes&mh=YY&mm=31%2C29&mn=sn-u0g3uxax3-xncs%2Csn-hgn7yn76&ms=au%2Crdu&mv=m&mvi=7&pl=21&initcwndbps=88750&vprv=1&mime=video%2Fwebm&ns=O-4SxebNzTxani0g_ScQEtMG&gir=yes&clen=589586219&dur=347.800&lmt=1638064072881015&mt=1641934876&fvip=2&keepalive=yes&fexp=24001373%2C24007246&c=WEB&txp=4532434&n=hBnxjZJEX82hOJ&sparams=expire%2Cei%2Cip%2Cid%2Caitags%2Csource%2Crequiressl%2Cvprv%2Cmime%2Cns%2Cgir%2Cclen%2Cdur%2Clmt&sig=AOq0QJ8wRQIhAIu0SR_UsiQyUpJIkL_erKc_dElHk-1rwJMCI1486YaSAiBkH4jg8WHzRvEDsxnTTheBM_f1KsBFzqLiIUFJAIKh5w%3D%3D&lsparams=mh%2Cmm%2Cmn%2Cms%2Cmv%2Cmvi%2Cpl%2Cinitcwndbps&lsig=AG3C_xAwRAIgapuFt6YijG3nAVfbULkJq2_uAwcjOnZpd0ZNUo1h5NMCIGgJh22ksRMeMOUkhhQUlRapjqa4DhVv-KfcfnYhkW8l
Example sound link:
https://rr7---sn-u0g3uxax3-xncs.googlevideo.com/videoplayback?expire=1641956798&ei=XvHdYbG8MI2qx_AP14yRoAQ&ip=95.2.13.77&id=o-APHbyEMFJZdr7FwyLDOkQWqycmDmo9oy8bSvx7qP4z-P&itag=251&source=youtube&requiressl=yes&mh=YY&mm=31%2C29&mn=sn-u0g3uxax3-xncs%2Csn-hgn7yn76&ms=au%2Crdu&mv=m&mvi=7&pl=21&initcwndbps=88750&vprv=1&mime=audio%2Fwebm&ns=O-4SxebNzTxani0g_ScQEtMG&gir=yes&clen=5822955&dur=347.821&lmt=1638059244799001&mt=1641934876&fvip=2&keepalive=yes&fexp=24001373%2C24007246&c=WEB&txp=4532434&n=hBnxjZJEX82hOJ&sparams=expire%2Cei%2Cip%2Cid%2Citag%2Csource%2Crequiressl%2Cvprv%2Cmime%2Cns%2Cgir%2Cclen%2Cdur%2Clmt&sig=AOq0QJ8wRQIgaqKAjgRHlNms4IMVKwGJmRb2DOl7slWujc2OeIqIlSkCIQDvVhAPmxgLg0g2WvrgjB0iNNnCyDbyRQQvu5ODx4PLXA%3D%3D&lsparams=mh%2Cmm%2Cmn%2Cms%2Cmv%2Cmvi%2Cpl%2Cinitcwndbps&lsig=AG3C_xAwRAIgapuFt6YijG3nAVfbULkJq2_uAwcjOnZpd0ZNUo1h5NMCIGgJh22ksRMeMOUkhhQUlRapjqa4DhVv-KfcfnYhkW8l
I am also currently working on a YouTube video downloader extension for Firefox. If you are using ytdl-core, the response object contains a link that has both in res.player_response.streaming_data.formats[0]. However, there is only one such link, which doesn't allow for users to select their preferred resolution, so being able to merge the two would be extremely helpful.
Also if you are using youtube-dl or anything similar to such the response object should be the same or very similar to ytdl-core's
I've currently tried every possible way to do this, but I cannot get it to work, despite reading every related question on the internet...
I'm simply trying to download an mp3 arrayBuffer that i GET from an url with the module xmlHttpRequest from my node server code with the intent to then writing the buffer to an mp3 file, here is the code:
// Download an mp3 as binary data via the "xmlhttprequest" npm package and
// write it to disk.
const endpoint = "https://cdns-preview-a.dzcdn.net/stream/c-ae4124ee0e63b9f6abffddb36b9695cf-2.mp3";
var fs = require("fs"); // used below but never required in the original snippet
var XMLHttpRequest = require("xmlhttprequest").XMLHttpRequest;
var oReq = new XMLHttpRequest();
oReq.open("GET", endpoint, true);
// NOTE: the "xmlhttprequest" npm package does not implement binary
// responseTypes ("arraybuffer"/"blob") -- req.response stays empty, which is
// exactly the symptom described below. Prefer https.get (see the working
// solution further down) or fetch.
oReq.responseType = "arraybuffer";
oReq.onload = function (oEvent) {
  if (this.status != 200) {
    console.log(this.status);
    return; // don't write an error response body into the mp3 file
  }
  console.log(oReq.response);
  var uInt8Array = new Uint8Array(oReq.response);
  console.log(uInt8Array);
  var dest = "1.mp3";
  var stream = fs.createWriteStream(dest);
  stream.write(uInt8Array);
  stream.end();
}; // BUG FIX: the original had an extra closing brace here (a syntax error)
oReq.send();
oReq.response is always empty, no matter what I type in oReq.responseType(arraybuffer, blob).
if I try to write oReq.responseText, it's always going to be some scuffed encoding because it was translated to text.
Can you give me some advice — is there some underlying layer that I don't understand? Is it possible to achieve what I want?
Found a solution with http get instead of xmlHttpRequest:
// Working approach: stream the response with https.get and concatenate the
// raw Buffer chunks, which preserves the binary data exactly.
// NOTE(review): assumes `https` and `fs` have been required earlier in the
// real file -- confirm, or add the two require() lines.
const endpointe = "https://cdns-preview-a.dzcdn.net/stream/c-ae4124ee0e63b9f6abffddb36b9695cf-2.mp3";
https.get(endpointe, (res) => {
  // BUG FIX: `datatest` was an implicit global in the original; declare it
  // locally to the request callback.
  const datatest = [];
  res.on('data', function (chunk) {
    datatest.push(chunk);
    console.log(chunk);
  });
  // The whole response has been received; join the chunks and write to disk.
  res.on('end', () => {
    var dest = "test.mp3";
    var stream = fs.createWriteStream(dest);
    var buffer = Buffer.concat(datatest);
    stream.write(buffer);
    stream.end();
  });
}).on('error', (e) => {
  console.error(e);
});
I'm making a video streaming service and I'm having trouble recreating the source stream on the client.
I have made a camera page that sends video data chunks to the server along with a video index, the server then stores this data chunk on the harddisk for the client to download. I can retrieve the video data chunk from the client by calling the url:
/Lessen/LesStreamPart/{streamid}?Index={index}
Explanation:
hub.server.join(current_lesid);
When a client joins the stream the page will start to receive updates about the stream by SignalR:
hub.client.updateLesStream = function (lesid, lesstreamid, contenttype, index, duration)
When a update is received, the page checks if it has already setup the MediaSource control for that stream, if not, because it is the first time, the page will start the stream:
function startStream()
When the stream is started the page will setup the MediaSource object for the video element. Then wait till the MediaSource object is instantiated.
function openStream()
After the MediaSource object has been instantiated the page will start populating the MediaSource object with the Mimetype information, after that it will load the first part of the video stream and will append it to the MediaSource object.
function loadChunks()
Once the MediaSource update has finished, the page will start loading the remaining video parts.
hub.client.updateLesStream = function (lesid, lesstreamid, contenttype, index, duration)
When the camera adds a new chunk, the page will be signalled again using SignalR. Because the streamid will match the page will then continue with loading the newer chunks by calling:
function loadChunks()
JS:
// ===== Stream-player state (version 1) =====
// Shared mutable state used by startStream/openStream/loadChunks and the
// SignalR callbacks below.
var hub = $.connection.lesHub; // The SignalR hub proxy
var buffering = false; // Semaphore: true while a chunk fetch/append is in flight
var video; // The <video> element (set in startStream)
var mediaSource; // The MediaSource feeding the video element
var sourceBuffer; // The MediaSource's single SourceBuffer
var current_lesid = document.querySelector('#LesId').value; // Lesson id from the hidden input
var current_lesstreamid; // Current stream id (set in updateLesStream)
var current_contenttype; // Current video content type (mimetype/codec string)
var current_index; // Index of the last chunk appended
var current_indexlength; // Number of chunks known to exist on the server
// Creates a fresh MediaSource, wires up its "open" events and attaches it
// to the <video> element. Invoked when a new stream id is announced.
function startStream() {
    mediaSource = new MediaSource();

    // Register both the WebKit-prefixed and the standard event name so
    // older and modern browsers take the same path into openStream.
    mediaSource.addEventListener('webkitsourceopen', openStream, false);
    mediaSource.addEventListener('sourceopen', openStream, false);

    // Point the player at the MediaSource through an object URL.
    video = document.querySelector('video#VideoPlayerElement');
    video.src = URL.createObjectURL(mediaSource);
}
// Called when the MediaSource opens: creates the SourceBuffer with the
// announced mimetype, then fetches chunk 0 and appends it. Further chunks
// are pulled by the "updateend" listener registered below.
function openStream() {
// Set the buffering semaphore
buffering = true;
// Create the SourceBuffer for the announced content type
sourceBuffer = mediaSource.addSourceBuffer(current_contenttype);
// If there are any video chunks
if (current_indexlength > 0) {
// Load the first video chunk
var url = "/Lessen/LesStreamPart/" + current_lesstreamid +"?Index=0";
var req = new XMLHttpRequest();
req.responseType = "arraybuffer";
req.open("GET", url, true);
req.onload = function () {
// Append response to the sourcebuffer (appendBuffer is asynchronous)
var resp = req.response;
var array = new Uint8Array(resp);
sourceBuffer.appendBuffer(array);
// Set the current index to 0
current_index = 0;
// Every completed append triggers loadChunks to fetch the next chunk.
// NOTE(review): loadChunks is ALSO called directly from updateLesStream,
// which can race with a pending append -- the author's EDIT below points
// at exactly this double invocation.
sourceBuffer.addEventListener("updateend", loadChunks);
}
req.send();
}
else {
// No chunks yet: release the buffering semaphore
buffering = false;
}
}
// Fetches the next chunk (if any) and appends it to the SourceBuffer.
// Driven by the SourceBuffer's "updateend" event (registered in openStream)
// and by updateLesStream when new chunks are announced.
function loadChunks() {
// Set the buffering semaphore
buffering = true;
// Calculate the index of the next chunk
var newindex = current_index + 1;
// Does that chunk exist on the server yet?
if (newindex < current_indexlength)
{
// Load new video chunk
var url = "/Lessen/LesStreamPart/" + current_lesstreamid + "?Index=" + newindex;
var req = new XMLHttpRequest();
req.responseType = "arraybuffer";
req.open("GET", url, true);
req.onload = function () {
// Append response to the sourcebuffer
var resp = req.response;
var array = new Uint8Array(resp);
sourceBuffer.appendBuffer(array);
// Set the current index to newindex
current_index = newindex;
// BUG FIX: the original called loadChunks() recursively right here.
// appendBuffer is asynchronous, so that call raced with the still-pending
// update (InvalidStateError: SourceBuffer is updating) and duplicated the
// work already scheduled by the "updateend" listener. Let "updateend"
// drive the next load instead.
}
req.send();
}
else {
// No newer chunk available: release the buffering semaphore
buffering = false;
}
}
// Start recording callbacks
hub.client.startLesStream = function (lesid, lesstreamid, contenttype) {
// This is called while there are no video data chunks, so we can ignore it.
};
// Update recording callbacks: fired by the server each time the camera
// uploads a new chunk.
hub.client.updateLesStream = function (lesid, lesstreamid, contenttype, index, duration) {
// Check if update is for our lesid (not actually needed)
// NOTE(review): `==` tolerates string/number id mismatches -- presumably
// intentional; confirm the ids' types.
if (current_lesid == lesid) {
// Check if buffering
if (buffering) {
// Busy: re-dispatch this same notification locally in 100ms (a simple
// polling retry until the buffering semaphore is released).
setTimeout(function () {
hub.client.updateLesStream(lesid, lesstreamid, contenttype, index, duration);
}, 100);
}
else {
// Not buffering, so we can begin processing.
// When the streamid is different reload the stream; when the page starts
// "current_lesstreamid" is undefined, so the video is (re)loaded.
if (current_lesstreamid == lesstreamid) {
// Same stream: record the new chunk count and pull the next chunk.
// NOTE(review): this direct call can overlap the "updateend"-driven
// loadChunks (see openStream) -- the author's EDIT reports errors from
// exactly this.
current_indexlength = index + 1;
loadChunks();
}
else {
// Different stream started: reset state and rebuild the MediaSource.
current_lesstreamid = lesstreamid;
current_contenttype = contenttype;
current_indexlength = index + 1;
startStream();
}
}
}
};
// Stop recording callbacks
hub.client.stopLesStream = function (lesid, lesstreamid, contenttype) {
// Check if update is for our lesid (not actually needed)
if (current_lesid == lesid) {
// Check if stream is currently shown
if (current_lesstreamid == lesstreamid) {
// Signal the MediaSource that no more data will be appended.
mediaSource.endOfStream();
}
}
};
// Start SignalR and join the room for this lesson id.
$.connection.hub.start().done(function () {
// And join the room
hub.server.join(current_lesid);
});
HTML:
<input type="hidden" id="LesId" value="#(Model.Id)" />
<video autoplay controls id="VideoPlayerElement"></video>
OUTPUT:
The page doesn't show any errors, but I do get a broken video icon in the video element. Does anyone know what this might be?
I read in a different stackoverflow that it might be the VP8 codec that needs to be used, I changed it, but it remains not working.
EDIT:
I changed the javascript code a bit. It turned out I called the "loadChunks" function, but it was already called by the "updateend" event of the "sourceBuffer". I then got a lot more errors.
I changed the way I communicate with the server to "$.get();". It solved the errors, but I still get no image.
// ===== Stream-player state (version 2, after the author's EDIT) =====
// Same shared state as version 1.
var hub = $.connection.lesHub; // The SignalR hub proxy
var buffering = false; // Semaphore: true while a chunk fetch/append is in flight
var video; // The <video> element (set in startStream)
var mediaSource; // The MediaSource feeding the video element
var sourceBuffer; // The MediaSource's single SourceBuffer
var current_lesid = document.querySelector('#LesId').value; // Lesson id from the hidden input
var current_lesstreamid; // Current stream id (set in updateLesStream)
var current_contenttype; // Current video content type (mimetype/codec string)
var current_index; // Index of the last chunk appended
var current_indexlength; // Number of chunks known to exist on the server
// Builds a new MediaSource for the player and hooks its open events.
// Runs each time updateLesStream announces a different stream id.
function startStream() {
    mediaSource = new MediaSource();

    // Register the prefixed and the standard event name so both legacy
    // WebKit builds and current browsers reach openStream.
    mediaSource.addEventListener('webkitsourceopen', openStream, false);
    mediaSource.addEventListener('sourceopen', openStream, false);

    // Hand the MediaSource to the <video> element as an object URL.
    video = document.querySelector('video#VideoPlayerElement');
    video.src = URL.createObjectURL(mediaSource);
}
// Called when the MediaSource opens: creates the SourceBuffer, registers
// the "updateend" listener that drives subsequent chunk loads, then fetches
// and appends chunk 0.
function openStream() {
// Set the buffering semaphore
buffering = true;
// Create the SourceBuffer for the announced content type
sourceBuffer = mediaSource.addSourceBuffer(current_contenttype);
// Each completed append pulls the next chunk
sourceBuffer.addEventListener("updateend", loadChunks);
// If there are any video chunks
if (current_indexlength > 0) {
// BUG FIX: $.get() delivers the response body as a decoded string --
// jQuery has no way to return a raw ArrayBuffer -- so new Uint8Array(resp)
// produced garbage and the SourceBuffer never received valid media data
// ("no image"). Fetch the chunk with XHR as an ArrayBuffer instead.
var url = "/Lessen/LesStreamPart/" + current_lesstreamid + "?Index=0";
var req = new XMLHttpRequest();
req.open("GET", url, true);
req.responseType = "arraybuffer";
req.onload = function () {
// Append response to the sourcebuffer
sourceBuffer.appendBuffer(new Uint8Array(req.response));
// Set the current index to 0
current_index = 0;
};
req.send();
}
else {
// No chunks yet: release the buffering semaphore
buffering = false;
}
}
// Fetches the next chunk after the previous append completes ("updateend")
// or when updateLesStream raises the known chunk count.
function loadChunks() {
// Set the buffering semaphore
buffering = true;
// Calculate the index of the next chunk
var newindex = current_index + 1;
// Does that chunk exist on the server yet?
if (newindex < current_indexlength) {
// BUG FIX: $.get() converts the body to a string, corrupting the binary
// media bytes before they reach the SourceBuffer. Use XHR with
// responseType "arraybuffer" so the chunk arrives untouched.
var url = "/Lessen/LesStreamPart/" + current_lesstreamid + "?Index=" + newindex;
var req = new XMLHttpRequest();
req.open("GET", url, true);
req.responseType = "arraybuffer";
req.onload = function () {
// Append response to the sourcebuffer
sourceBuffer.appendBuffer(new Uint8Array(req.response));
// Set the current index to newindex
current_index = newindex;
};
req.send();
}
else {
// No newer chunk available: release the buffering semaphore
buffering = false;
}
}
// Start recording callbacks
hub.client.startLesStream = function (lesid, lesstreamid, contenttype) {
// This is called while there are no video data chunks, so we can ignore it.
};
// Update recording callbacks: fired by the server each time the camera
// uploads a new chunk.
hub.client.updateLesStream = function (lesid, lesstreamid, contenttype, index, duration) {
// Check if update is for our lesid (not actually needed)
if (current_lesid == lesid) {
// Check if buffering
if (buffering) {
// Busy: re-dispatch this notification locally in 100ms (simple polling
// retry until the buffering semaphore clears).
setTimeout(function () {
hub.client.updateLesStream(lesid, lesstreamid, contenttype, index, duration);
}, 100);
}
else {
// Not buffering, so we can begin processing.
// When the streamid is different reload the stream; when the page starts
// "current_lesstreamid" is undefined, so the video is (re)loaded.
if (current_lesstreamid == lesstreamid) {
// Same stream: record the new chunk count and pull the next chunk.
current_indexlength = index + 1;
loadChunks();
}
else {
// Different stream started: reset state and rebuild the MediaSource.
current_lesstreamid = lesstreamid;
current_contenttype = contenttype;
current_indexlength = index + 1;
startStream();
}
}
}
};
// Stop recording callbacks
hub.client.stopLesStream = function (lesid, lesstreamid, contenttype) {
// Check if update is for our lesid (not actually needed)
if (current_lesid == lesid) {
// Check if stream is currently shown
if (current_lesstreamid == lesstreamid) {
// Signal the MediaSource that no more data will be appended.
mediaSource.endOfStream();
}
}
};
// Start SignalR and join the room for this lesson id.
$.connection.hub.start().done(function () {
// And join the room
hub.server.join(current_lesid);
});
I found a perfect example that solves this problem in a simple way.
I am using three static files, but you can also append data from sockets or any API.
<!DOCTYPE html>
<html>
<head>
</head>
<body>
<!-- Demo: gapless playback of several mp4 fragments via MediaSource. -->
<br>
<video controls="true" autoplay="true"></video>
<script>
// Fetches each fragment as an ArrayBuffer, measures its duration with a
// throwaway <video> element, then appends the fragments one by one to a
// single SourceBuffer, advancing timestampOffset by each fragment's
// duration so they play back-to-back.
(async() => {
const mediaSource = new MediaSource();
const video = document.querySelector("video");
// video.oncanplay = e => video.play();
const urls = ["https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4", "https://raw.githubusercontent.com/w3c/web-platform-tests/master/media-source/mp4/test.mp4","https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4"];
const request = url => fetch(url).then(response => response.arrayBuffer());
// `urls.reverse()` stops at `.currentTime` : `9`
const files = await Promise.all(urls.map(request));
/*
`.webm` files
Uncaught DOMException: Failed to execute 'appendBuffer' on 'SourceBuffer': This SourceBuffer has been removed from the parent media source.
Uncaught DOMException: Failed to set the 'timestampOffset' property on 'SourceBuffer': This SourceBuffer has been removed from the parent media source.
*/
// const mimeCodec = "video/webm; codecs=opus";
// https://stackoverflow.com/questions/14108536/how-do-i-append-two-video-files-data-to-a-source-buffer-using-media-source-api/
const mimeCodec = "video/mp4; codecs=avc1.42E01E, mp4a.40.2";
// Measure each fragment's duration by loading it into an off-DOM <video>
// and reading .duration from loadedmetadata; the raw buffer is passed
// through untouched.
const media = await Promise.all(files.map(file => {
return new Promise(resolve => {
let media = document.createElement("video");
let blobURL = URL.createObjectURL(new Blob([file]));
media.onloadedmetadata = async e => {
resolve({
mediaDuration: media.duration,
mediaBuffer: file
})
}
media.src = blobURL;
})
}));
console.log(media);
mediaSource.addEventListener("sourceopen", sourceOpen);
video.src = URL.createObjectURL(mediaSource);
// Append fragments sequentially: each append waits for "updateend" before
// bumping timestampOffset and starting the next one.
async function sourceOpen(event) {
if (MediaSource.isTypeSupported(mimeCodec)) {
const sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
for (let chunk of media) {
await new Promise(resolve => {
sourceBuffer.appendBuffer(chunk.mediaBuffer);
sourceBuffer.onupdateend = e => {
// One-shot handler: clear it so the next iteration installs its own.
sourceBuffer.onupdateend = null;
sourceBuffer.timestampOffset += chunk.mediaDuration;
console.log(mediaSource.duration);
resolve()
}
})
}
mediaSource.endOfStream();
}
else {
console.warn(mimeCodec + " not supported");
}
};
})()
</script>
</body>
</html>
It seems to be a codec issue and the method for reading data. When you receive a video blob, you need to convert/store it using a FileReader, this worked for me. For best codec support I needed to use the VP8 codec (please inform me if you know a better one).
This is my working example where I use a MediaRecorder to record the webcam then paste the video blobs into a MediaSource.
// video1 shows the live webcam; video2 replays the recorded chunks through
// a MediaSource fed by the MediaRecorder in sourceOpen below.
const video1 = document.getElementById('video1');
const video2 = document.getElementById('video2');
const mediaSource = new MediaSource();
video2.src = URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', sourceOpen);
// Handler for the MediaSource "sourceopen" event: starts the webcam,
// records it in 5-second webm/VP8 chunks and feeds each chunk into the
// MediaSource so video2 plays a (delayed) copy of video1.
function sourceOpen(openargs) {
    navigator.mediaDevices
        .getUserMedia({ audio: false, video: true })
        .then(function (stream) {
            video1.srcObject = stream;
            var options = { mimeType: 'video/webm; codecs=vp8' };
            var mediaRecorder = new MediaRecorder(stream, options);
            var sourceBuffer = null;
            mediaRecorder.ondataavailable = function (e) {
                // Lazily create the SourceBuffer with the recorder's actual
                // mimetype so the codec strings always match.
                if (sourceBuffer == null) {
                    sourceBuffer = mediaSource.addSourceBuffer(mediaRecorder.mimeType);
                    window.sourceBuffer = sourceBuffer;
                }
                // Read the Blob chunk into an ArrayBuffer before appending;
                // SourceBuffer.appendBuffer does not accept Blobs.
                var reader = new FileReader();
                reader.addEventListener("loadend", function () {
                    var arr = new Uint8Array(reader.result);
                    sourceBuffer.appendBuffer(arr);
                });
                reader.readAsArrayBuffer(e.data);
            };
            mediaRecorder.start(5000);
        })
        // BUG FIX: the getUserMedia promise was left floating, so a denied
        // camera permission (or missing device) failed silently. Surface it.
        .catch(function (err) {
            console.error('getUserMedia failed:', err);
        });
}
/* Keep both players small so the live feed and the delayed copy fit side by side. */
video {
width: 320px;
height: 180px;
}
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title></title>
</head>
<body>
<!-- video1: live webcam preview; video2: MediaSource playback of the recorded chunks -->
<video id="video1" controls autoplay muted></video><br />
<video id="video2" controls autoplay muted></video>
</body>
</html>
I'm currently editing my mp3 file with multiple effects like so
// Route the audio source node through each filter in series, then to the
// output: source -> filters1[0] -> filters1[1] -> ... -> destination.
// NOTE(review): assumes `source`, `filters1` and `audioCtx` are defined
// elsewhere on the page; `mainVerse` is unused in this snippet -- confirm
// it is needed.
var mainVerse = document.getElementById('audio1');
var s = source;
// Detach the direct source->destination connection before inserting filters.
source.disconnect(audioCtx.destination);
for (var i in filters1) {
s.connect(filters1[i]);
s = filters1[i];
}
s.connect(audioCtx.destination);
The mp3 plays on the web with the filters applied. Is it possible to create and download a new mp3 file with these effects, using the Web Audio API or any JavaScript library that can write to an mp3 container? If not, what is the best way to solve this on the web?
UPDATE - Using OfflineAudioContext
Using the sample code from https://developer.mozilla.org/en-US/docs/Web/API/OfflineAudioContext/oncomplete
I've tried using the offline node like so;
// Render 'Song1.mp3' through an OfflineAudioContext, then play the rendered
// buffer and try to capture it with Recorder.js.
// NOTE(review): `osource`, `request`, `myBuffer` and `rec` are implicit
// globals in this snippet.
var audioCtx = new AudioContext();
// 2 channels, 40 seconds at 44.1 kHz.
var offlineCtx = new OfflineAudioContext(2,44100*40,44100);
osource = offlineCtx.createBufferSource();
function getData() {
request = new XMLHttpRequest();
request.open('GET', 'Song1.mp3', true);
request.responseType = 'arraybuffer';
request.onload = function() {
var audioData = request.response;
// Decode the mp3 into an AudioBuffer, then render it offline.
audioCtx.decodeAudioData(audioData, function(buffer) {
myBuffer = buffer;
osource.buffer = myBuffer;
osource.connect(offlineCtx.destination);
osource.start();
//source.loop = true;
offlineCtx.startRendering().then(function(renderedBuffer) {
console.log('Rendering completed successfully');
// Play the rendered buffer through a fresh (online) context.
var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
var song = audioCtx.createBufferSource();
song.buffer = renderedBuffer;
song.connect(audioCtx.destination);
song.start();
// NOTE(review): likely why the download is empty -- the Recorder is
// created only after playback has started, and exportWAV is called
// immediately, before any audio has been processed/recorded. The answer
// below suggests attaching the Recorder to `osource` before
// startRendering; verify that Recorder.js works with an offline node.
rec = new Recorder(song, {
workerPath: 'Recorderjs/recorderWorker.js'
});
rec.exportWAV(function(e){
rec.clear();
Recorder.forceDownload(e, "filename.wav");
});
}).catch(function(err) {
console.log('Rendering failed: ' + err);
// Note: The promise should reject when startRendering is called a second time on an OfflineAudioContext
});
});
}
request.send();
}
// Run getData to start the process off
getData();
I'm still getting the recorder to download an empty file; I'm using the song source as the source for the recorder. The song plays fine with this code, but the recorder doesn't download it.
Use https://github.com/mattdiamond/Recorderjs to record a .wav file. Then use https://github.com/akrennmair/libmp3lame-js to encode it to .mp3.
There's a nifty guide here, if you need a hand: http://audior.ec/blog/recording-mp3-using-only-html5-and-javascript-recordmp3-js/
UPDATE
Try moving
// Create the Recorder (its encoding runs in the worker at workerPath).
// NOTE(review): here it is still attached to `song`; the corrected version
// in this answer attaches it to `osource` before rendering starts.
rec = new Recorder(song, {
workerPath: 'Recorderjs/recorderWorker.js'
});
so that it is located above the call to start rendering, and connect it to osource instead, like so:
// Attach the Recorder to the offline source BEFORE rendering begins so the
// capture spans the whole render.
// NOTE(review): Recorder.js taps its input via a ScriptProcessor on an
// online context -- verify it actually records from an OfflineAudioContext
// node.
rec = new Recorder(osource, {
workerPath: 'Recorderjs/recorderWorker.js'
});
osource.connect(offlineCtx.destination);
osource.start();
offlineCtx.startRendering().then(function(renderedBuffer) {
.....
I need to capture microphone audio in IE10. So far I have two semi-working solutions:
getUserMedia from Microsoft's experimental WebRTC plugin:
http://www.html5labs.com/prototypes/media-capture-api-(2nd-updated)/media-capture-api-(2nd-update)/info
The issue with this is that while I can capture and replay the audio in the browser, I cannot send the audio to the server. In particular, it is not clear how to extract the audio data from the "blob" object:
// Callback fired when the MS media-capture plugin stops recording.
// NOTE(review): the blob logs as {} because its payload is not enumerable
// through console.log/dir, yet the plugin's own Play() accepts it -- the
// audio data is present but not reachable via standard Blob APIs.
function msStopRecordCallback(blob) {
console.log(blob) // outputs {}
console.dir(blob) // outputs {}
playMediaObject.Play(blob); // This works!
}
jRecorder: http://www.sajithmr.me/jrecorder-jquery The issue with this is that it relies on Flash to capture the audio, which is something I would like to avoid.
Are there any other ways to capture audio in IE10?
I recognize that my answer is a bit late, but...
You may upload a blob to a server as following (Javascript):
// Uploads a recorded audio Blob to the server as multipart/form-data.
// NOTE(review): CustomXMLHttpRequest and its `onpartreceived` callback are
// not standard browser APIs -- presumably a project-local XHR wrapper
// (possibly from the MS media-capture prototype). Confirm it is loaded on
// the page, or substitute the native XMLHttpRequest with `onload`.
function saveBlob(blob)
{
var uploader = new CustomXMLHttpRequest();
uploader.onpartreceived = function (response)
{
// TODO: handle the server response here
};
// Build the upload endpoint relative to the current page URL.
var base = window.location.toString();
var uploadService = base.substr(0, base.lastIndexOf("/")) + "/api/upload";
uploader.open("POST", uploadService, true);
uploader.responseType = "text";
// Send the blob as a file field named "fname" with filename "audio.wav".
var form = new FormData();
form.append("fname", blob, "audio.wav");
uploader.send(form);
}
On the server side, you may treat this blob as a file attachment, e.g. (C#):
// Receives a multipart/form-data upload (e.g. the recorded audio blob) and
// stores each attached file under ~/App_Data via the stream provider.
public class UploadController : ApiController
{
    // Returns 200 OK when at least one file part was saved, 415 when the
    // request is not multipart or contained no file parts, 500 on failure.
    public async Task<HttpResponseMessage> PostFile()
    {
        // Check if the request contains multipart/form-data.
        if (!Request.Content.IsMimeMultipartContent())
        {
            throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);
        }

        var root = HttpContext.Current.Server.MapPath("~/App_Data");
        var provider = new MultipartFormDataStreamProvider(root);

        try
        {
            // Read the form data; the provider writes file parts to disk.
            await Request.Content.ReadAsMultipartAsync(provider);

            var fileName = "";

            // Process the uploaded files.
            foreach (var data in provider.FileData)
            {
                var file = new FileInfo(data.LocalFileName);
                // BUG FIX: the original never assigned fileName inside the
                // loop, so the success path below was unreachable and every
                // upload -- even a successful one -- returned 415.
                fileName = file.FullName;
                // TODO: handle received file here
            }

            if (string.IsNullOrEmpty(fileName))
            {
                // No file parts were present in the request.
                return Request.CreateResponse(HttpStatusCode.UnsupportedMediaType);
            }
            return Request.CreateResponse(HttpStatusCode.OK);
        }
        catch (System.Exception e)
        {
            return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, e);
        }
    }
}
Hope this will help.