Video streaming in node.js - javascript

I want to stream the video using node.js. I have followed this article. And it's working fine from local. But I need to stream video from web.
My requirement is a little different. I have to hide the original URL from the source code. So, instead of showing the original URL, I need to show my own URL like <source src="http://localhost:8888" type="video/mp4"/> instead of <source src="http://goo.gl/KgGx0s" type="video/mp4"/>.
Using the following codes
var indexPage, movie_webm, movie_mp4, movie_ogg;

// Load the whole video into memory once at startup. NOTE: this buffers the
// entire file in RAM, which is only practical for small videos.
fs.readFile(path.resolve(__dirname, "ANY_LOCAL_VIDEO.mp4"), function (err, data) {
  if (err) {
    throw err;
  }
  console.log(data.length);
  movie_mp4 = data;
});

// Serve the buffered video with HTTP byte-range support on port 8888.
http.createServer(function (req, res) {
  var reqResource = url.parse(req.url).pathname;

  // The file is read asynchronously above, so a request can arrive before
  // the buffer is populated; the original crashed on movie_mp4.length here.
  if (!movie_mp4) {
    res.writeHead(503, { "Retry-After": "1", "Content-Type": "text/plain" });
    res.end("Video not loaded yet");
    return;
  }

  var total = movie_mp4.length;
  var range = req.headers.range;

  // Browsers typically issue the FIRST request without a Range header; the
  // original crashed on range.replace(...) in that case. Answer with the
  // full resource (200) and advertise range support.
  if (!range) {
    res.writeHead(200, {
      "Accept-Ranges": "bytes",
      "Content-Length": total,
      "Content-Type": "video/mp4"
    });
    res.end(movie_mp4);
    return;
  }

  // Parse "bytes=start-end"; an omitted end means "to the last byte".
  var positions = range.replace(/bytes=/, "").split("-");
  var start = parseInt(positions[0], 10);
  var end = positions[1] ? parseInt(positions[1], 10) : total - 1;
  var chunksize = (end - start) + 1;

  res.writeHead(206, {
    "Content-Range": "bytes " + start + "-" + end + "/" + total,
    "Accept-Ranges": "bytes",
    "Content-Length": chunksize,
    "Content-Type": "video/mp4"
  });
  // slice end is exclusive, hence end + 1 to include the last requested byte.
  res.end(movie_mp4.slice(start, end + 1), "binary");
}).listen(8888);
It's working fine for local videos. But if I give fs.readFile("http://goo.gl/KgGx0s", function (err, data) { instead of above code it's not working. I tried by changing fs.readFile to fs.filepath but still not working. I am getting this error
c:\git\personal\streaming-video-html5\server.js:13
throw err;
^
Error: ENOENT, open 'c:\git\personal\streaming-video-html5\http:\goo.gl\KgGx0s'
This may be because the path is changing. What approach should I follow? Is it possible?

What you're looking for is something like the request module.
You can simply fetch your remote video and then pipe it directly to your response.
What I'd do is first make a HEAD request to the remote video in order to confirm that it accepts byte ranges and also to get the content length of the video.
Once you've got that information you can set your response headers, with a HTTP status code of 206, indicating that this is a partial response.
Now that you've written your response headers you create a request to the remote video, sending along the byte-range in the original request and pipe that directly to your response (res);
var fileUrl = 'https://ia800300.us.archive.org/1/items/night_of_the_living_dead/night_of_the_living_dead_512kb.mp4';
var range = req.headers.range;
var positions, start, end, total, chunksize;

// HEAD request first: confirms the remote host serves byte ranges and gives
// us the total content length needed for the Content-Range header.
request({
  url: fileUrl,
  method: 'HEAD'
}, function (error, response, body) {
  // The original ignored `error`: a failed HEAD left `response` undefined
  // and crashed on `response.headers`.
  if (error || !response) {
    res.writeHead(502, { "Content-Type": "text/plain" });
    res.end("Unable to reach remote video");
    return;
  }
  setResponseHeaders(response.headers);
  pipeToResponse();
});

// Parse the client's requested byte range and write the 206 (Partial
// Content) response headers based on the remote file's metadata.
function setResponseHeaders(headers) {
  positions = range.replace(/bytes=/, "").split("-");
  start = parseInt(positions[0], 10);
  // content-length arrives as a string; parse it so `total - 1` and the
  // chunk arithmetic below are reliably numeric.
  total = parseInt(headers['content-length'], 10);
  end = positions[1] ? parseInt(positions[1], 10) : total - 1;
  chunksize = (end - start) + 1;
  res.writeHead(206, {
    "Content-Range": "bytes " + start + "-" + end + "/" + total,
    "Accept-Ranges": "bytes",
    "Content-Length": chunksize,
    "Content-Type": "video/mp4"
  });
}

// Re-request the remote video, forwarding the client's byte range, and
// stream the remote response straight into our response.
function pipeToResponse() {
  var options = {
    url: fileUrl,
    headers: {
      range: "bytes=" + start + "-" + end,
      connection: 'keep-alive'
    }
  };
  request(options).pipe(res);
}
You can probably cache the response from the HEAD request so that you don't need to do it each time a byte-range is requested.

Related

NodeJS trying to use a URL inside of a fs.createfilestream

I'm trying to do a post request onto my api, the api works perfectly ( I am able to post files, but not through a url), but now I'm trying to post through an url.
this is the code I have now, I removed some lines that aren't relevant to the question or were for testing.
// Question code (reproduction case): fetch an auth token, then try to
// upload the file located at `up.url` to the API.
request({
url: url + "gettoken"
, json: true
}, function (error, response, body) {
// NOTE(review): `user` is assigned without var/let/const — an implicit
// global — and `error` is never checked, so a failed token request would
// crash below when reading `user.token`.
user = body;
// BUG (the subject of this question): fs.createReadStream expects a local
// file path, not an HTTP URL; Node treats the URL as a literal file name,
// which is why this fails.
var rs = fs.createReadStream(up.url);
var ws = request.post(url + "upload?token=" + `${user.token}&key=${user.key}&filename=$(unknown)`);
// Resume reading once the write side has drained its internal buffer.
ws.on('drain', function () {
rs.resume();
});
rs.on('end', function () {
// NOTE(review): `filename` is not defined in this excerpt — presumably
// declared in the lines the asker removed.
console.log(filename);
});
ws.on('error', function (err) {
console.error('cannot send file ' + err);
});
rs.pipe(ws);
})
Can anyone please help me.
So the idea is to upload a file that's located at up.url to another server at url + "upload?...".
Since fs.createReadStream is meant to read local files, and not URL's, you need something that can create a stream from a URL (or rather, retrieve that URL and stream the response).
You can also use request for that:
// Fetch the auth token, then stream the remote file at `up.url` into the
// upload endpoint — request.get() produces a readable stream for a URL,
// which is what fs.createReadStream cannot do.
request({
  url: url + "gettoken",
  json: true
}, function (error, response, body) {
  // The original ignored `error`; on failure `body` is undefined and the
  // template below would crash reading `user.token`.
  if (error) {
    console.error('could not fetch token: ' + error);
    return;
  }
  const user = body;
  const rs = request.get(up.url);
  const ws = request.post(url + "upload?token=" + `${user.token}&key=${user.key}&filename=$(unknown)`);
  rs.on('end', function () {
    console.log(filename);
  });
  ws.on('error', function (err) {
    console.error('cannot send file ' + err);
  });
  rs.pipe(ws);
});
Typically, file uploads work through multipart/form-data, but your code doesn't suggest that being used here. If it is, the code would become something like this:
// Build the upload endpoint first, then let `request` do the
// multipart/form-data encoding via the formData option.
const uploadUrl = url + "upload?token=" + `${user.token}&key=${user.key}&filename=$(unknown)`;
const ws = request.post(uploadUrl, {
  formData: {
    the_file: rs
  }
});
// note: no `rs.pipe(ws)` here — the formData option streams the file itself

Uploading Images using multipart/form-data with node.js using the in-build http module

So I need to send two images and an api key as part of a multipart/form-data http request to an api. I'm receiving the images from an aws s3 bucket and that's working perfectly, but whenever I try to send the image as part of the form-data I get an EPIPE http error. Somehow, the request gets canceled before all the data was received by the api. I tried the same using postman and everything works well, it's just my node program that doesn't seem able to achieve this. Please find below the code snippet:
const http = require('http')
const https = require('https')
const AWS = require('aws-sdk')
const s3 = new AWS.S3({apiVersion: '2006-03-01'});
//simple http post request, there doesn't seem to be anything wrong with it
// Promise wrapper around http/https.request.
//   protocol: 'http' selects the http module, anything else selects https
//   params:   options object passed to request() (hostname, port, path, ...)
//   postData: optional body for POST requests — a string/Buffer, or an
//             array of strings/Buffers written part by part (backward
//             compatible: single values behave exactly as before)
// Resolves with the response body (utf8 string); rejects on request errors
// or on a non-2xx status code.
const httpPromise = (protocol, params, postData) => {
  return new Promise((resolve, reject) => {
    const requestModule = protocol === 'http' ? http : https;
    const req = requestModule.request(params, res => {
      const statusCode = res.statusCode;
      if (statusCode < 200 || statusCode > 299) {
        // reject, don't throw: a throw inside this callback becomes an
        // uncaught exception instead of rejecting the returned promise.
        // (The original also interpolated `status`, which is undefined —
        // the variable is named statusCode.)
        res.resume(); // drain the response so the socket is released
        reject(new Error(`Request Failed with Status Code: ${statusCode}`));
        return;
      }
      let body = '';
      // accumulate incoming data as utf8 text
      res.setEncoding('utf8');
      res.on('data', data => body += data);
      // once all data has been received
      res.on('end', () => {
        console.log(body);
        resolve(body);
      });
    });
    // write data to a POST request; arrays are written chunk by chunk so
    // Buffer parts (e.g. raw image bytes) are not coerced into one string
    if (typeof (params.method) === 'string' && params.method === 'POST' && postData) {
      if (Array.isArray(postData)) {
        for (const part of postData) {
          req.write(part);
        }
      } else {
        req.write(postData);
      }
    }
    // surface connection-level failures as rejections
    req.on('error', err => reject(err));
    // end the request
    req.end();
  });
};
// Fetch two images from S3 and POST them (plus the api key) to the /verify
// endpoint as a hand-rolled multipart/form-data body.
const handler = async (event) => {
  // requestOption parameters
  const apiKey = '000000';
  const protocol = 'http';
  const path = '/verify';
  // set to the defined port; if undefined, default for http/https.
  // NOTE(review): `Port` and `BucketName` are not defined in this excerpt —
  // presumably injected config; confirm before running.
  const port = Port ? Port : protocol === 'http' ? 80 : 443;
  const hostname = 'www.example.com';
  const method = "POST";
  const boundary = '__X_PAW_BOUNDARY__';
  // S3 object keys for the two images
  const image1Key = 'image1Key';
  const image2Key = 'image2Key';
  // BUG FIX: the original used [image1, image2], which are only declared
  // below (temporal-dead-zone ReferenceError); the S3 keys are wanted here.
  const imageKeys = [image1Key, image2Key];
  try {
    // fetch both images in parallel
    const s3GetObjectPromises = [];
    imageKeys.forEach(key => s3GetObjectPromises.push(
      s3.getObject({ Bucket: BucketName, Key: key })
        .promise()
        .then(res => res.Body)
    ));
    const [image1, image2] = await Promise.all(s3GetObjectPromises);
    // Build the multipart body. Fixes vs. the original:
    //  - "\r\n" was missing between the second boundary and its
    //    Content-Disposition header (malformed part);
    //  - both file parts were named "image1"; the second now says "image2";
    //  - stray trailing space removed after the first "image/jpeg".
    // NOTE(review): concatenating S3 Buffers into a JS string coerces the
    // bytes through utf8 — for truly binary JPEGs, write the parts
    // separately as Buffers instead (see the accepted fix below).
    const postData = "--" + boundary + "\r\nContent-Disposition: form-data; name=\"key\"\r\n\r\n" + apiKey + "\r\n--" + boundary + "\r\nContent-Disposition: form-data; name=\"image1\"; filename=\"IMG_7264.JPG\"\r\nContent-Type: image/jpeg\r\n\r\n" + image1 + "\r\n--" + boundary + "\r\nContent-Disposition: form-data; name=\"image2\"; filename=\"IMG_7264.JPG\"\r\nContent-Type: image/jpeg\r\n\r\n" + image2 + "\r\n--" + boundary + "--";
    // Content-Length must be the BYTE length of the body, not the UTF-16
    // string length — that mismatch is what produced the EPIPE error.
    const headers = {
      "Content-Type": `multipart/form-data; charset=utf-8; boundary=${boundary}`,
      "Content-Length": `${Buffer.byteLength(postData)}`,
      "User-Agent": "Paw/3.1.7 (Macintosh; OS X/10.14.0) GCDHTTPRequest"
    };
    // the options object
    const options = { hostname, port, path, method, headers };
    const result = await httpPromise(protocol, options, postData);
    console.log(result);
    return result;
  } catch (err) {
    console.log(err);
    // either an EPIPE error or "no key was available" — rethrow to caller
    throw err;
  }
}; // FIX: the original excerpt never closed the arrow function
//execute the handler
// Run the handler and log whichever way it settles.
(async () => {
  try {
    console.log(await handler());
  } catch (err) {
    console.log(err);
  }
})();
Alright, so after lots of trying around and experimentation I figured out why the above code wasn't working. First of all, the Content-Type in the postData string should be set to image/jpeg — but that's a minor point and wasn't really the reason this didn't work.
The EPIPE or Network error was due to the fact that I set the Content-Length header to the wrong length. Instead of simply setting it to the length of the string it has to be set to the ByteLength of the string. So simply replace 'Content-Length': postData.length.toString() with 'Content-Length': Buffer.byteLength(postData).toString(). That should solve the EPIPE error.
But there is an additional issue: I'm essentially converting the whole data to one big data string (postData) and sending the whole string in one req.write(postData) operation. So apparently this is not how one should do it (again after much experimentation), instead one should send an array containing the single lines of the data and then write each item of the array to the http request. So essentially:
// instead of this string:
const postData = "--" + boundary + "\r\nContent-Disposition: form-data; name=\"key\"\r\n\r\n" + apiKey + "\r\n--" + boundary + "Content-Disposition: form-data; name=\"image1\"; filename=\"IMG_7264.JPG\"\r\nContent-Type: image/jpeg \r\n\r\n" + image1 + "\r\n--" + boundary + "\r\nContent-Disposition: form-data; name=\"image1\"; filename=\"IMG_7264.JPG\"\r\nContent-Type: image/jpeg\r\n\r\n" + image2 + "\r\n--" + boundary + "--";
// use this array:
const postData = [`--${boundary}`, `\r\nContent-Disposition: form-data; name=\"key\"\r\n\r\n`, apiKey, `\r\n--${boundary}\r\n`, `Content-Disposition: form-data; name=\"image1\"; filename=\"IMG_7264.JPG\"\r\n`, `Content-Type: image/jpeg \r\n\r\n`, image1, `\r\n--${boundary}\r\n`, `Content-Disposition: form-data; name=\"image1\"; filename=\"IMG_7264.JPG\"\r\n`, `Content-Type: image/jpeg\r\n\r\n`, image2, `\r\n--${boundary}--`];
and then in the actual request you have to write this array to the http request item by item:
// instead of simply
req.write(postData)
// do:
for(let data of postData) {
req.write(data);
}
also, make sure to add some functionality for the content-length header calculation that takes into account that the body is now stored in an array, something like this should do the job:
const postDataLength = postData.reduce((acc, curr) => acc += Buffer.byteLength(curr), 0)
And then simply set the Content-Length header attribute equal to postDataLength.
Hope this helps anyone trying to build a form-data post request from scratch instead of using a third-party library like request which would also sort this out for you.

Writing an image to file, received over an HTTP request in Node

I'm certain I'm missing something obvious, but the gist of the problem is I'm receiving a PNG from a Mapbox call with the intent of writing it to the file system and serving it to the client. I've successfully relayed the call, received a response of raw data and written a file. The problem is that my file ends up truncated no matter what path I take, and I've exhausted the answers I've found skirting the subject. I've dumped the raw response to the log, and it's robust, but any file I make tends to be about a chunk's worth of unreadable data.
Here's the code I've got at present for the file making. I tried this buffer move as a last ditch after several failed and comparably fruitless iterations. Any help would be greatly appreciated.
module.exports = function(req, res, cb) {
var cartography = function() {
return https.get({
hostname: 'api.mapbox.com',
path: '/v4/mapbox.wheatpaste/' + req.body[0] + ',' + req.body[1] + ',6/750x350.png?access_token=' + process.env.MAPBOX_API
}, function(res) {
var body = '';
res.on('data', function(chunk) {
body += chunk;
});
res.on('end', function() {
var mapPath = 'map' + req.body[0] + req.body[1] + '.png';
var map = new Buffer(body, 'base64');
fs.writeFile(__dirname + '/client/images/maps/' + mapPath, map, 'base64', function(err) {
if (err) throw err;
cb(mapPath);
})
})
});
};
cartography();
};
It is possible to rewrite your code as a more compact subroutine:
const fs = require('fs');
const https = require('https');
https.get(url, (response) => { // request itself
  // NOTE: `response` is always truthy in this callback, so the original
  // `if (response)` else-branch was dead code; check the HTTP status.
  if (response.statusCode === 200) {
    let imageName = 'image.png'; // for this purpose I usually use crypto
    response.pipe( // pipe response to a write stream (file)
      fs.createWriteStream( // create write stream
        './public/' + imageName // create a file with name image.png
      )
    );
    return imageName; // NB: this return value is discarded by https.get
  } else {
    response.resume(); // drain the unused body so the socket is released
    return false;
  }
}).on('error', (err) => {
  // Without an 'error' handler, a network failure crashes the process.
  console.error(err);
});
You could get the original name and extension from the URL, but it is safer to generate a new name with crypto and get the file extension, as I said, from the URL or with the read-chunk and file-type modules.

Double quotes being added when posting form in Node.js

I'm posting to an ASP.NET MVC server using:
var request = require("request");
...
// Multipart form fields for the upload job; `package` carries the zipped design.
var formData = {
  designerCode: data.designerCode,
  designCode: data.designCode,
  designName: data.designName,
  package: zipBuffer
}
// Target endpoint: /job/upload/<jobId>/<clientId>, both path pieces escaped.
var url = constants.upload_zip_url + "/job/upload/" + encodeURIComponent(data.jobId) + "/" + encodeURIComponent(data.clientId);
// Assemble the request options up front, then fire the POST.
var postOptions = {
  url: url,
  headers: { "Authorization": "Bearer " + pniGlobalAuthToken },
  formData: formData
};
request.post(postOptions, function postCallback(err, response, body) {
  console.log(err, response, body);
})
On the server I have:
using (var requestContent = Request.Content)
{
var contents = (await requestContent.ReadAsMultipartAsync()).Contents;
var contentDic = contents.ToDictionary(c => c.Headers.ContentDisposition.Name);
However at this point:
contentDic.Keys
Count = 4
[0]: "\"designCode\""
[1]: "\"designName\""
[2]: "\"package\""
[3]: "\"designerCode\""
So the request module is adding quotes around the form key values. Is it hard coded, or is there a parameter to turn it off? I looked at the documentation, but can't find anything:https://github.com/request/request
I can always strip the quotes on the server, but this is annoying. Request is the most convenient Node module that I've found. Has anyone else encountered something similar?

Upload a large file using nodejs

I have the following nodejs code which uploads a file by calling a server-side API (written by me) and passing the file content as a multi-part request. The problem is that my code works perfectly with small files but it fails with large files (1 MB or above). I'm pretty sure it's a problem in my code but I'm not able to find out what it is.
// assume file content has been read into the post_data array
// Make the POST request
// NOTE(review): http.createClient is an ancient Node API that was removed
// long ago — modern code uses http.request/https.request. Also note that
// `secure = true` assigns an implicit global, it is not a named option.
var google = http.createClient(443, host, secure = true);
var filepath = '/v2_0/put_file/';
var GMTdate = (new Date()).toGMTString();
// Destination name, URL-escaped for the custom Filename header below.
var fileName = encodeURIComponent(destination);
console.log("fileName : " + fileName);
console.log("Path : " + filepath);
var header = {
'Host': host,
'Authorization': 'Basic ' + authStr,
// Fixed boundary; the body parts in post_data must use the same string.
'Content-Type': 'multipart/form-data; boundary=0xLhTaLbOkNdArZ',
'Last-Modified': GMTdate,
'Filename': fileName,
'Last-Access-By': username
};
var request = google.request('POST', filepath, header);
// Write each pre-built body part; post_data is assembled in the UPDATE
// snippet further down.
for (var i = 0; i < post_data.length; i++) {
request.write(post_data[i]);
}
request.end();
request.addListener('response', function(response){
var noBytest = 0;
// NOTE(review): utf8 decoding is fine for a textual response but would
// mangle a binary payload.
response.setEncoding('utf8');
console.log('STATUS: ' + response);
console.log('STATUS: ' + response.statusCode);
console.log('HEADERS: ' + JSON.stringify(response.headers));
console.log('File Size: ' + response.headers['content-length'] + " bytes.");
// NOTE(review): the excerpt is cut off here — the closing braces of this
// callback are not shown in the question.
From the logs, I see that control comes to request.end(); but I do not see the last few logs written after request.addListener() block.
I've been pulling my hair out for the last couple of days trying to understand why it works for small files but not for larger ones. I don't see any timeouts and the code just seems to hang until I kill it.
Can anyone guide me as to what am I doing wrong?
UPDATE:
post_data is an array, here is what I'm doing
// Build the multipart body parts up front.
// NOTE(review): `post_data` is assigned without var/let/const here — an
// implicit global in non-strict mode.
post_data = [];
console.log('ContentType =' + ContentType + "\n\nEncoding Style =" + encodingStyle);
// First part: the multipart headers for the file field (ASCII text).
// NOTE(review): `new Buffer(...)` is deprecated — Buffer.from is the
// modern equivalent.
post_data.push(new Buffer(EncodeFilePart(boundary, ContentType, 'theFile', FileNameOnly), 'ascii'));
var file_contents = '';
var file_reader = fs.createReadStream(filename, {
encoding: encodingStyle
});
// NOTE(review): accumulating chunks into a JS string decodes them with
// `encodingStyle`; for binary files this round-trip through a string is a
// likely source of corruption — confirm the encoding matches the payload.
file_reader.on('data', function(data){
console.log('in data');
file_contents += data;
});
file_reader.on('end', function(){
// File body followed by the closing boundary marker.
post_data.push(new Buffer(file_contents, encodingStyle))
post_data.push(new Buffer("\r\n--" + boundary + "--\r\n", 'ascii'));
...
var request = google.request('POST', filepath, header);
// NOTE(review): this excerpt (with the literal "..." above) is incomplete;
// the 'end' callback is never closed in the question text.
for (var i = 0; i < post_data.length; i++) {
request.write(post_data[i]);
}
I look forward to your suggestions.
You should be passing either an array or a string to request.write . Is post_data an array of strings, or an array of arrays?
Also, you are posting it as multipart/form-data, so that means you have to modify your data to that format. Have you done so, or is post_data just the raw data from a file?
checkout node-formidable and this post http://debuggable.com/posts/parsing-file-uploads-at-500-mb-s-with-node-js:4c03862e-351c-4faa-bb67-4365cbdd56cb

Categories

Resources