I am trying to build a web app to stream music. I use MongoDB to store the audio, a Node API to connect to the database, and a Vue.js frontend. Below is the endpoint which streams the music, based on this article: https://medium.com/#richard534/uploading-streaming-audio-using-nodejs-express-mongodb-gridfs-b031a0bcb20f
trackRoute.get('/:trackID', (req, res) => {
  try {
    var trackID = new ObjectID(req.params.trackID);
  } catch (err) {
    return res.status(400).json({ message: "Invalid trackID in URL parameter. Must be a single String of 12 bytes or a string of 24 hex characters" });
  }

  res.set('content-type', 'audio/mp3');
  res.set('accept-ranges', 'bytes');

  let bucket = new mongodb.GridFSBucket(db, {
    bucketName: 'tracks'
  });

  let downloadStream = bucket.openDownloadStream(trackID);

  downloadStream.on('data', (chunk) => {
    res.write(chunk);
  });

  downloadStream.on('error', () => {
    res.sendStatus(404);
  });

  downloadStream.on('end', () => {
    res.end();
  });
});
I tested it with Postman and it works there. I am trying to read the stream in my Vue.js application, but I'm not sure how to do it. I tried the following to test it:
const url = 'http://localhost:4343/api/track/6061c90b2658b9001e65311d';

http.get(url, function (res) {
  res.on('data', function (buf) {
    console.log(buf);
  });
  res.on('end', function () {
    console.log('ended');
  });
});
This does not work, however. How should I go about reading it in the frontend?
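A minimal sketch of one possible approach, assuming the endpoint above is reachable from the browser (the URL below is the test URL from above; the audio-element approach itself is an assumption, not something in the original code): let the browser handle the streaming instead of reading chunks manually.

// Sketch only: any valid track ID would work the same way.
const trackUrl = 'http://localhost:4343/api/track/6061c90b2658b9001e65311d';

const audio = new Audio(trackUrl); // the browser issues the GET and streams the response
audio.play().catch((err) => {
  // autoplay policies or network failures land here
  console.error('Playback failed:', err);
});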
Related
I have found many posts on how to retrieve the contents of a .txt file from Google Drive with their API. I have tried using this:
const drive = google.drive({version: 'v3', auth});

var data = drive.files.get({
  fileId: file_id,
  alt: "media"
});

data.execute(function(response){
  console.log(response);
});
My error
data.execute(function(response){
^
TypeError: data.execute is not a function
I also tried data.then instead of data.execute, but each time there is an error that I research and find no resolution for. Could someone please give me an updated version of how to get the contents of a file from the file ID? I think the previous answers are somewhat outdated.
Sorry if this is pretty obvious. I'm relatively new to JavaScript and APIs in general, so this would help me out a lot as it's the final stretch before I finish my program :)
Thanks, Mathias
When you call drive.files.get with the Google Drive API you receive a promise, and to get the data you have to call .then() on it. This is how it works:
const fs = require('fs');

// `drive` and `fileId` are assumed to be set up already (authenticated client and target file ID)
const filePath = `give_path_tosave_file`;
const dest = fs.createWriteStream(filePath);

drive.files.get(
  { fileId, alt: 'media' },
  { responseType: 'stream' }
).then(res => {
  res.data
    .on('end', () => {
      console.log('Done downloading file.');
    })
    .on('error', err => {
      console.error('Error downloading file.', err);
    })
    .on('data', d => {
      // each chunk of the file arrives here
      console.log(d.toString());
    })
    // pipe it to the write stream
    .pipe(dest);
});
If the above solution doesn't work, you can use this one. It is from the official Google website and does the same thing:
var fileId = '1ZdR3L3qP4Bkq8noWLJHSr_iBau0DNT4Kli4SxNc2YEo';
var dest = fs.createWriteStream('/tmp/filename.txt');

drive.files.export({
  fileId: fileId,
  mimeType: 'text/plain' // export as plain text
})
  .on('end', function () {
    console.log('Done');
  })
  .on('error', function (err) {
    console.log('Error during download', err);
  })
  .pipe(dest);
For more info, you should check here.
Also, the method below will list all the files you have access to in Drive:
drive.files.list({}, (err, res) => {
  if (err) throw err;
  const files = res.data.files;
  if (files.length) {
    files.map((file) => {
      console.log(file);
    });
  } else {
    console.log('No files found');
  }
});
I'm uploading a data stream to Azure Storage, and I would like to get the link to the blob file.
let insertFile = async function (blobName, stream) {
  const containerName = 'texttospeechudio';
  try {
    await blobService.createContainerIfNotExists(containerName, {
      publicAccessLevel: 'blob'
    }, (err, result, response) => {
      if (!err) {
        console.log(result);
      }
    });

    let resultstream = blobService.createWriteStreamToBlockBlob(containerName, blobName, (err, result, response) => {
      console.log(response);
    });

    stream.pipe(resultstream);

    stream.on('error', function (error) {
      console.log(error);
    });
    stream.once('end', function (end) {
      console.log(end);
      //OK
    });
  }
  catch (err) {
    console.log(err);
  }
}
I added a callback to createWriteStreamToBlockBlob, but I'm never getting inside it.
I would like to find a way to get the uploaded file's URL.
There is no file URL returned in the response, according to the Put Blob REST spec.
However, an Azure Storage resource URL is commonly composed with the following pattern:
https://{myaccount}.blob.core.windows.net/{mycontainer}/{myblob}
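So one option is simply to compose the URL yourself after the upload. A minimal sketch, assuming the storage account name is available (here taken from an AZURE_STORAGE_ACCOUNT environment variable, which is an assumption, not something from the original code):

// Sketch only: accountName source and the sample blob name are assumptions.
const accountName = process.env.AZURE_STORAGE_ACCOUNT;
const containerName = 'texttospeechudio';

function getBlobUrl(blobName) {
  // follows the pattern https://{myaccount}.blob.core.windows.net/{mycontainer}/{myblob}
  return `https://${accountName}.blob.core.windows.net/${containerName}/${blobName}`;
}

console.log(getBlobUrl('example.mp3')); // hypothetical blob name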
I have developed a Google Cloud Function which calls an API hosted in Azure.
However, the function returns an error:
Error: function crashed.Details:
getaddrinfo ENOTFOUND https://bupanonproduction.azure-api.net https://bupanonproduction.azure-api.net:443
Below is the Google Cloud Function:
'use strict';

const http = require('https');
const host = 'https://bupanonproduction.azure-api.net';

exports.remaininglimits = (req, res) => {
  // Call the API
  callRemainingLimitsApi().then((output) => {
    // Return the results from the API to API.AI
    res.setHeader('Content-Type', 'application/json');
    res.send(JSON.stringify({ 'speech': output, 'displayText': output }));
  }).catch((error) => {
    // If there is an error let the user know
    res.setHeader('Content-Type', 'application/json');
    res.send(JSON.stringify({ 'speech': error, 'displayText': error }));
  });
};

function callRemainingLimitsApi () {
  return new Promise((resolve, reject) => {
    // Create the path for the HTTP request to the Azure API
    let path = '/api/Values';
    console.log('API Request: ' + host + path);

    // Make the HTTP request
    http.get({host: host, path: path, headers: {'Ocp-Apim-Subscription-Key':'0a6e2fa822ec4d7a821d7f286abb6990'}}, (res) => {
      let body = ''; // var to store the response chunks
      res.on('data', (d) => { body += d; }); // store each response chunk
      res.on('end', () => {
        // After all the data has been received, parse the JSON for the desired data
        let response = JSON.parse(body);
        let jasonString = JSON.stringify(response);

        // Create response
        let output = `Hi, your limit is ${jasonString}.`;

        // Resolve the promise with the output text
        console.log(output);
        resolve(output);
      });
      res.on('error', (error) => {
        reject(error);
      });
    });
  });
}
When I use another public API, such as the one below, it returns the correct result to the cloud function.
https://www.alphavantage.co/query?function=TIME_SERIES_DAILY&symbol=MSFT&apikey=demo
Any idea why the cloud function is not recognizing the Azure API URL?
-Alan-
I just found out that the host should be defined without the "https://" prefix. That fixed the problem. I am using the Free Trial with $300 credit and I am not sure if this is considered a paid plan.
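To illustrate the fix, a minimal sketch (the path is the one from the function above; the rest is stripped down, and the subscription-key header is omitted here): when http.get() is given an options object, host should be a bare hostname, not a URL with a scheme.

// Sketch only: note there is no "https://" prefix on host.
const https = require('https');

const host = 'bupanonproduction.azure-api.net';
const path = '/api/Values';

https.get({ host: host, path: path }, (res) => {
  let body = '';
  res.on('data', (chunk) => { body += chunk; });
  res.on('end', () => console.log(body));
}).on('error', (err) => console.error(err));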
I'm building an application with React Native. Now I'm trying to send an image from the mobile device to the server (Node.js). For this I'm using react-native-image-picker. The problem is that when I send the image, the server saves a file but it's empty; it doesn't contain the photo. I think the problem is probably that the server can't access the path of the image because it is on a different device, but I don't know how to solve it.
React-Native:
openImagePicker(){
  const options = {
    title: 'Select Avatar',
    storageOptions: {
      skipBackup: true,
      path: 'images'
    }
  }

  ImagePicker.showImagePicker(options, (imagen) => {
    if (imagen.didCancel) {
      console.log('User cancelled image picker');
    }
    else if (imagen.error) {
      console.log('ImagePicker Error: ', imagen.error);
    }
    else if (imagen.customButton) {
      console.log('User tapped custom button: ', imagen.customButton);
    }
    else {
      let formdata = new FormData();
      formdata.append("file[name]", imagen.fileName);
      formdata.append("file[path]", imagen.path);
      formdata.append("file[type]", imagen.type);

      fetch('http://X/user/photo/58e137dd5d45090d0b000006', {
        method: 'PUT',
        headers: {
          'Content-Type': 'multipart/form-data'
        },
        body: formdata
      })
      .then(response => {
        console.log("ok");
      })
      .catch(function(err) {
        console.log(err);
      });
    }
  });
}
Node Js:
addPhotoUser = function (req, res) {
  User.findById(req.params.id, function(err, user) {
    fs.readFile(req.body.file.path, function (err, data) {
      var pwd = 'home/ubuntu/.../';
      var newPath = pwd + req.body.file.name;

      fs.writeFile(newPath, data, function (err) {
        imageUrl: URL + req.body.file.name;

        user.save(function(err) {
          if(!err) {
            console.log('Updated');
          } else {
            console.log('ERROR: ' + err);
          }
          res.send(user);
        });
      });
    });
  });
};
Yes, the problem is that the file path is on the local device and not the server. You want to send the actual data returned to you by react-native-image-picker, not the URI. It looks like that library encodes the data with base64, so you're going to want to send that to your server, not the URI returned from the library, because it won't be accessible on a remote server.
What this means is that you won't be reading any files on your server, but instead just decoding a base64 string from the request body and writing that to your filesystem.
For the client side:
let formdata = new FormData();
formdata.append("file[name]", imagen.fileName);
formdata.append("file[data]", imagen.data); // this is base64 encoded!
formdata.append("file[type]", imagen.type);

fetch('http://X/user/photo/58e137dd5d45090d0b000006', {
  method: 'PUT',
  headers: {
    'Content-Type': 'multipart/form-data'
  },
  body: formdata
})
On the server side, decode the base64 string before writing it to the filesystem:
// decode the base64 field from the form data into binary
let decoded = Buffer.from(req.body.file.data, 'base64');
// now this is binary and can be written to the filesystem
From there:
fs.writeFile(newPath, decoded, function (err) {
  imageUrl: newPath;

  user.save(function(err) {
    if(!err) {
      console.log('Updated');
    } else {
      console.log('ERROR: ' + err);
    }
    res.send(user);
  });
});
Note, you don't need the fs.readFile that's in your code, because you're decoding the image that was sent as a base64 string in your request.
There also seem to be some oddities with how you're using that user object. You seem to be passing only a function that handles errors and not any actual data. I don't know what ORM you're using, so it's hard to say how it should work. Maybe something like this?
user.save({imageUrl:uriReturnedByFsWrite}, (err, data)=>{...})
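Or, if the ORM happens to be Mongoose (an assumption; the original doesn't say), a minimal sketch would be to set the field on the fetched document and then save it:

// Sketch assuming a Mongoose model with an imageUrl field (hypothetical field name).
user.imageUrl = newPath;          // set the field on the document loaded by findById
user.save(function(err, saved) {  // persist it; callback style as in the question
  if (err) {
    console.log('ERROR: ' + err);
  } else {
    res.send(saved);
  }
});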
Good luck :)
Make an object and then send that object to the server. The object will consist of name, path and type, like this:
var imageData = {name: 'image1', path: uri, type: 'image/jpeg'}
Above is one way to send the image data. Another way is to convert it into a BLOB so that the server-side programmer doesn't have to do this task on their end. You can create a BLOB by using react-native-fetch-blob.
One more way is to upload the images directly to the Amazon server (S3) and send the link to the backend (a sketch of that follows the base64 function below).
A function that returns a base64 string:
var RNFetchBlob = require('react-native-fetch-blob').default;

getImageAttachment: function(uri_attachment, mimetype_attachment) {
  return new Promise((RESOLVE, REJECT) => {
    // Fetch attachment
    RNFetchBlob.fetch('GET', config.apiRoot + '/app/' + uri_attachment)
      .then((response) => {
        let base64Str = response.data;
        var imageBase64 = 'data:' + mimetype_attachment + ';base64,' + base64Str;

        // Return base64 image
        RESOLVE(imageBase64);
      })
      .catch((error) => {
        // error handling
        console.log("Error: ", error);
        REJECT(error);
      });
  });
},
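For the S3 route mentioned above, a minimal server-side sketch, assuming the aws-sdk v2 package; the bucket name, region and key are hypothetical. The idea is to hand the app a short-lived presigned PUT URL so the image goes straight to S3 and only the resulting link reaches the backend.

// Sketch only: bucket, region and file names are assumptions.
const AWS = require('aws-sdk');
const s3 = new AWS.S3({ region: 'us-east-1' });

function getUploadUrl(fileName, fileType, callback) {
  const params = {
    Bucket: 'my-app-images', // hypothetical bucket
    Key: fileName,
    Expires: 60,             // URL valid for 60 seconds
    ContentType: fileType
  };
  s3.getSignedUrl('putObject', params, callback); // callback receives (err, url)
}

The app can then PUT the picked image to that URL with fetch and send the resulting object link to the backend.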
Cheers :)
I am trying to build an API on top of the Braintree servers, referring to this doc: https://developers.braintreepayments.com/javascript+node/reference/general/result-handling/search-results
To access all the transactions from their server I have to consume a Node stream.
For example:
app.get('/project', function(req, res) {
  if(req.user) {
    var stream = gateway.transaction.search(function (search) {
      search.customerId().is(req.user.id);
    });
    stream.on("ready", function () {
      console.log(stream.searchResponse);
    });
    stream.on("data", function (data) {
      res.json(data); // can't set headers after they are sent.
    });
  }
});
I understand a stream returns data in chunks, so res.json() above is most likely being called multiple times, resulting in Error: Can't set headers after they are sent.
So my question is: how can I send that data to the client in one chunk? Node.js streaming is confusing to me; I am going to read up more about it, but it would be great to understand how to send the data to the client without re-sending the headers.
You shouldn't make any assumptions about data events unless the stream you are reading from is in object mode. You could get one data event or a hundred (depending on the input size of course) because TCP is a stream.
What you probably want is something like this instead (assuming stream is not in object mode):
app.get('/project', function(req, res) {
  if(req.user) {
    var stream = gateway.transaction.search(function (search) {
      search.customerId().is(req.user.id);
    });
    stream.on("ready", function () {
      console.log(stream.searchResponse);
    });

    var buf = '';
    stream.on("data", function (data) {
      buf += data;
    });
    stream.on("end", function() {
      res.setHeader('Content-Type', 'application/json');
      res.send(buf);
    });
  }
});
Or just pipe the stream to the response:
app.get('/project', function(req, res) {
  if(req.user) {
    var stream = gateway.transaction.search(function (search) {
      search.customerId().is(req.user.id);
    });
    stream.on("ready", function () {
      console.log(stream.searchResponse);
    });

    res.setHeader('Content-Type', 'application/json');
    stream.pipe(res);
  }
});
For an object stream you might do:
app.get('/project', function(req, res) {
  if(req.user) {
    var stream = gateway.transaction.search(function (search) {
      search.customerId().is(req.user.id);
    });
    stream.on("ready", function () {
      console.log(stream.searchResponse);
    });

    var result = [];
    stream.on("data", function (data) {
      result.push(data);
    });
    stream.on("end", function() {
      res.json(result);
    });
  }
});