How to read file contents using axios and a file URL? [duplicate]

I'm using axios on the server side.
I want to download big files; technically this should be done with byte-range requests.
1. Does axios handle the byte-range requests itself, so that the callback is only called once the whole response is ready?
2. If 1 is not true, should I handle the data chunks myself?
In the code below:
axios({
  url: params.url,
  method: 'GET',
  responseType: 'stream' // important
}).then(function (response) {
  logger.debug('__download() : done!')
  let contentType = response.headers['content-type']
  let contentLength = response.headers['content-length']
  var writer = new streams.WritableStream()
  response.data.pipe(writer)
  // ....
})
Am I supposed to wait for something like response.on('end')?
The purpose of what I'm doing is to get the size of the buffer (which I could get by writer.getBuffer())
Thanks for any hint !

I found out that to download the stream (in memory, in my case) I had to wait for the 'finish' event on my writer (writer.on('finish', cb) rather than response.on('end', cb); I'm not even sure the response emits an 'end' event here)...
var stream = require('stream');
var util = require('util');
var Writable = stream.Writable;

function MemoryStream(options) {
  if (!(this instanceof MemoryStream)) {
    return new MemoryStream(options);
  }
  Writable.call(this, options); // init super
}
util.inherits(MemoryStream, Writable);

MemoryStream.prototype._write = function (chunk, enc, cb) {
  // Buffer.from (not Buffer.alloc) converts a string chunk using its encoding
  var buffer = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk, enc);
  if (Buffer.isBuffer(this.memStore)) {
    this.memStore = Buffer.concat([this.memStore, buffer]);
  } else {
    this.memStore = buffer;
  }
  cb();
};

MemoryStream.prototype.toBuffer = function () {
  return this.memStore;
};

module.exports = MemoryStream;
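As an aside (not part of the original answer): on newer Node versions the same MemoryStream can be written by extending stream.Writable directly, without util.inherits; either version works with the download code below.

const { Writable } = require('stream');

class MemoryStream extends Writable {
  _write(chunk, enc, cb) {
    // normalise every chunk to a Buffer and append it to the in-memory store
    const buffer = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk, enc);
    this.memStore = Buffer.isBuffer(this.memStore) ? Buffer.concat([this.memStore, buffer]) : buffer;
    cb();
  }
  toBuffer() {
    return this.memStore;
  }
}

module.exports = MemoryStream;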
and then in my download function:
axios({
  url: params.url,
  method: 'GET',
  responseType: 'stream' // important
}).then(function (response) {
  logger.debug('__download() : done!')
  let contentType = response.headers['content-type']
  let contentLength = response.headers['content-length']
  var writer = new MemoryStream()
  response.data.pipe(writer)
  writer.on('finish', function () {
    var b = writer.toBuffer()
    let computedContentLength = b.byteLength
    if (!contentLength) { contentLength = computedContentLength }
    return callback(null, { 'foo': 'bar' })
  });
})
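As a side note (not from the original answer): if the goal is only to get the whole body in memory and measure it, axios can do the buffering itself when asked for an arraybuffer instead of a stream. A minimal sketch, assuming the same params.url and callback as above:

axios({
  url: params.url,
  method: 'GET',
  responseType: 'arraybuffer' // axios buffers the whole body and resolves once it is complete
}).then(function (response) {
  var b = Buffer.from(response.data) // in Node, response.data is already a Buffer here
  var contentLength = response.headers['content-length'] || b.byteLength
  return callback(null, { size: b.byteLength, contentType: response.headers['content-type'] })
})

This trades memory for simplicity; for very large files the streaming approach above remains the safer option.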

Related

Occasionally uploading larger files

On moving to the next step in the form I run some checks. One is to stop photos over 10 MB and to prevent .heic files from being uploaded. 90% of the time it works, but now and again files are let through.
Any help with a better-written solution, or a reason why this may fail and let large or .heic files through, would be appreciated.
var upload_one = document.getElementById("image_one");
if (upload_one.files.length > 0) {
  if (upload_one.files.item(0).size >= '10485760') {
    upload_one.className += " invalid";
    valid = false;
    alert("Photo is too large. Photos need to be under 10mb")
  }
  fileName = document.querySelector('#image_one').value;
  extension = fileName.split('.').pop();
  if (extension == 'heic') {
    upload_one.className += " invalid";
    valid = false;
    alert("Files can only be .png, .jpg or .jpeg")
  }
}
You should have a look at presigned URLs with an S3 bucket on AWS.
Basically you generate an upload URL to which you can upload big files directly to S3.
Personally, I use a Lambda to generate this presigned URL and return it to the front end.
Backend
const AWS = require("aws-sdk");
const S3 = new AWS.S3();
const { v4: uuidv4 } = require("uuid");

const getUrl = async (params) => {
  return await new Promise((resolve, reject) => {
    S3.getSignedUrl("putObject", params, (err, url) => {
      if (err) {
        reject(err);
      } else {
        resolve({
          statusCode: 200,
          url,
        });
      }
    });
  });
};

exports.handler = async (event, context) => {
  const id = uuidv4();
  const { userId } = event?.queryStringParameters;
  const params = {
    Bucket: process.env.INVOICE_BUCKET,
    Key: `${userId}/${id}.csv`,
    ContentType: `text/csv`,
    ACL: "public-read",
  };
  try {
    const { url } = await getUrl(params);
    return handleRes({ message: `Successfully generated url`, url, key: `${id}.csv`, publicUrl: `https://yourBucket.s3.eu-west-1.amazonaws.com/${userId}/${id}.csv` }, 200);
  } catch (e) {
    console.error(e);
    return handleRes({ message: "failed" }, 400);
  }
};
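One detail not shown above: getSignedUrl defaults to a 15-minute expiry. If you need a different window, an Expires field (in seconds) can be added to the same params object before signing, for example:

const params = {
  Bucket: process.env.INVOICE_BUCKET,
  Key: `${userId}/${id}.csv`,
  ContentType: `text/csv`,
  ACL: "public-read",
  Expires: 300, // presigned URL stays valid for 5 minutes
};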
Front end
$(function () {
  $("#theForm").on("submit", sendFile);
});

function sendFile(e) {
  e.preventDefault();
  var urlPresigned;
  var publicUrl;
  var key;
  $.ajax({
    type: "GET",
    url: `https://yourId.execute-api.eu-west-1.amazonaws.com/Prod/file-upload-to-bucket?userId=${userId}`,
    success: function (resp) {
      urlPresigned = resp.url;
      publicUrl = resp.publicUrl;
      key = resp.key;
      var theFormFile = $("#theFile").get()[0].files[0];
      $.ajax({
        type: "PUT",
        url: urlPresigned,
        contentType: "text/csv", // set the MIME type
        processData: false,
        // the actual file is sent raw
        data: theFormFile,
        success: function () {
          // file uploaded
        },
        error: function (err) {
          console.log(err);
        },
      });
    },
  });
}
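For reference, the same presigned PUT can be done without jQuery; a minimal sketch using fetch, assuming resp is the object returned by the GET above and the file input still has the id theFile:

var theFormFile = document.querySelector("#theFile").files[0];
fetch(resp.url, {
  method: "PUT",
  headers: { "Content-Type": "text/csv" }, // must match the ContentType the URL was signed with
  body: theFormFile, // the raw file, no form encoding
}).then(function (r) {
  if (!r.ok) throw new Error("Upload failed with status " + r.status);
  // the file is now reachable at resp.publicUrl
}).catch(console.error);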

Azure Function doesn't accept creating a file on remote

I want to download a file locally, then create a stream from it and send it to an API.
On localhost the file gets created via blobClient.downloadToFile(defaultFile);
but when I deploy the function it cannot find the file to stream, so I think the download either doesn't happen or goes to the wrong location.
I get this error
[Error: ENOENT: no such file or directory, open 'D:\home\site\wwwroot\importPbix\exampleName.pbix'
Here's my code
const blobServiceClient = BlobServiceClient.fromConnectionString(
  process.env.CONNEXION_STRING
);
const containerClient = blobServiceClient.getContainerClient(
  params.containerName
);
const blobClient = containerClient.getBlobClient(process.env.FILE_LOCATION); // get file from storage
let blobData;
var defaultFile = path.join(params.baseDir, `${params.reportName}.pbix`); // use path module
let stream;
try {
  blobData = await blobClient.downloadToFile(defaultFile);
  console.log(blobData);
  stream = fs.createReadStream(defaultFile);
} catch (error) {
  params.context.log(error);
  console.log(error);
}
var options = {
  method: "POST",
  url: `https://api.powerbi.com/v1.0/myorg/groups/${params.groupId}/imports?datasetDisplayName=${params.reportName}`,
  headers: {
    "Content-Type": "multipart/form-data",
    Authorization: `Bearer ${params.accessToken} `,
  },
  formData: {
    "": {
      value: stream,
      options: {
        filename: `${params.reportName}.pbix`,
        contentType: null,
      },
    },
  },
};
// check if file is kept in memory
return new Promise(function (resolve, reject) {
  request(options, function (error, response) {
    if (error) {
      params.context.log(error);
      reject(error);
    } else {
      params.context.log(response);
      resolve(response.body);
    }
    fs.unlinkSync(defaultFile);
  });
});
I found this post with the same issue; that's why I use the path module and pass __dirname to the function as params.baseDir.
If you want to download a file from Azure Blob Storage and read it as a stream, just try the code below. In this demo, I download a .txt file to a temp folder (you should create it first on the Azure Function) and print its content from the stream for a quick test:
module.exports = async function (context, req) {
  const { BlockBlobClient } = require("@azure/storage-blob")
  const fs = require('fs')

  const connStr = '<connection string>'
  const container = 'files'
  const blobName = 'test.txt'
  const tempPath = 'd:/home/temp/'
  const tempFilePath = tempPath + blobName

  const blobClient = new BlockBlobClient(connStr, container, blobName);
  await blobClient.downloadToFile(tempFilePath).then(async function () {
    context.log("download successfully")
    let stream = fs.createReadStream(tempFilePath)
    // print text content, just to check the stream has been read successfully
    context.log("text file content:")
    context.log(await streamToString(stream))
    // You can call your API here...
  })

  function streamToString(stream) {
    const chunks = [];
    return new Promise((resolve, reject) => {
      stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
      stream.on('error', (err) => reject(err));
      stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8')));
    })
  }

  context.res = {
    body: 'done'
  }
}
Result: the file is downloaded and its content is read from the stream successfully (screenshots omitted).
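As an aside (not part of the original answer): if you do not actually need the file on disk, the blob can also be read as a stream directly, without downloadToFile or a temp folder. A short sketch reusing the blobClient and streamToString helper from the code above (assuming @azure/storage-blob v12 on Node, inside the same async function):

const downloadResponse = await blobClient.download();
// readableStreamBody is a Node.js readable stream in the Node SDK
context.log(await streamToString(downloadResponse.readableStreamBody));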

JS Functions to make an API call

I have the following code.
I'm trying to make an API call (retrieve) passing since (obj.since), so that the API does not retrieve all the data on every call. However, so far, I haven't found a way to get since from the last record in my database.
var express = require("express");
var article = require("../models/article");
var request = require('request');

article.findOne({}, { since: 1, _id: 0 }, { sort: { 'since': -1 } }, function (err, obj) {
  var dataString = `'{"consumer_key":"XXXXX", "access_token":"XXXXXXX", "since":"${obj.since}"}'`;
});

var options = {
  url: 'https://xxxxxxxxx.com/v3/get',
  method: 'POST',
  headers: headers,
  body: dataString
}

function callback(error, response, body) {
  if (!error && response.statusCode == 200) {
    let package = JSON.parse(body);
    for (var attributename in package.list) {
      var title = package.list[attributename]["given_title"];
      var url = package.list[attributename]["given_url"];
      var newArticle = { title: title, url: url, since: since }
      article.create(newArticle, function (error, newlyCreated) {
        if (error) {
          console.log(error);
        } else {
          console.log(newlyCreated);
        }
      });
    }
  } else {
    console.log(error);
  }
};

request(options, callback)
How can I make an API call that gets obj.since from the database (MongoDB) and passes it into the options object?
You are doing async, callback-style operations in a for loop, which is causing this issue. I would change a few things:
Change findOne to call exec() at the end so it returns a promise.
article.create already returns a promise if no callback is specified.
Convert request to promise style.
Use a for..of loop so await works inside the async operations.
The code will look like this
var express = require("express");
var article = require("../models/article");
var request = require('request');

function hitApi(dataString) {
  return new Promise((resolve, reject) => {
    var options = {
      url: 'https://xxxxxxxxx.com/v3/get',
      method: 'POST',
      headers: headers,
      body: dataString
    }
    request(options, (error, response, body) => {
      if (error) {
        return reject(error);
      }
      resolve(body);
    });
  });
}

async function perform() {
  const dataString = await article.findOne({}, { since: 1, _id: 0 }, { sort: { 'since': -1 } }).exec();
  const body = await hitApi(dataString);
  const package = JSON.parse(body);
  for (const attributename of Object.keys(package.list)) {
    var title = package.list[attributename]["given_title"];
    var url = package.list[attributename]["given_url"];
    var newArticle = { title: title, url: url, since: since };
    const newlyCreated = await article.create(newArticle);
    console.log(newlyCreated);
  }
}
You can then call the perform function. There might be a few syntax errors, but you get the idea.
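For completeness, a minimal way to call it (my sketch, not from the original answer):

perform()
  .then(() => console.log('articles imported'))
  .catch((err) => console.error(err));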

Amazon S3 Remote File Upload with Axios

I am trying to write a function that would:
Take a remote URL as a parameter,
Get the file using axios
Upload the stream to Amazon S3
And finally, return the uploaded URL
I found help here on stackoverflow. So far, I have this:
/*
* Method to pipe the stream
*/
const uploadFromStream = (file_name, content_type) => {
  const pass = new stream.PassThrough();
  const obj_key = generateObjKey(file_name);
  const params = { Bucket: config.bucket, ACL: config.acl, Key: obj_key, ContentType: content_type, Body: pass };
  s3.upload(params, function (err, data) {
    if (!err) {
      return data.Location;
    } else {
      console.log(err, data);
    }
  });
  return pass;
}
/*
* Method to upload remote file to s3
*/
const uploadRemoteFileToS3 = async (remoteAddr) => {
  axios({
    method: 'get',
    url: remoteAddr,
    responseType: 'stream'
  }).then((response) => {
    if (response.status === 200) {
      const file_name = remoteAddr.substring(remoteAddr.lastIndexOf('/') + 1);
      const content_type = response.headers['content-type'];
      response.data.pipe(uploadFromStream(file_name, content_type));
    }
  });
}
But uploadRemoteFileToS3 does not return anything (because it's an asynchronous function). How can I get the uploaded URL?
UPDATE
I have further improved the code and written a class. Here is what I have now:
const config = require('../config.json');
const stream = require('stream');
const axios = require('axios');
const AWS = require('aws-sdk');

class S3RemoteUploader {
  constructor(remoteAddr) {
    this.remoteAddr = remoteAddr;
    this.stream = stream;
    this.axios = axios;
    this.config = config;
    this.AWS = AWS;
    this.AWS.config.update({
      accessKeyId: this.config.api_key,
      secretAccessKey: this.config.api_secret
    });
    this.spacesEndpoint = new this.AWS.Endpoint(this.config.endpoint);
    this.s3 = new this.AWS.S3({ endpoint: this.spacesEndpoint });
    this.file_name = this.remoteAddr.substring(this.remoteAddr.lastIndexOf('/') + 1);
    this.obj_key = this.config.subfolder + '/' + this.file_name;
    this.content_type = 'application/octet-stream';
    this.uploadStream();
  }
  uploadStream() {
    const pass = new this.stream.PassThrough();
    this.promise = this.s3.upload({
      Bucket: this.config.bucket,
      Key: this.obj_key,
      ACL: this.config.acl,
      Body: pass,
      ContentType: this.content_type
    }).promise();
    return pass;
  }
  initiateAxiosCall() {
    axios({
      method: 'get',
      url: this.remoteAddr,
      responseType: 'stream'
    }).then((response) => {
      if (response.status === 200) {
        this.content_type = response.headers['content-type'];
        response.data.pipe(this.uploadStream());
      }
    });
  }
  dispatch() {
    this.initiateAxiosCall();
  }
  async finish() {
    // console.log(this.promise); /* returns Promise { Pending } */
    return this.promise.then((r) => {
      console.log(r.Location);
      return r.Location;
    }).catch((e) => {
      console.log(e);
    });
  }
  run() {
    this.dispatch();
    this.finish();
  }
}
But I still have no clue how to catch the result when the promise resolves. So far, I have tried these:
testUpload = new S3RemoteUploader('https://avatars2.githubusercontent.com/u/41177');
testUpload.run();
//console.log(testUpload.promise); /* Returns Promise { Pending } */
testUpload.promise.then(r => console.log); // does nothing
But none of the above works. I have a feeling I am missing something very subtle. Any clue, anyone?
After an upload you can call the getSignedUrl function in the S3 SDK to get the URL; you can also specify the expiry of the URL there. You need to pass the key to that function. I'm travelling now and will update with an example later.
To generate a simple pre-signed URL that allows any user to view the
contents of a private object in a bucket you own, you can use the
following call to getSignedUrl():
var s3 = new AWS.S3();
var params = { Bucket: 'myBucket', Key: 'myKey' };
s3.getSignedUrl('getObject', params, function (err, url) {
  console.log("The URL is", url);
});
Official documentation link
http://docs.amazonaws.cn/en_us/AWSJavaScriptSDK/guide/node-examples.html
Code must be something like this
function uploadFileToS3AndGenerateUrl(cb) {
  const pass = new stream.PassThrough(); // I have generated streams from file. Using this since this is what you have used. Must be a valid one.
  var params = {
    Bucket: "your-bucket", // required
    Key: key, // required
    Body: pass,
    ContentType: 'your content type',
  };
  s3.upload(params, function (s3Err, data) {
    if (s3Err) {
      return cb(s3Err);
    }
    console.log(`File uploaded successfully at ${data.Location}`);
    const params = {
      Bucket: 'your-bucket',
      Key: data.key,
      Expires: 180
    };
    s3.getSignedUrl('getObject', params, (urlErr, urlData) => {
      if (urlErr) {
        console.log('There was an error getting your files: ' + urlErr);
        cb(urlErr);
      } else {
        console.log(`url: ${urlData}`);
        cb(null, urlData);
      }
    });
  });
}
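A minimal usage sketch (my addition, assuming s3, stream and key are configured as in the snippet above):

uploadFileToS3AndGenerateUrl(function (err, signedUrl) {
  if (err) {
    return console.error(err);
  }
  console.log('Temporary download link:', signedUrl); // valid for 180 seconds, per Expires above
});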
Please check, I have updated your code; it might help you.
/*
* Method to upload remote file to s3
*/
const uploadRemoteFileToS3 = async (remoteAddr) => {
  const response = await axios({
    method: 'get',
    url: remoteAddr,
    responseType: 'stream'
  })
  if (response.status === 200) {
    const file_name = remoteAddr.substring(remoteAddr.lastIndexOf('/') + 1)
    const content_type = response.headers['content-type']
    // resolves with the S3 location once the piped upload has finished
    return uploadFromStream(response.data, file_name, content_type)
  }
  throw new Error(`Unexpected response status ${response.status} for ${remoteAddr}`)
};
/*
* Method to pipe the stream
*/
const uploadFromStream = (source, file_name, content_type) => {
  return new Promise((resolve, reject) => {
    const pass = new stream.PassThrough()
    source.pipe(pass) // pipe the axios response stream into the PassThrough that S3 consumes
    const obj_key = generateObjKey(file_name)
    const params = { Bucket: config.bucket, ACL: config.acl, Key: obj_key, ContentType: content_type, Body: pass }
    s3.upload(params, function (err, data) {
      if (!err) {
        console.log(data)
        return resolve(data.Location)
      } else {
        console.log(err)
        return reject(err)
      }
    })
  })
}
// call uploadRemoteFileToS3
uploadRemoteFileToS3(remoteAddr)
  .then((finalResponse) => {
    console.log(finalResponse)
  })
  .catch((err) => {
    console.log(err);
  });

Execute second function after first function - REST

If the first REST call executes successfully, the second should be executed with parameters returned by the first; in this case it returns sessionid and I save the value inside the variable sessionid.
Both functions are REST calls within the same .js file.
Here is what I am trying:
My restApiCall.js file:
var Client = require('./lib/node-rest-client').Client;
var client = new Client();

var dataLogin = {
  data: { "userName": "xxxxx", "password": "xxxxxxxxxx", "platform": "xxxx" },
  headers: { "Content-Type": "application/json" }
};
var numberOrigin = 350;

client.registerMethod("postMethod", "xxxxxxxxxxxxxxxxxx/services/login", "POST");
client.methods.postMethod(dataLogin, function (data, response) {
  // parsed response body as js object
  // console.log(data);
  // raw response
  if (Buffer.isBuffer(data)) {
    data = data.toString('utf8');
    console.log(data);
    re = /(sessionID: )([^,}]*)/g;
    match = re.exec(data);
    var sessionid = match[2]
    console.log(sessionid);
    openRequest(sessionid, numberOrigin); // I try to execute this, but only the first call runs when I type `node restApiCall` at the prompt
  }
});
// this is the second function I want to execute after the first one succeeds
function openRequest(sessionid, numberOrigin) {
  var client = new Client();
  numberOrigin += 1;
  var dataRequest = {
    data: {"sessionID":sessionid,"synchronize":false,"sourceRequest":{"numberOrigin":numberOrigin,"type":"R","description":"Test - DHC","userID":"xxxxxxxxxx","contact":{"name":"Sayuri Mizuguchi","phoneNumber":"xxxxxxxxxx","email":"xxxxxxxxxxxxxxxxxx","department":"IT Bimodal"},"contractID":"1","service":{"code":"504","name":"Deve","category":{"name":"Developers"}}} },
    headers: { "Content-Type": "application/json" }
  };
  client.post("xxxxxxxxxxxxxxxxxxxxxxxxx/services/request/create", dataRequest, function (data, response) {
    // parsed response body as js object
    // console.log(data);
    // raw response
    console.log(response);
  });
}
Thanks in advance.
In this case the problem is new Client(); the client is already defined on line 2, so it isn't necessary to declare it again inside openRequest.
I used this code and it works fine.
var Client = require('./lib/node-rest-client').Client;
var client = new Client(); // defined once

var dataLogin = {
  data: { "userName": "xxxxx", "password": "xxxxxxxxxx", "platform": "xxxx" },
  headers: { "Content-Type": "application/json" }
};
var numberOrigin = 350;

client.registerMethod("postMethod", "xxxxxxxxxxxxxxxxxx/services/login", "POST");
client.methods.postMethod(dataLogin, function (data, response) {
  // parsed response body as js object
  // console.log(data);
  // raw response
  if (Buffer.isBuffer(data)) {
    data = data.toString('utf8');
    console.log(data);
    re = /(sessionID: )([^,}]*)/g;
    match = re.exec(data);
    var sessionid = match[2]
    console.log(sessionid);
    openRequest(sessionid, numberOrigin); // executes fine
  }
});

function openRequest(sessionid, numberOrigin) {
  numberOrigin += 1;
  var dataRequest = {
    data: {"sessionID":sessionid,"synchronize":false,"sourceRequest":{"numberOrigin":numberOrigin,"type":"R","description":"Test - DHC","userID":"xxxxxxxxxx","contact":{"name":"Sayuri Mizuguchi","phoneNumber":"xxxxxxxxxx","email":"xxxxxxxxxxxxxxxxxx","department":"IT Bimodal"},"contractID":"1","service":{"code":"504","name":"Deve","category":{"name":"Developers"}}} },
    headers: { "Content-Type": "application/json" }
  };
  client.post("xxxxxxxxxxxxxxxxxxxxxxxxx/services/request/create", dataRequest, function (data, response) {
    // parsed response body as js object
    // console.log(data);
    // raw response
    console.log(data);
  });
}
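As a side note (my sketch, not from the original answer): the same two-step flow reads a bit more clearly if the login call is wrapped in a promise, so the second request only runs once the first one has resolved:

function login() {
  return new Promise(function (resolve, reject) {
    client.methods.postMethod(dataLogin, function (data) {
      try {
        if (Buffer.isBuffer(data)) {
          data = data.toString('utf8');
        }
        var match = /(sessionID: )([^,}]*)/g.exec(data);
        resolve(match[2]); // the extracted sessionid
      } catch (e) {
        reject(e);
      }
    });
  });
}

login()
  .then(function (sessionid) { openRequest(sessionid, numberOrigin); })
  .catch(function (err) { console.log(err); });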
