How do you upload a chunked video to twitter using node - javascript

How would you upload a video to twitter using the POST media/upload (chunked) endpoint with node?

This goes through all of the steps outlined in the link above: INIT, APPEND, FINALIZE and STATUS
var bufferLength, filePath, finished, fs, oauthCredentials, offset, request, segment_index, theBuffer;
request = require('request');
fs = require('fs');
filePath = '/thevideo.mp4';
bufferLength = 1000000;
theBuffer = new Buffer(bufferLength);
offset = 0;
segment_index = 0;
finished = 0;
oauthCredentials = {
consumer_key: '',
consumer_secret: '',
token: '',
token_secret: ''
};
fs.stat(filePath, function(err, stats) {
var formData, normalAppendCallback, options;
formData = {
command: "INIT",
media_type: 'video/mp4',
total_bytes: stats.size
};
options = {
url: 'https://upload.twitter.com/1.1/media/upload.json',
oauth: oauthCredentials,
formData: formData
};
normalAppendCallback = function(media_id) {
return function(err, response, body) {
finished++;
if (finished === segment_index) {
options.formData = {
command: 'FINALIZE',
media_id: media_id
};
request.post(options, function(err, response, body) {
console.log('FINALIZED',response.statusCode,body);
delete options.formData;
//Note: This is not working as expected yet.
options.qs = {
command: 'STATUS',
media_id: media_id
};
request.get(options, function(err, response, body) {
console.log('STATUS: ', response.statusCode, body);
});
});
}
};
};
request.post(options, function(err, response, body) {
var media_id;
media_id = JSON.parse(body).media_id_string;
fs.open(filePath, 'r', function(err, fd) {
var bytesRead, data;
while (offset < stats.size) {
bytesRead = fs.readSync(fd, theBuffer, 0, bufferLength, null);
data = bytesRead < bufferLength ? theBuffer.slice(0, bytesRead) : theBuffer;
options.formData = {
command: "APPEND",
media_id: media_id,
segment_index: segment_index,
media_data: data.toString('base64')
};
request.post(options, normalAppendCallback(media_id));
offset += bufferLength;
segment_index++
}
});
});
});

Please try this
const splitFile = require('split-file')
const Twitter = require('twitter')
const fs = require('fs-extra')
const Promise = require('bluebird')
const pathToMovie = __dirname + '/test/152.mp4';
const mediaType = 'video/mp4' // `'video/mp4'` is also supported
let Names
const mediaSize = require('fs').statSync(pathToMovie).size
/* Twitter support Maximum 15MB video files. So we need to split this
file in to three files */
splitFile.splitFile(pathToMovie, 3)
.then((names) => {
Names = names
return init()
})
.catch((err) => {
console.log('Error: ', err)
})
const client = new Twitter({
consumer_key: '<your consumer_key >',
consumer_secret: '<your consumer_secret >',
access_token_key: '<your access_token_key >',
access_token_secret: '<access_token_secret>'
});
const init = () => {
initTweetUpload(mediaSize, mediaType) // Declare that you wish to upload some media
.then(appendTweetUpload) // Send the data for the media
.then(appendTweetUpload) // Send the data for the media
.then(appendTweetUpload) // Send the data for the media
.then(finalizeTweetUpload) // Declare that you are done uploading chunks
// eslint-disable-next-line promise/always-return
.then((data) => {
const status = {
media_ids: data,
status: 'NodeJS Media Upload',
}
client.post('statuses/update', status, (error, tweet, response) => {
console.log(error)
console.log(tweet)
})
}).catch((err) => {
console.log('Error: ', err)
})
}
const initTweetUpload = (mediaSize, mediaType) => makePost('media/upload',
{
command: 'INIT',
total_bytes: mediaSize,
media_type: mediaType,
}).then((data) => data.media_id_string)
let i = 0
const appendTweetUpload = (mediaId) => {
const p = Names.shift()
/* mediaData is the raw binary file content being uploaded ,It must be
<= 5 MB */
const mediaData = fs.readFileSync(p)
return makePost('media/upload', {
command: 'APPEND',
media_id: mediaId,
media: mediaData,
segment_index: i++,
}).then((data) => mediaId)
}
const finalizeTweetUpload = (mediaId) => makePost('media/upload', {
command: 'FINALIZE',
media_id: mediaId,
}).then((data) => mediaId)
const makePost = (endpoint, params) =>
// params.media_category = 'tweet_video';
new Promise((resolve, reject) => {
client.post(endpoint, params, (error, data, response) => {
if (error) {
reject(error)
} else {
resolve(data)
}
})
})
dependencies
1. https://www.npmjs.com/package/twitter
2. https://www.npmjs.com/package/split-file

Related

How do I need to collect my data in the object for post request?

I just want to send request, but in the end of function my object is{img:'',text: ''}.
I expect to get .jpg file name in obj.img and text inside .txt file in obj.text. So here we go:
Maybe it's something about scope. I don't know.
const path = require('path');
const fs = require('fs');
const request = require('request');
const axios = require('axios');
let shortcode = '';
async function getData() {
const obj = {
img: '',
text: '',
};
await fetch('http://127.0.0.1:8000/api')
.then(response => response.json())
.then(data => (shortcode = data[data.length - 1].shortcode));
fs.readdir(
`path/${shortcode}`,
(err, data) => {
if (err) {
throw err;
}
data.forEach(file => {
if (path.extname(file) === '.jpg') {
obj.img = file;
} else if (path.extname(file) === '.txt') {
fs.readFile(
`path/${shortcode}/${file}`,
'utf-8',
(err, data) => {
if (err) {
throw err;
}
obj.text = data;
console.log(data);
},
);
}
});
},
);
console.log(obj);
await fetch('http://127.0.0.1:8000/instaData', {
method: 'POST',
body: JSON.stringify({
img: obj.img,
text: obj.text,
}),
headers: {
'Content-Type': 'application/json;charset=utf-8',
},
});
}
getData();

Azure function don't accept to create file on remote

I would download file on local the create a stream then send to an API.
In localhost files get created via blobClient.downloadToFile(defaultFile);
But When I deploy function it can not find file to stream, so I think that the download does not happen or in bad location.
I get this error
[Error: ENOENT: no such file or directory, open 'D:\home\site\wwwroot\importPbix\exampleName.pbix'
Here's my code
const blobServiceClient = BlobServiceClient.fromConnectionString(
process.env.CONNEXION_STRING
);
const containerClient = blobServiceClient.getContainerClient(
params.containerName
);
const blobClient = containerClient.getBlobClient(process.env.FILE_LOCATION); // get file from storage
let blobData;
var defaultFile = path.join(params.baseDir, `${params.reportName}.pbix`); // use path module
let stream;
try {
blobData = await blobClient.downloadToFile(defaultFile);
console.log(blobData);
stream = fs.createReadStream(defaultFile);
} catch (error) {
params.context.log(error);
console.log(error);
}
var options = {
method: "POST",
url: `https://api.powerbi.com/v1.0/myorg/groups/${params.groupId}/imports?datasetDisplayName=${params.reportName}`,
headers: {
"Content-Type": "multipart/form-data",
Authorization: `Bearer ${params.accessToken} `,
},
formData: {
"": {
value: stream,
options: {
filename: `${params.reportName}.pbix`,
contentType: null,
},
},
},
};
//check if file keep in mem
return new Promise(function (resolve, reject) {
request(options, function (error, response) {
if (error) {
params.context.log(error);
reject(error);
} else {
params.context.log(response);
resolve(response.body);
}
fs.unlinkSync(defaultFile);
});
});
I found this post having same issue , that's why I user path module and passed __dirname to function params.baseDir.
If you want to download a file from Azure blob and read it as a stream, just try the code below, in this demo, I try to download a .txt file to a temp folder(you should create it first on Azure function)and print its content from the stream for a quick test:
module.exports = async function (context, req) {
const { BlockBlobClient } = require("#azure/storage-blob")
const fs = require('fs')
const connStr = '<connection string>'
const container = 'files'
const blobName = 'test.txt'
const tempPath = 'd:/home/temp/'
const tempFilePath = tempPath + blobName
const blobClient = new BlockBlobClient(connStr,container,blobName);
await blobClient.downloadToFile(tempFilePath).then(async function(){
context.log("download successfully")
let stream = fs.createReadStream(tempFilePath)
//Print text content,just check if stream has been readed successfully
context.log("text file content:")
context.log(await streamToString(stream))
//You can call your API here...
})
function streamToString (stream) {
const chunks = [];
return new Promise((resolve, reject) => {
stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
stream.on('error', (err) => reject(err));
stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8')));
})
}
context.res = {
body: 'done'
}
}
Result
File has been downloaded:
read as stream successfully:

Async/Await Method Issue

I am working on MERN Stack Application(Mean,Express,ReactJS,NodeJS). I have one issue is that I have many more methods in mlcontroller.js page and I call some methods on REST API so I call that methods under that API from mlrouter.js but all that API is Async so currently API takes data slots vise means I give u an example that in one time take 100 data from first method and then pass to another method and pass from all methods again come to first method and take next 100 data and repeat same process again but I need to take all data in one time means untill one method will not complete not move on another method how's that possible with node js?
I place my code here :
mlrouter.js
ensureApiAuthenticated,
authController.checkReadOnlyUser,
mlController.getAPIData,
mlController.getData,
mlController.getCRCDetails,
mlController.getDetails,
mlController.uploadData
)
MlController.js
async function getAPIData(req, res, next) {
try {
let loanboardapi = " ", dealersocket = " ";
loanboardapi = {
url: "https://loanboard.houstondirectauto.com/api/User/GetAuthorizationToken?username=amin#houstondirectauto.com&password=test#123",
method: "GET"
};
dealersocket = {
url: 'https://idms.dealersocket.com/api/authenticate/GetUserAuthorizationToken?username=ankur#houstondirectauto.com&password=H5d465#!ddfdd45dsfd688&InstitutionID=105815',
method: 'GET'
};
request(loanboardapi,
(err, res, body) => {
console.log("res = ", res);
console.log("body =", body);
loantoken = JSON.parse(body).token;
console.log(loantoken);
});
request(dealersocket,
(err, res, body) => {
console.log("res = ", res);
console.log("body =", body);
dealertoken = JSON.parse(body).Token;
console.log(dealertoken);
next();
});
}
catch (e) {
req.error = e;
next();
}
}
function getData(req, res, next) {
try {
let result;
request.get('https://idms.dealersocket.com/api/account/getaccountlist?token=' + dealertoken + '&LayoutID=2002313&PageNumber=1&accounttype=i&accountstatus=a,c,b,o,r,s,x',
(err, res, body) => {
console.log("res = ", res);
console.log("body =", body);
result = JSON.parse(body);
console.log(result);
totalpage = parseInt(result.TotalPages);
let resultdata = Object.assign({}, result.Data);
console.log(resultdata);
//getSSN(totalpage, dealertoken, next);
next();
})
}
catch (e) {
req.error = e;
next();
}
}
async function getCRCDetails(req,res,next) {
async.eachSeries(ssn, async (item) => {
let CBCOptions = {
method: "POST",
url: "https://loanboard.houstondirectauto.com/api/Report",
headers: {
"Content-Type": "application/json",
Cookie: "ci_session=udmojmlc5tfl3epbrmtvgu6nao2f031p",
},
body: JSON.stringify({
token: loantoken,
action: "CBCReport",
variables: {
ssn: item,
},
}),
};
let EMpInfoption = {
method: "POST",
url: "https://loanboard.houstondirectauto.com/api/Report",
headers: {
"Content-Type": "application/json",
Cookie: "ci_session=udmojmlc5tfl3epbrmtvgu6nao2f031p",
},
body: JSON.stringify({
token: loantoken,
action: "getEmployerInfo",
variables: {
ssn: item,
},
}),
};
try {
let resultCBCOptions = await requestpromise(CBCOptions);
let EmployerInfoOptions = await requestpromise(EMpInfoption);
console.log(resultCBCOptions)
console.log(EmployerInfoOptions)
CRCOptions.push(resultCBCOptions);
EmpOption.push(EmployerInfoOptions);
} catch (error) {
console.log(error);
}
},
() => {
next();
}
);
}
async function getDetails(req,res,next) {
for(let i =0;i<CRCOptions.length;i++){
for(let j=0;j<EmpOption.length;j++){
let resdata = JSON.parse(CRCOptions[i]);
console.log(resdata);
result = resdata.data.DigifiResponse;
console.log(result);
let bodydata = JSON.parse(EmpOption[i]).data;
let crcssn = JSON.parse(CRCOptions[i]).ssn;
let empssn = JSON.parse(EmpOption[i]).ssn;
console.log("CRCSSN=",crcssn);
console.log("EMPSSN=",empssn);
if(crcssn == empssn)
{
for(let r=0;r<result.length;r++){
let crcdata = result[r];
console.log(crcdata);
for(let b=0;b<bodydata.length;b++) {
let annual_income;
console.log(bodydata[b]);
let mergedata = Object.assign(crcdata, bodydata[b]);
console.log("merge", mergedata);
if (mergedata["IncomeFrequency"] == "Monthly") {
annual_income = (parseInt(mergedata["Income"]) * 12).toString();
console.log(annual_income);
}
else {
annual_income = mergedata["Income"];
}
let binary = {
"accounts_opened": mergedata["total_number_of_open_accounts"],
"bankruptcies": mergedata["total_number_of_bankruptcies"],
"collections": mergedata["total_number_of_collections"],
"credit_inquiries_last_6_months": mergedata["total_number_of_inquires_in_the_last_6_months"],
"past_due_accounts": mergedata["total_number_of_accounts_currently_past_due"],
"open_accounts": mergedata["total_number_of_open_accounts"],
"high_credit_limit": mergedata["total_credit_limit_amount"],
"annual_income": annual_income
}
console.log(binary);
let arraybinary = Object.assign({},binary);
console.log(arraybinary);
binarydata.push(arraybinary);
console.log(binarydata);
let categorical = {
"bankruptcies_last_18_months": mergedata["count_of_bankruptcies_last_24_months"],
"credit_inquiries_last_6_months": mergedata["count_of_auto_loan_inquiries_last_9_months"],
"months_since_most_recent_inquiry": mergedata["total_number_of_inquires_in_the_last_6_months"],
"ninety_plus_delinquencies_last_18_months": mergedata["total_number_of_accounts_with_90180_day_delinquencies"],
"number_of_accounts_currently_30dpd": mergedata["total_number_of_accounts_with_3059_day_delinquencies"],
"open_credit_accounts": mergedata["total_number_of_open_auto_accounts"],
"pre_loan_debt_to_income": mergedata["total_amount_of_credit_debt"],
"total_current_balance": mergedata["total_account_balance"],
"total_high_credit_limit": mergedata["total_credit_limit_amount"],
"annual_income": annual_income
}
console.log(categorical);
let arraycategory = Object.assign({},categorical);
console.log(arraycategory);
categoricaldata.push(arraycategory);
let Linear = {
"bankruptcies_last_18_months": mergedata["count_of_bankruptcies_last_24_months"],
"credit_inquiries_last_6_months": mergedata["count_of_auto_loan_inquiries_last_9_months"],
"months_since_most_recent_inquiry": mergedata["total_number_of_inquires_in_the_last_6_months"],
"ninety_plus_delinquencies_last_18_months": mergedata["total_number_of_accounts_with_90180_day_delinquencies"],
"number_of_accounts_currently_30dpd": mergedata["total_number_of_accounts_with_3059_day_delinquencies"],
"open_credit_accounts": mergedata["total_number_of_open_auto_accounts"],
"pre_loan_debt_to_income": mergedata["total_amount_of_credit_debt"],
"total_current_balance": mergedata["total_account_balance"],
"total_high_credit_limit": mergedata["total_credit_limit_amount"],
"annual_income": annual_income
}
console.log(Linear);
let arraylinear = Object.assign({},Linear);
console.log(arraylinear);
Lineardata.push(arraylinear);
}
}
}
break;
}
}
console.log(binarydata.length);
console.log(binarydata);
converter.json2csv(binarydata,(err,csv) => {
if(err)throw err;
console.log(csv);
file.writeFileSync('/home/rita_gatistavam/Downloads/CSV/binarydata.csv',csv);
console.log('File Written');
})
converter.json2csv(Lineardata,(err,csv) => {
if(err)throw err;
console.log(csv);
file.writeFileSync('/home/rita_gatistavam/Downloads/CSV/lineardata.csv',csv);
console.log('File Written');
})
converter.json2csv(categoricaldata,(err,csv) => {
if(err)throw err;
console.log(csv);
file.writeFileSync('/home/rita_gatistavam/Downloads/CSV/categorydata.csv',csv);
console.log('File Written');
})
next();
}
async function uploadData(req,res,next){
let moduletype = sessionStorage.getItem('moduletype');
console.log(moduletype);
req.params.id = sessionStorage.getItem('modelid');
console.log(req.params.id);
try {
res.status(200).send({
status: 200,
timeout: 10000,
type: 'success',
text: 'Changes saved successfully!',
successProps: {
successCallback: 'func:window.createNotification',
},
responseCallback: 'func:this.props.reduxRouter.push',
pathname: `/ml/models/${req.params.id}/training/historical_data_${moduletype}`,
});
} catch (e) {
periodic.logger.warn(e.message);
res.status(500).send({ message: 'Error updating model type.', });
}
}```
Cannot understand your question, but I assume you want to get all the async request in one go.
You can achieve this with Promise.all, all the results will be returned as an array, and all the request will run at the same time.
const results = await Promise.all([asyncRequest1, asyncRequest2, asyncRequest3])
getting resulsts sequentially.
await asyncRequest1();
await asyncRequest2();
await asyncRequest3();

Can you append the result of multiple 'get' requests and send that out as a 'get 'response?

I've been toying with this for the past few hours and I can't for the life of me figure out why it's returning a list of nulls.
function getFile(file) {
var request = require("request");
request(
{
uri:
file,
method: "GET",
gzip: true,
qs: {
format: "json",
},
},
(err, resp, body) => {
console.log(body);
return body
}
)
}
async function download_tire_json_from_firebase() {
let files = ['https://firebasestorage.googleapis.com/v0/b/moberra-tire-api.appspot.com/o/tire-data-eco%2Fforceum.json?alt=media&token=ed9db67c-d744-414f-8812-21cfbba63447',
'https://firebasestorage.googleapis.com/v0/b/moberra-tire-api.appspot.com/o/tire-data-eco%2Fgood-year.json?alt=media&token=1131b553-e2e5-4ff6-87cb-f346e57e389e',
'https://firebasestorage.googleapis.com/v0/b/moberra-tire-api.appspot.com/o/tire-data-eco%2Fgt-radial.json?alt=media&token=d2ba3f51-cef7-494e-8006-61d321d677cd',
'https://firebasestorage.googleapis.com/v0/b/moberra-tire-api.appspot.com/o/tire-data-eco%2Fkpatos.json?alt=media&token=75743c44-1521-4ff4-8133-e56a474f7c3e',
'https://firebasestorage.googleapis.com/v0/b/moberra-tire-api.appspot.com/o/tire-data-eco%2Fland-spider.json?alt=media&token=22a3aa7b-18a9-4bd1-b34b-5ae48cadbc55',
'https://firebasestorage.googleapis.com/v0/b/moberra-tire-api.appspot.com/o/tire-data-eco%2Fleao-tires.json?alt=media&token=84dcc7f8-de32-46a9-9aae-854639772975',
'https://firebasestorage.googleapis.com/v0/b/moberra-tire-api.appspot.com/o/tire-data-eco%2Fwanli.json?alt=media&token=4bb13f3a-5d9a-4c2d-b558-964ee996a534',
'https://firebasestorage.googleapis.com/v0/b/moberra-tire-api.appspot.com/o/tire-data-eco%2Fzeta.json?alt=media&token=c04f973c-2577-4f06-805e-68e46480e9ae'
]
const results = []
for(const file of files) {
results.push(getFile(file));
}
console.log("Final Result:", results)
return await Promise.all(results)
}
app.get("/get-tire-data", async (req, res) => {
let results = await download_tire_json_from_firebase()
res.send(JSON.stringify(results))
})
What's odd is that in my node console, '' console.log("Final Result:", results) '', prints out the expected list. It's as if somehow that list isn't making it to the 'get-tire-data' get request.
Also if anyone knows how to simplify the above code, I'd also appreciate that thoroughly.
You need to return a Promise in getFile
function getFile(file) {
var request = require("request");
return new Promise((resolve, reject) => {
request(
{
uri:
file,
method: "GET",
gzip: true,
qs: {
format: "json",
},
},
(err, resp, body) => {
if (err) {
reject(err)
} else {
resolve(body)
}
}
)
})
}
You can simplify your code with a library I created.
const { map } = require('rubico')
const request = require('request')
const urls = [
'https://firebasestorage.googleapis.com/v0/b/moberra-tire-api.appspot.com/o/tire-data-eco%2Fforceum.json?alt=media&token=ed9db67c-d744-414f-8812-21cfbba63447',
'https://firebasestorage.googleapis.com/v0/b/moberra-tire-api.appspot.com/o/tire-data-eco%2Fgood-year.json?alt=media&token=1131b553-e2e5-4ff6-87cb-f346e57e389e',
'https://firebasestorage.googleapis.com/v0/b/moberra-tire-api.appspot.com/o/tire-data-eco%2Fgt-radial.json?alt=media&token=d2ba3f51-cef7-494e-8006-61d321d677cd',
'https://firebasestorage.googleapis.com/v0/b/moberra-tire-api.appspot.com/o/tire-data-eco%2Fkpatos.json?alt=media&token=75743c44-1521-4ff4-8133-e56a474f7c3e',
'https://firebasestorage.googleapis.com/v0/b/moberra-tire-api.appspot.com/o/tire-data-eco%2Fland-spider.json?alt=media&token=22a3aa7b-18a9-4bd1-b34b-5ae48cadbc55',
'https://firebasestorage.googleapis.com/v0/b/moberra-tire-api.appspot.com/o/tire-data-eco%2Fleao-tires.json?alt=media&token=84dcc7f8-de32-46a9-9aae-854639772975',
'https://firebasestorage.googleapis.com/v0/b/moberra-tire-api.appspot.com/o/tire-data-eco%2Fwanli.json?alt=media&token=4bb13f3a-5d9a-4c2d-b558-964ee996a534',
'https://firebasestorage.googleapis.com/v0/b/moberra-tire-api.appspot.com/o/tire-data-eco%2Fzeta.json?alt=media&token=c04f973c-2577-4f06-805e-68e46480e9ae'
]
const getFile = url => new Promise((resolve, reject) => {
request({
uri: url,
method: "GET",
gzip: true,
qs: { format: "json" },
}, (err, resp, body) => err ? reject(err) : resolve(body))
})
// urls => tire_json_data
const download_tire_json_from_firebase = map(getFile)
app.get("/get-tire-data", async (req, res) => {
let results = await download_tire_json_from_firebase(urls)
res.send(JSON.stringify(results))
})
Promise.all expects an array of promises and so getFile method should return a promise.
const request = require('request');
const files = [
'https://firebasestorage.googleapis.com/v0/b/moberra-tire-api.appspot.com/o/tire-data-eco%2Fforceum.json?alt=media&token=ed9db67c-d744-414f-8812-21cfbba63447',
'https://firebasestorage.googleapis.com/v0/b/moberra-tire-api.appspot.com/o/tire-data-eco%2Fgood-year.json?alt=media&token=1131b553-e2e5-4ff6-87cb-f346e57e389e',
'https://firebasestorage.googleapis.com/v0/b/moberra-tire-api.appspot.com/o/tire-data-eco%2Fgt-radial.json?alt=media&token=d2ba3f51-cef7-494e-8006-61d321d677cd',
'https://firebasestorage.googleapis.com/v0/b/moberra-tire-api.appspot.com/o/tire-data-eco%2Fkpatos.json?alt=media&token=75743c44-1521-4ff4-8133-e56a474f7c3e',
'https://firebasestorage.googleapis.com/v0/b/moberra-tire-api.appspot.com/o/tire-data-eco%2Fland-spider.json?alt=media&token=22a3aa7b-18a9-4bd1-b34b-5ae48cadbc55',
'https://firebasestorage.googleapis.com/v0/b/moberra-tire-api.appspot.com/o/tire-data-eco%2Fleao-tires.json?alt=media&token=84dcc7f8-de32-46a9-9aae-854639772975',
'https://firebasestorage.googleapis.com/v0/b/moberra-tire-api.appspot.com/o/tire-data-eco%2Fwanli.json?alt=media&token=4bb13f3a-5d9a-4c2d-b558-964ee996a534',
'https://firebasestorage.googleapis.com/v0/b/moberra-tire-api.appspot.com/o/tire-data-eco%2Fzeta.json?alt=media&token=c04f973c-2577-4f06-805e-68e46480e9ae'
];
const getFile = (url) => new Promise((resolve, reject) => {
request({
uri: url,
method: "GET",
gzip: true,
qs: {
format: "json"
},
},
(err, resp, body) => err ? reject(err) : resolve(body)
)
});
app.get("/get-tire-data", async(req, res) => {
const results = await Promise.all(files.map(url => getFile(url)));
res.send(JSON.stringify(results))
});

Amazon S3 Remote File Upload with Axios

I am trying to write a function that would:
Take a remote URL as a parameter,
Get the file using axios
Upload the stream to amazon s3
And finally, return the uploaded url
I found help here on stackoverflow. So far, I have this:
/*
* Method to pipe the stream
*/
const uploadFromStream = (file_name, content_type) => {
const pass = new stream.PassThrough();
const obj_key = generateObjKey(file_name);
const params = { Bucket: config.bucket, ACL: config.acl, Key: obj_key, ContentType: content_type, Body: pass };
s3.upload(params, function(err, data) {
if(!err){
return data.Location;
} else {
console.log(err, data);
}
});
return pass;
}
/*
* Method to upload remote file to s3
*/
const uploadRemoteFileToS3 = async (remoteAddr) => {
axios({
method: 'get',
url: remoteAddr,
responseType: 'stream'
}).then( (response) => {
if(response.status===200){
const file_name = remoteAddr.substring(remoteAddr.lastIndexOf('/')+1);
const content_type = response.headers['content-type'];
response.data.pipe(uploadFromStream(file_name, content_type));
}
});
}
But uploadRemoteFileToS3 does not return anything (because it's a asynchronous function). How can I get the uploaded url?
UPDATE
I have further improved upon the code and wrote a class. Here is what I have now:
const config = require('../config.json');
const stream = require('stream');
const axios = require('axios');
const AWS = require('aws-sdk');
class S3RemoteUploader {
constructor(remoteAddr){
this.remoteAddr = remoteAddr;
this.stream = stream;
this.axios = axios;
this.config = config;
this.AWS = AWS;
this.AWS.config.update({
accessKeyId: this.config.api_key,
secretAccessKey: this.config.api_secret
});
this.spacesEndpoint = new this.AWS.Endpoint(this.config.endpoint);
this.s3 = new this.AWS.S3({endpoint: this.spacesEndpoint});
this.file_name = this.remoteAddr.substring(this.remoteAddr.lastIndexOf('/')+1);
this.obj_key = this.config.subfolder+'/'+this.file_name;
this.content_type = 'application/octet-stream';
this.uploadStream();
}
uploadStream(){
const pass = new this.stream.PassThrough();
this.promise = this.s3.upload({
Bucket: this.config.bucket,
Key: this.obj_key,
ACL: this.config.acl,
Body: pass,
ContentType: this.content_type
}).promise();
return pass;
}
initiateAxiosCall() {
axios({
method: 'get',
url: this.remoteAddr,
responseType: 'stream'
}).then( (response) => {
if(response.status===200){
this.content_type = response.headers['content-type'];
response.data.pipe(this.uploadStream());
}
});
}
dispatch() {
this.initiateAxiosCall();
}
async finish(){
//console.log(this.promise); /* return Promise { Pending } */
return this.promise.then( (r) => {
console.log(r.Location);
return r.Location;
}).catch( (e)=>{
console.log(e);
});
}
run() {
this.dispatch();
this.finish();
}
}
But still have no clue how to catch the result when the promise is resolved. So far, I tried these:
testUpload = new S3RemoteUploader('https://avatars2.githubusercontent.com/u/41177');
testUpload.run();
//console.log(testUpload.promise); /* Returns Promise { Pending } */
testUpload.promise.then(r => console.log); // does nothing
But none of the above works. I have a feeling I am missing something very subtle. Any clue, anyone?
After an upload you can call the getsignedurl function in s3 sdk to get the url where you can also specify the expiry of the url as well. You need to pass the key for that function. Now travelling will update with example later.
To generate a simple pre-signed URL that allows any user to view the
contents of a private object in a bucket you own, you can use the
following call to getSignedUrl():
var s3 = new AWS.S3();
var params = {Bucket: 'myBucket', Key: 'myKey'};
s3.getSignedUrl('getObject', params, function (err, url) {
console.log("The URL is", url);
});
Official documentation link
http://docs.amazonaws.cn/en_us/AWSJavaScriptSDK/guide/node-examples.html
Code must be something like this
function uploadFileToS3AndGenerateUrl(cb) {
const pass = new stream.PassThrough();//I have generated streams from file. Using this since this is what you have used. Must be a valid one.
var params = {
Bucket: "your-bucket", // required
Key: key , // required
Body: pass,
ContentType: 'your content type',
};
s3.upload(params, function(s3Err, data) {
if (s3Err) {
cb(s3Err)
}
console.log(`File uploaded successfully at ${data.Location}`)
const params = {
Bucket: 'your-bucket',
Key: data.key,
Expires: 180
};
s3.getSignedUrl('getObject', params, (urlErr, urlData) => {
if (urlErr) {
console.log('There was an error getting your files: ' + urlErr);
cb(urlErr);
} else {
console.log(`url: ${urlData}`);
cb(null, urlData);
}
})
})
}
Please check i have update your code might its help you.
/*
* Method to upload remote file to s3
*/
const uploadRemoteFileToS3 = async (remoteAddr) => {
const response = await axios({
method: 'get',
url: remoteAddr,
responseType: 'stream'
})
if(response.status===200){
const file_name = remoteAddr.substring(remoteAddr.lastIndexOf('/')+1);
const content_type = response.headers['content-type'];
response.data.pipe(uploadFromStream(file_name, content_type));
}
return new Promise((resolve, reject) => {
response.data.on('end', (response) => {
console.log(response)
resolve(response)
})
response.data.on('error', () => {
console.log(response);
reject(response)
})
})
};
*
* Method to pipe the stream
*/
const uploadFromStream = (file_name, content_type) => {
return new Promise((resolve, reject) => {
const pass = new stream.PassThrough();
const obj_key = generateObjKey(file_name);
const params = { Bucket: config.bucket, ACL: config.acl, Key: obj_key, ContentType: content_type, Body: pass };
s3.upload(params, function(err, data) {
if(!err){
console.log(data)
return resolve(data.Location);
} else {
console.log(err)
return reject(err);
}
});
});
}
//call uploadRemoteFileToS3
uploadRemoteFileToS3(remoteAddr)
.then((finalResponse) => {
console.log(finalResponse)
})
.catch((err) => {
console.log(err);
});

Categories

Resources