I need to upload images to Amazon S3 and store the links returned in the response in a database.
I'm using Bluebird.js. How do I pass the values collected in one promise step on to the next?
Promise.resolve()
    .then(function () {
        var promImageLoc = [];
        var upload = $scope.uploaderPromo.queue;
        $scope.uploaderPromo.uploadAll();
        $scope.uploaderPromo.onSuccessItem = function (fileItem, response, status, headers) {
            console.log('success response', response);
            var promImage = promImageLoc.push(response.location);
            return promImage;
        };
        console.log(promImage); // promImage only exists inside onSuccessItem
    })
    .then(function (promImageLoc) {
        console.log(promImageLoc); // undefined: the previous step returned nothing
    })
Update: I'm a beginner MEAN-stack developer, using angular-file-upload and multer to pass files from the client to the server. All I need now is to create an entry in MongoDB containing the resulting links. The links arrive in callbacks one after another because the files are posted one by one, so the entry does get created, but with empty arrays. I know why that happens, but I don't know how to solve it. Here is the full code of the function that sends the data from the client:
$scope.createPromo = function () {
    Promise.resolve()
        .then(function () {
            var promImageLoc = [];
            var upload = $scope.uploaderPromo.queue;
            $scope.uploaderPromo.uploadAll();
            async.eachSeries(upload, function (response) {
                $scope.uploaderPromo.onSuccessItem = function (fileItem, response, status, headers) {
                    console.log('success response', response);
                    return promImageLoc.push(response.location);
                };
            });
        })
        .then(function () {
            var promLogoLoc = [];
            $scope.uploaderLogo.uploadAll();
            $scope.uploaderLogo.onSuccessItem = function (fileItem, response, status, headers) {
                console.log('success response', response);
                var location = response.location;
                return location;
            };
            promLogoLoc.push(location); // location only exists inside onSuccessItem
            return promLogoLoc;
        })
        .then(function (promImageLoc, promLogoLoc) { // a .then callback only ever receives one argument
            if ($scope.promZipCode == undefined) {
                $scope.promZipCode = '07946';
            }
            $http.post("/promo/create", {
                'promName': $cookieStore.get('User').user.username + '.' + $scope.promName, // username.promotion name
                'promDesc': $scope.promDesc,
                'promImage': promImageLoc,
                'promLock': $scope.promLock,
                'promLogo': promLogoLoc,
                'promLogoPos': $scope.selectPos,
                'promNotice': $scope.promNotice,
                'promZipCode': $scope.promZipCode, // {obj}
                'promRadius': $scope.promRadius
            })
            .success(function (data, status) {
                console.log(status);
                $state.go('home.promotion.promotions');
            }).error(function (err) {
                $scope.isUniqError = true;
                console.log('CREATE ERR', err);
            });
        })
        .catch(function (error) {
            console.log('ERROR PROMISE:', error);
        });
};
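A value reaches the next .then only if the previous .then returns it (or returns a promise that resolves to it). One way to do that with a callback-driven uploader is to wrap the upload in a promise. Below is a minimal sketch only, assuming the uploader exposes onSuccessItem, onErrorItem and onCompleteAll callbacks as angular-file-upload does; the uploadQueue helper name is made up for illustration.

// Sketch: wrap a callback-driven uploader in a promise so the collected
// locations can be returned to the next .then step.
function uploadQueue(uploader) {
    return new Promise(function (resolve, reject) {
        var locations = [];
        uploader.onSuccessItem = function (fileItem, response, status, headers) {
            locations.push(response.location);
        };
        uploader.onErrorItem = function (fileItem, response, status, headers) {
            reject(response);
        };
        uploader.onCompleteAll = function () {
            resolve(locations);
        };
        uploader.uploadAll();
    });
}

uploadQueue($scope.uploaderPromo)
    .then(function (promImageLoc) {
        return uploadQueue($scope.uploaderLogo).then(function (promLogoLoc) {
            // returning a value here makes it available to the next .then
            return { promImageLoc: promImageLoc, promLogoLoc: promLogoLoc };
        });
    })
    .then(function (result) {
        console.log(result.promImageLoc, result.promLogoLoc);
    });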
Currently I have a small JSON file with a list of URLs to fetch data from; the number of URLs can change at any time. I have built a small function that requests each URL and logs the JSON response, but now I need to combine the responses from all of them and send them back with res.json.
app.post('/api/:db/find', async (req, res) => {
    try {
        const db = req.params.db;
        databases.find({}, function (err, docs) {
            for (const val of docs) {
                var url = val.url + "/" + db + "/findmany";
                axios.post(url, req.body)
                    .then(function (response) {
                        var o1 = response.data;
                        var obj = Object.assign(o1);
                        console.log(obj);
                        //res.json(obj.data);
                    })
                    .catch(function (error) {
                        console.log(error);
                        res.json({ success: false });
                    });
            }
        });
    } catch (error) {
        console.log(error);
        res.json({ success: false });
    }
});
I was thinking the code would look something like this (pseudocode):
app.post('/api/:db/find', async (req, res) => {
    try {
        const db = req.params.db;
        // declare some (var) object to accumulate into
        databases.find({}, function (err, docs) {
            for (const val of docs) {
                var url = val.url + "/" + db + "/findmany";
                axios.post(url, req.body)
                    .then(function (response) {
                        // add(response) to the (var) object
                    })
                    .catch(function (error) {
                        console.log(error);
                        res.json({ success: false });
                    });
            }
            // when everything is finished, console.log the (var) object
            // with the JSON from all of the requests added
        });
    } catch (error) {
        console.log(error);
        res.json({ success: false });
    }
});
I have seen solutions where the existing JSON data gets overwritten when both the response and the JSON object contain the same value, and I want to avoid that.
I have tried storing the response data in a var, but it gets overwritten when the var and the response hold the same value.
I have also tried adding them together separately, but that produces numerous arrays, which I don't want.
How you combine the JSON responses depends entirely on their content. You can combine them as shown below, or write custom logic to merge the JSON depending on the actual responses and send the result.
app.post('/api/:db/find', async (req, res) => {
    try {
        const db = req.params.db;
        const jsonAggregate = [];
        databases.find({}, async function (err, docs) {
            try {
                for (const val of docs) {
                    var url = val.url + "/" + db + "/findmany";
                    let result = await axios.post(url, req.body);
                    // LOGIC FOR COMBINING JSON RESPONSES
                    jsonAggregate.push({ url: url, result: result });
                }
            } catch (error) {
                console.log(error);
                res.json({ success: false });
                return;
            }
            // when the loop has finished, respond with the JSON collected from all of the requests
            res.json({ success: true, result: jsonAggregate });
        });
    } catch (error) {
        console.log(error);
        res.json({ success: false });
    }
});
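If the requests do not have to run one at a time, a parallel variant using Promise.all is also possible. This is only a sketch under the same assumptions as the code above (the databases collection and axios are already set up):

app.post('/api/:db/find', async (req, res) => {
    const db = req.params.db;
    databases.find({}, async function (err, docs) {
        try {
            // fire all requests at once and wait for every response
            const results = await Promise.all(docs.map((val) => {
                const url = val.url + "/" + db + "/findmany";
                return axios.post(url, req.body).then((response) => ({ url: url, result: response.data }));
            }));
            res.json({ success: true, result: results });
        } catch (error) {
            console.log(error);
            res.json({ success: false });
        }
    });
});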
I am fetching data from an API in order to show sales and finance reports, but I receive a gzip file, which I have managed to convert into a Uint8Array. I'd like to parse/decode this into JSON that I can use to access the data and build charts on my frontend.
I have tried different libraries (pako and cborg seemed to be the closest to my use case), but I ultimately get an error: Error: CBOR decode error: unexpected character at position 0.
This is the code I have so far:
let req = https.request(options, function (res) {
    console.log("Header: " + JSON.stringify(res.headers));
    res.setEncoding("utf8");
    res.on("data", function (body) {
        const deflatedBody = pako.deflate(body);
        console.log("DEFLATED DATA -----> ", typeof deflatedBody, deflatedBody);
        console.log(decode(deflatedBody));
    });
    res.on("error", function (error) {
        console.log("connection could not be made " + error.message);
    });
});
req.end();
};
I hope someone has stumbled upon this already and has some ideas.
Thanks a lot!
Please see this answer, https://stackoverflow.com/a/12776856/16315663, on retrieving gzip data from the response.
Assuming you have already retrieved the full data as a Uint8Array, you just need to turn it into a string and parse it:
const jsonString = Buffer.from(dataAsU8Array).toString('utf8')
const parsedData = JSON.parse(jsonString)
console.log(parsedData)
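If JSON.parse still fails, the bytes may still be gzip-compressed rather than plain text (gzip data starts with the bytes 0x1f 0x8b). A minimal sketch, assuming dataAsU8Array holds the raw gzip body:

const zlib = require("zlib");

// gunzip the raw bytes first, then decode and parse
const jsonBuffer = zlib.gunzipSync(Buffer.from(dataAsU8Array));
const parsedData = JSON.parse(jsonBuffer.toString("utf8"));
console.log(parsedData);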
Edit
Here is what worked for me
const {request} = require("https")
const zlib = require("zlib")
const parseGzip = (gzipBuffer) => new Promise((resolve, reject) =>{
zlib.gunzip(gzipBuffer, (err, buffer) => {
if (err) {
reject(err)
return
}
resolve(buffer)
})
})
const fetchJson = (url) => new Promise((resolve, reject) => {
const r = request(url)
r.on("response", (response) => {
if (response.statusCode !== 200) {
reject(new Error(`${response.statusCode} ${response.statusMessage}`))
return
}
const responseBufferChunks = []
response.on("data", (data) => {
console.log(data.length);
responseBufferChunks.push(data)
})
response.on("end", async () => {
const responseBuffer = Buffer.concat(responseBufferChunks)
const unzippedBuffer = await parseGzip(responseBuffer)
resolve(JSON.parse(unzippedBuffer.toString()))
})
})
r.end()
})
fetchJson("https://wiki.mozilla.org/images/f/ff/Example.json.gz")
.then((result) => {
console.log(result)
})
.catch((e) => {
console.log(e)
})
Thank you, I actually just tried this approach and I get the following error:
SyntaxError: JSON Parse error: Unexpected identifier "x"
But I managed to print the data as text using the function below:
getFinancialReports = (options, callback) => {
    // buffer to store the streamed decompression
    var buffer = [];
    https
        .get(options, function (res) {
            // pipe the response into the gunzip to decompress
            var gunzip = zlib.createGunzip();
            res.pipe(gunzip);
            gunzip
                .on("data", function (data) {
                    // decompression chunk ready, add it to the buffer
                    buffer.push(data.toString());
                })
                .on("end", function () {
                    // response and decompression complete, join the buffer and return
                    callback(null, buffer.join(""));
                })
                .on("error", function (e) {
                    callback(e);
                });
        })
        .on("error", function (e) {
            callback(e);
        });
};
Now I just need to parse this into a JSON object.
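Since the decompressed text prints correctly, the remaining step is just parsing the joined string. A small sketch of the "end" handler, assuming the decompressed body is valid JSON (gunzip, buffer and callback as in the function above):

// Sketch: hand the caller a parsed object instead of a string
gunzip.on("end", function () {
    var text = buffer.join("");
    try {
        callback(null, JSON.parse(text));
    } catch (e) {
        callback(e); // the decompressed body was not valid JSON after all
    }
});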
I am trying to use async with Node.js to handle multiple incoming POST requests that edit a JSON file. No matter how I refactor it, it always applies one of the edits and not the other. I thought that using async.queue would force the operations to be handled sequentially? What am I doing wrong?
My code:
var editHandler = function (task, done) {
    var req = task.req;
    var res = task.res;
    fs.stat("./app//public/json/" + "data.json", function (err, stat) {
        if (err == null) {
            console.log('File exists');
        } else if (err.code == 'ENOENT') {
            console.log("Error");
        } else {
            console.log('Some other error: ', err.code);
        }
    });
    console.log(req.params.id);
    console.log(req.body);
    fs.readFile("./app//public/json/" + "data.json", 'utf8', function (err, data) {
        data = JSON.parse(data);
        data[req.params.id] = req.body.school;
        //console.log( data );
        fs.writeFile("./app//public/json/" + "data.json", JSON.stringify(data), function (err) {
            if (err) {
                return console.log(err);
            }
        });
        res.redirect('/');
    });
};

//Make a queue for the services
var serviceQ = async.queue(editHandler, 20);

serviceQ.drain = function () {
    console.log('all services have been processed');
};

app.post('/edit_school/:id', function (req, res) {
    serviceQ.push({ req: req, res: res });
});
Thanks in advance for any insights! I am really new to using node.js for anything other than npm/webpack.
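For what it's worth, one likely culprit is that editHandler never calls done, so async.queue has no way of knowing a task has finished before it starts the next one. Below is a sketch of the handler with done called only after the write completes; this is the pattern async.queue expects, not a confirmed fix for this exact setup:

// Sketch: signal the queue only once the read-modify-write cycle is done
var editHandler = function (task, done) {
    var req = task.req;
    var res = task.res;
    fs.readFile("./app/public/json/data.json", "utf8", function (err, data) {
        if (err) {
            res.sendStatus(500);
            return done(err);
        }
        var json = JSON.parse(data);
        json[req.params.id] = req.body.school;
        fs.writeFile("./app/public/json/data.json", JSON.stringify(json), function (err) {
            if (err) {
                res.sendStatus(500);
                return done(err);
            }
            res.redirect('/');
            done(); // tell the queue this task is finished
        });
    });
};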
I want to update my MongoDB. I know what to write on the server side, but I don't know how to call it from the client side in Angular. Can you help?
Here is my server-side code:
module.exports.updateUser = function (req, res) {
    // get a user with ID of 1
    User.findById(1, function (err, user) {
        if (err) throw err;
        // change the user's location
        user.location = 'uk';
        // save the user
        user.save(function (err) {
            if (err) throw err;
            console.log('User successfully updated!');
        });
    });
};
You need to create a REST API endpoint (/users/save):
var users = require('./src/servies/users');
app.post('/users/save', users.updateUser);
That will call your updateUser function.
In Angular you can use the $http service, with something like the code below:
<script>
    var app = angular.module("app", []);
    app.controller("HttpPostController", function ($scope, $http) {
        $scope.SendData = function () {
            // use the $.param jQuery function to serialize the JSON data
            var data = $.param({
                location: $scope.location
            });
            var config = {
                headers: {
                    'Content-Type': 'application/x-www-form-urlencoded;charset=utf-8;'
                }
            };
            // call the POST /users/save api from the Angular code
            $http.post('/users/save', data, config)
                .success(function (data, status, headers, config) {
                })
                .error(function (data, status, header, config) {
                });
        };
    });
</script>
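Note that .success/.error were deprecated in later AngularJS 1.x releases and removed in 1.6; if your version no longer has them, the same call with the standard promise methods looks like this (a sketch reusing the data and config variables above):

// Same POST with the standard promise API instead of .success/.error
$http.post('/users/save', data, config)
    .then(function (response) {
        console.log(response.status, response.data);
    })
    .catch(function (response) {
        console.log('save failed', response.status);
    });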
ORIGINAL
I'm having problems uploading a file (an image) to Dropbox from Node.js using the official dropbox.js.
I want to upload a picture that I have on another server, for example the Dropbox icon (www.dropbox.com/static/images/new_logo.png).
client.writeFile(file, 'www.dropbox.com/static/images/new_logo.png', function (error, stat) {
    if (error) {
        return res.send(error.status); // Something went wrong.
    }
    res.send("File saved as revision " + stat.revisionTag);
});
I know that this only creates a text file containing the URL, but how can I upload the picture itself to Dropbox?
I also tried downloading the file using http.get and then uploading it to Dropbox, but it doesn't work.
Thanks.
UPDATE WITH MORE INFO
First I download the image from a remote URL with this code:
var request = http.get(options, function (res) {
    var imagedata = '';
    res.setEncoding('binary');
    res.on('data', function (chunk) {
        imagedata += chunk;
    });
    res.on('end', function () {
        console.log("Image downloaded!");
        fs.writeFile(local, imagedata, 'binary', function (err) {
            if (err) throw err;
            console.log('File saved.');
        });
    });
});
The file is saved correctly.
Then I tried two things.
Sending the imagedata to Dropbox:
console.log("Image downloaded!");
client.writeFile(file, imagedata, function(error, stat) {
if (error) {
return response.send(error.status); // Something went wrong.
}
response.send("File saved as revision " + stat.revisionTag);
});
Something is uploaded to Dropbox, but it's nothing useful.
Then I also tried reading the file from disk and sending it to Dropbox, but that doesn't work either:
fs.readFile(file, function(err, data) {
Use dropbox-js 0.9.1-beta1 or above to upload binary files from Node.js. You need to pass it Buffer or ArrayBuffer instances. Try this code:
var req = http.get(options, function (res) {
    var chunks = [];
    res.on('data', function (chunk) {
        chunks.push(chunk);
    });
    res.on('end', function () {
        console.log("Image downloaded!");
        var imageData = Buffer.concat(chunks);
        client.writeFile(file, imageData, function (error, stat) {
            if (error) {
                return response.send(error.status);
            }
            response.send("File saved as revision " + stat.revisionTag);
        });
    });
});
Original answer: the dropbox-js README mentions that binary files don't work in Node.js just yet.
I had this issue as well. I copied and slightly modified the old dropbox-node npm package (which is now deprecated), adding the following function to dropbox.js.
Client.prototype.writeFileNodejs = function (path, data, callback) {
    var self = this;
    fs.readFile(data.path, function (err, data) {
        if (err) return callback(err);
        var uri = "" + self.urls.putFile + "/" + (self.urlEncodePath(path));
        if (typeof data === 'function') callback = data, data = undefined;
        var oauth = {
            consumer_key: self.oauth.key,
            consumer_secret: self.oauth.secret,
            token: self.oauth.token,
            token_secret: self.oauth.tokenSecret
        };
        var requestOptions = { uri: uri, oauth: oauth };
        requestOptions.body = data;
        return request['put'](requestOptions, callback ?
            function (err, res, body) {
                if (err) return callback(err);
                var contentType = res.headers['content-type'];
                // check if the response body is in JSON format
                if (contentType === 'application/json' ||
                    contentType === 'text/javascript') {
                    body = JSON.parse(body);
                    if (body.error) {
                        var err = new Error(body.error);
                        err.statusCode = res.statusCode;
                        return callback(err);
                    }
                } else if (errors[res.statusCode]) {
                    var err = new Error(errors[res.statusCode]);
                    err.statusCode = res.statusCode;
                    return callback(err);
                }
                // check for metadata in headers
                if (res.headers['x-dropbox-metadata']) {
                    var metadata = JSON.parse(res.headers['x-dropbox-metadata']);
                }
                callback(null, body, metadata);
            } : undefined);
    });
};
You'll also need to require request and fs for this:
var request = require('request'),
fs = require('fs');
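A hypothetical usage sketch (the file object and path below are made up): since the function reads data.path from disk, you can pass it any object with a path property, such as the temp-file object a form parser gives you or the file saved by the http.get download above.

// Hypothetical usage: `client` is an authenticated dropbox-js Client and
// localFile.path points at a file already saved on disk.
var localFile = { path: './new_logo.png' };
client.writeFileNodejs('new_logo.png', localFile, function (err, body, metadata) {
    if (err) return console.error(err);
    console.log('Uploaded, metadata:', metadata);
});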