How to upload an image using AFNetworking + Sails.js - javascript

I have some code from a Node.js project; it contains a POST /uploadAvatar route and an uploadAvatar function:
uploadAvatar: function(req, res) {
  req.file('avatar').upload({
    // don't allow the total upload size to exceed ~10MB
    maxBytes: 10000000
  }, function whenDone(err, uploadedFiles) {
    if (err) {
      console.log(err);
      return res.negotiate(err);
    }
    // If no files were uploaded, respond with an error.
    if (uploadedFiles.length === 0) {
      return res.badRequest('No file was uploaded');
    }
    var avatarFd = uploadedFiles[0].fd;
    var json = {fd: avatarFd};
    console.log(json);
    res.json(200, json);
  });
}
Now I want to use this API to upload an image with AFNetworking. The current Swift code looks like this:
func uploadAvatar(avatar: NSData, success: JSONBlock, fail: ErrorBlock) -> Void {
    sessionManager.POST("/uploadAvatar", parameters: nil, constructingBodyWithBlock: { (formData) in
        formData.appendPartWithFormData(avatar, name: "avatar")
    }, progress: { (_) in
    }, success: { (_, responseObject) in
        let json = self.jsonFromResponseObject(responseObject)
        if json != nil {
            success(json)
        }
    }) { (_, error) in
        if let msgData = error.userInfo[AFNetworkingOperationFailingURLResponseDataErrorKey] as? NSData {
            if let message = String(data: msgData, encoding: NSUTF8StringEncoding) {
                print(message)
            }
        }
        fail(error.localizedDescription)
    }
}
The error response I get is: "No file was uploaded". It seems the Node.js server can't find my image data. What did I do wrong?
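Skipper's req.file('avatar') only picks up multipart parts that carry a filename, i.e. genuine file parts. appendPartWithFormData appends the bytes as a plain form field with no filename, so the server never sees a file; switching to appendPartWithFileData(_:name:fileName:mimeType:) on the AFNetworking side is the likely fix. As a sanity check, here is a minimal Node sketch that sends a part the endpoint will accept, assuming the form-data package and Sails running on its default port 1337:
const FormData = require('form-data');
const fs = require('fs');

const form = new FormData();
// Supplying a filename (and content type) makes this a file part,
// which is what Skipper's req.file() looks for.
form.append('avatar', fs.createReadStream('./avatar.png'), {
  filename: 'avatar.png',
  contentType: 'image/png'
});

form.submit('http://localhost:1337/uploadAvatar', (err, res) => {
  if (err) return console.error(err);
  console.log('status:', res.statusCode); // expect 200 with {fd: ...}
  res.resume();
});
If this succeeds while the Swift client fails, the request body construction on the client is the problem, not the Sails controller.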

Related

send text data with formdata via Axios post

I am sending a PDF file as form data in an Axios POST request, so the file gets uploaded and saved to a folder on the server. I'm using multer on the server to save the file, and that works great.
Now I also want to add some fields to the DB related to the file, one being the file name that gets generated right before the file is saved. I don't want to make a round trip back to the client and then another call out to the server just to update the DB, so I want to send a few text strings along with the form data. But no matter what I try, I cannot read any text data from that FormData object in my Node code. FYI, I am using Express on my Node server.
Client-side code that kicks off the upload process (notice I am attempting to append additional fields to the FormData object):
const uploadFilesAsync = () => {
  const data = new FormData();
  const filenames = [];
  uploadedFiles.forEach((f) => {
    filenames.push(f.name);
    data.append('file', f);
  });
  const fileInfo = {
    customer: selectedCustomer,
    load: selectedLoad,
    filenames
  };
  data.append('customer', selectedCustomer);
  data.append('load', selectedLoad);
  data.append('filenames', filenames.toString());
  // I also tried the following and then passing fileInfo in with data and setLoaded
  // (commented out here because it would redeclare fileInfo):
  // const fileInfo = {customer: selectedCustomer, load: selectedLoad,
  //   filenames: filenames.toString()};
  uploadFiles(data, setLoaded)
    .then((res) => {
      console.log('uploadFiles res: ', res);
      if (res.status === 200) {
        // addFileInfoToDB(fileInfo)
        //   .then((r) => console.log('addFileInfoToDB: ', r))
        //   .catch((e) => console.log({ e }));
      }
    })
    .catch((e) => console.log(e));
};
And then the client side function uploadFiles:
export const uploadFiles = (data, setLoaded) => {
  console.log({ data });
  const config = {
    onUploadProgress: function(progressEvent) {
      const percentCompleted = Math.round(
        (progressEvent.loaded * 100) / progressEvent.total
      );
      setLoaded(percentCompleted);
    },
    headers: {
      'Content-Type': 'multipart/form-data'
    }
  };
  // and then here if I passed in the fileInfo object, I tried sending `{data, fileInfo}`
  // instead of just data, but that broke file upload portion too
  return axios
    .post(baseURL + '/SDS/upload', data, config)
    .then((response) => {
      console.log({ response });
      return response;
    })
    .catch((e) => {
      return Promise.reject(e);
    });
};
And finally the server side function that does all the work:
static async uploadSDS(req, res) {
  console.log(req.body);
  let uploadSuccess = false;
  upload(req, res, async function(err) {
    if (err instanceof multer.MulterError) {
      // return res.status(500).json({ Error1: err });
      // return { status: 500 };
    } else if (err) {
      // return res.status(500).json({ Error2: err });
      // return { status: 500 };
    } else {
      uploadSuccess = true;
    }
    console.log(uploadSuccess);
    // return res.status(200).send(req.file);
    // return { status: 200 };
    // if (uploadSuccess) {
    //   try {
    //     const result = await SDS.addFileInfoToDB(req.fileInfo);
    //     if (result) {
    //       return res.status(200).json({ result });
    //     }
    //   } catch (e) {
    //     console.log(e);
    //   }
    // }
  });
}
When I console.log the req.body it is always empty.
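The first console.log(req.body) in uploadSDS runs before multer has consumed the multipart stream, so the body is still empty at that point; the text fields only appear on req.body inside the upload callback. A minimal sketch of that ordering, assuming an Express app and a multer instance configured for the file field (names and paths here are placeholders):
const express = require('express');
const multer = require('multer');

const app = express();
const upload = multer({ dest: 'uploads/' }).array('file');

app.post('/SDS/upload', (req, res) => {
  console.log(req.body); // still empty: the multipart stream isn't parsed yet
  upload(req, res, (err) => {
    if (err) return res.status(500).json({ error: String(err) });
    // multer has now populated req.files and the text fields on req.body
    console.log(req.body.customer, req.body.load, req.body.filenames);
    res.status(200).json({ files: req.files, fields: req.body });
  });
});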

Sending CSV records to Amazon Sagemaker through lambda function

I am looking to send CSV records coming from Kinesis Analytics to a SageMaker endpoint, get an inference back through a Lambda function, and then pass it on to the Firehose API to dump it into S3. But the data is not getting into SageMaker for some reason.
'use strict';
console.log('Loading function');
var AWS = require('aws-sdk');
var sagemakerruntime = new AWS.SageMakerRuntime({apiVersion: '2017-05-13'});
var firehose = new AWS.Firehose({apiVersion: '2015-08-04'});
exports.handler = (event, context, callback) => {
  let success = 0;
  let failure = 0;
  const output = event.records.map((record) => {
    /* Data is base64 encoded, so decode here */
    const recordData = Buffer.from(record.data, 'base64');
    try {
      var params = {
        Body: new Buffer('...') || recordData /* Strings will be Base-64 encoded on your behalf */, /* required */
        EndpointName: 'String', /* required */
        Accept: 'text/csv',
        ContentType: 'text/csv'
      };
      sagemakerruntime.invokeEndpoint(params, function(err, data) {
        var result1;
        if (err) console.log(err, err.stack); // an error occurred
        else console.log(data); // successful response
        result1 = data;
        var params = {
          DeliveryStreamName: 'String', /* required */
          Record: { /* required */
            Data: new Buffer('...') || result1 /* Strings will be Base-64 encoded on your behalf */ /* required */
          }
        };
        firehose.putRecord(params, function(err, data) {
          if (err) console.log(err, err.stack); // an error occurred
          else console.log(data); // successful response
        });
      });
      success++;
      return {
        recordId: record.recordId,
        result: 'Ok',
      };
    } catch (err) {
      failure++;
      return {
        recordId: record.recordId,
        result: 'DeliveryFailed',
      };
    }
  });
  console.log(`Successfully delivered records: ${success}, failed records: ${failure}.`);
  callback(null, {
    records: output,
  });
};
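The likely culprit is the new Buffer('...') || recordData expressions left over from the SDK doc template: a non-empty Buffer is truthy, so the literal string '...' is what actually reaches the endpoint (and later Firehose), never the record data. A sketch of the corrected calls, with hypothetical endpoint and stream names:
// Pass the decoded record data directly; the '...' placeholders from the
// SDK template must be removed, not OR'd with the real data.
const params = {
  Body: recordData,              // the base64-decoded CSV record
  EndpointName: 'my-endpoint',   // hypothetical endpoint name
  Accept: 'text/csv',
  ContentType: 'text/csv'
};
sagemakerruntime.invokeEndpoint(params, (err, data) => {
  if (err) return console.log(err, err.stack);
  firehose.putRecord({
    DeliveryStreamName: 'my-stream', // hypothetical stream name
    Record: { Data: data.Body }      // forward the inference result
  }, (err2) => {
    if (err2) console.log(err2, err2.stack);
  });
});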

How to Use Dropbox Upload Sessions For Files Larger than 150 MB?

I want to upload a file larger than 150 MB.
The Dropbox API v2 docs say you should start an upload session, since you can't send a single POST with more than 150 MB of data, but I'm unsure how to achieve that with the upload_session API.
While any individual request shouldn't be larger than 150 MB (and typically you should use a significantly smaller chunk size), you can upload files larger than that by using multiple requests.
There's an example of using upload sessions below. That example uses the Python SDK rather than the JavaScript SDK, but it should serve as a useful reference, as the logic is the same. (They both use the same underlying API.)
This uses the Dropbox Python SDK to upload a file to the Dropbox API from the local file as specified by file_path to the remote path as specified by dest_path. It also chooses whether or not to use an upload session based on the size of the file:
import os
import dropbox

# dbx, file_path, and dest_path are assumed to be defined as described above.
f = open(file_path, 'rb')
file_size = os.path.getsize(file_path)
CHUNK_SIZE = 4 * 1024 * 1024

if file_size <= CHUNK_SIZE:
    print dbx.files_upload(f.read(), dest_path)
else:
    upload_session_start_result = dbx.files_upload_session_start(f.read(CHUNK_SIZE))
    cursor = dropbox.files.UploadSessionCursor(session_id=upload_session_start_result.session_id,
                                               offset=f.tell())
    commit = dropbox.files.CommitInfo(path=dest_path)
    while f.tell() < file_size:
        if (file_size - f.tell()) <= CHUNK_SIZE:
            print dbx.files_upload_session_finish(f.read(CHUNK_SIZE),
                                                  cursor,
                                                  commit)
        else:
            dbx.files_upload_session_append(f.read(CHUNK_SIZE),
                                            cursor.session_id,
                                            cursor.offset)
            cursor.offset = f.tell()
f.close()
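For reference, here is a rough JavaScript translation of the same logic: a minimal sketch, assuming the official dropbox SDK for Node, an existing dbx client instance, and that the whole file fits in memory (newer SDK versions wrap responses in a result property, so adjust accordingly):
const fs = require('fs');
const CHUNK_SIZE = 4 * 1024 * 1024;

async function uploadLargeFile(dbx, filePath, destPath) {
  const contents = fs.readFileSync(filePath);

  // Small files can go up in a single request.
  if (contents.length <= CHUNK_SIZE) {
    return dbx.filesUpload({ path: destPath, contents });
  }

  // Otherwise start a session with the first chunk...
  const start = await dbx.filesUploadSessionStart({
    close: false,
    contents: contents.slice(0, CHUNK_SIZE)
  });
  let offset = CHUNK_SIZE;

  // ...append full chunks while more than one chunk remains...
  while (contents.length - offset > CHUNK_SIZE) {
    await dbx.filesUploadSessionAppendV2({
      cursor: { session_id: start.session_id, offset },
      close: false,
      contents: contents.slice(offset, offset + CHUNK_SIZE)
    });
    offset += CHUNK_SIZE;
  }

  // ...and commit the remainder with the final chunk.
  return dbx.filesUploadSessionFinish({
    cursor: { session_id: start.session_id, offset },
    commit: { path: destPath, mode: 'add' },
    contents: contents.slice(offset)
  });
}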
You can quickly upload file chunks using the files/upload_session/start, files/upload_session/append_v2 and files/upload_session/finish API endpoints. Here is an example which uses my tiny Dropbox API v2 wrapper (dropbox-v2-api):
const CHUNK_LENGTH = 100;
// create read streams, which generate a set of 100 (CHUNK_LENGTH) characters of values: 1 and 2
const firstUploadChunkStream = () => utils.createMockedReadStream('1', CHUNK_LENGTH);
const secondUploadChunkStream = () => utils.createMockedReadStream('2', CHUNK_LENGTH);

sessionStart((sessionId) => {
  sessionAppend(sessionId, () => {
    sessionFinish(sessionId);
  });
});

function sessionStart(cb) {
  dropbox({
    resource: 'files/upload_session/start',
    parameters: {
      close: false
    },
    readStream: firstUploadChunkStream()
  }, (err, response) => {
    if (err) { return console.log('sessionStart error: ', err); }
    console.log('sessionStart response:', response);
    cb(response.session_id);
  });
}

function sessionAppend(sessionId, cb) {
  dropbox({
    resource: 'files/upload_session/append_v2',
    parameters: {
      cursor: {
        session_id: sessionId,
        offset: CHUNK_LENGTH
      },
      close: false
    },
    readStream: secondUploadChunkStream()
  }, (err, response) => {
    if (err) { return console.log('sessionAppend error: ', err); }
    console.log('sessionAppend response:', response);
    cb();
  });
}

function sessionFinish(sessionId) {
  dropbox({
    resource: 'files/upload_session/finish',
    parameters: {
      cursor: {
        session_id: sessionId,
        offset: CHUNK_LENGTH * 2
      },
      commit: {
        path: "/result.txt",
        mode: "add",
        autorename: true,
        mute: false
      }
    }
  }, (err, response) => {
    if (err) { return console.log('sessionFinish error: ', err); }
    console.log('sessionFinish response:', response);
  });
}
I have an example!
var testFile1Data = "test file data 1";
var testFile2Data = "test file data 2";

// The session-start calls, the batch finish, and the batch check must run
// in sequence; firing them all at once leaves file1Start, file2Start, and
// finishBatch undefined at the moment they are used.
dbx.filesUploadSessionStart({
  contents: testFile1Data,
  close: true,
})
  .then(function (file1Start) {
    return dbx.filesUploadSessionStart({
      contents: testFile2Data,
      close: true,
    }).then(function (file2Start) {
      return dbx.filesUploadSessionFinishBatch({entries: [
        {cursor: {session_id: file1Start.session_id, offset: testFile1Data.length}, commit: {path: "/testFile1.txt"}},
        {cursor: {session_id: file2Start.session_id, offset: testFile2Data.length}, commit: {path: "/testFile2.txt"}},
      ]});
    });
  })
  .then(function (finishBatch) {
    return dbx.filesUploadSessionFinishBatchCheck({async_job_id: finishBatch.async_job_id});
  })
  .then(function (response) {
    console.log(response);
  })
  .catch(function (err) {
    console.log(err);
  });
I got the example from an issue thread on github - https://github.com/dropbox/dropbox-sdk-js/issues/80#issuecomment-283189888

Uploading files using Skipper with Sails.js v0.10 - how to retrieve new file name

I am upgrading to Sails.js version 0.10 and now need to use Skipper to manage my file uploads.
When I upload a file I generate a new name for it using a UUID, and save it in the public/files/ folder (this will change when I've got this all working, but it's good for testing right now).
I save the original name, and the uploaded name + path, into a Mongo database.
This was all quite straightforward under Sails v0.9.x, but using Skipper I can't figure out how to read the new file name and path. (Obviously if I could read the name I could construct the path, so it's really only the name I need.)
My controller looks like this:
var uuid = require('node-uuid'),
    path = require('path'),
    blobAdapter = require('skipper-disk');

module.exports = {
  upload: function(req, res) {
    var receiver = blobAdapter().receive({
          dirname: sails.config.appPath + "/public/files/",
          saveAs: function(file) {
            var filename = file.filename,
                newName = uuid.v4() + path.extname(filename);
            return newName;
          }
        }),
        results = [];
    req.file('docs').upload(receiver, function (err, files) {
      if (err) return res.serverError(err);
      async.forEach(files, function(file, next) {
        Document.create({
          name: file.filename,
          size: file.size,
          localName: // ***** how do I get the `saveAs()` value from the uploaded file *****,
          path: // *** and likewise how do i get the path ******
        }).exec(function(err, savedFile){
          if (err) {
            next(err);
          } else {
            results.push({
              id: savedFile.id,
              url: '/files/' + savedFile.localName
            });
            next();
          }
        });
      }, function(err){
        if (err) {
          sails.log.error('caught error', err);
          return res.serverError({error: err});
        } else {
          return res.json({ files: results });
        }
      });
    });
  },
  _config: {}
};
How do I do this?
I've worked this out now and thought I'd share my solution for the benefit of others struggling with similar issues.
The solution was to not use skipper-disk at all but to write my own custom receiver. I've created this as a Sails Service object.
So, in api/services/Uploader.js:
// Uploader utilities and helper methods
// designed to be relatively generic.
var fs = require('fs'),
    Writable = require('stream').Writable;

exports.documentReceiverStream = function(options) {
  var defaults = {
    dirname: '/dev/null',
    saveAs: function(file){
      return file.filename;
    },
    completed: function(file, done){
      done();
    }
  };
  // I don't have access to jQuery here so this is the simplest way I
  // could think of to merge the options.
  var opts = defaults;
  if (options.dirname) opts.dirname = options.dirname;
  if (options.saveAs) opts.saveAs = options.saveAs;
  if (options.completed) opts.completed = options.completed;

  var documentReceiver = Writable({objectMode: true});

  // This `_write` method is invoked each time a new file is received
  // from the Readable stream (Upstream) which is pumping filestreams
  // into this receiver. (filename === `file.filename`).
  documentReceiver._write = function onFile(file, encoding, done) {
    var newFilename = opts.saveAs(file),
        fileSavePath = opts.dirname + newFilename,
        outputs = fs.createWriteStream(fileSavePath, encoding);
    file.pipe(outputs);

    // Garbage-collect the bytes that were already written for this file.
    // (called when a read or write error occurs)
    function gc(err) {
      sails.log.debug("Garbage collecting file '" + file.filename + "' located at '" + fileSavePath + "'");
      fs.unlink(fileSavePath, function (gcErr) {
        if (gcErr) {
          return done([err].concat([gcErr]));
        } else {
          return done(err);
        }
      });
    }

    file.on('error', function (err) {
      sails.log.error('READ error on file ' + file.filename, '::', err);
    });
    outputs.on('error', function failedToWriteFile (err) {
      sails.log.error('failed to write file', file.filename, 'with encoding', encoding, ': done =', done);
      gc(err);
    });
    outputs.on('finish', function successfullyWroteFile () {
      sails.log.debug("file uploaded");
      opts.completed({
        name: file.filename,
        size: file.size,
        localName: newFilename,
        path: fileSavePath
      }, done);
    });
  };

  return documentReceiver;
};
and then my controller just became (in api/controllers/DocumentController.js):
var uuid = require('node-uuid'),
    path = require('path');

module.exports = {
  upload: function(req, res) {
    var results = [],
        streamOptions = {
          dirname: sails.config.appPath + "/public/files/",
          saveAs: function(file) {
            var filename = file.filename,
                newName = uuid.v4() + path.extname(filename);
            return newName;
          },
          completed: function(fileData, next) {
            Document.create(fileData).exec(function(err, savedFile){
              if (err) {
                next(err);
              } else {
                results.push({
                  id: savedFile.id,
                  url: '/files/' + savedFile.localName
                });
                next();
              }
            });
          }
        };
    req.file('docs').upload(Uploader.documentReceiverStream(streamOptions),
      function (err, files) {
        if (err) return res.serverError(err);
        res.json({
          message: files.length + ' file(s) uploaded successfully!',
          files: results
        });
      }
    );
  },
  _config: {}
};
I'm sure it can be improved further but this works perfectly for me.
The uploaded file object contains all data you need:
req.file('fileTest').upload({
  // You can apply a file upload limit (in bytes)
  maxBytes: maxUpload,
  adapter: require('skipper-disk')
}, function whenDone(err, uploadedFiles) {
  if (err) {
    var error = { "status": 500, "error": err };
    res.status(500);
    return res.json(error);
  } else {
    for (var u in uploadedFiles) {
      // "fd" contains the actual file path (and name) of your file on disk
      var fileOnDisk = uploadedFiles[u].fd;
      // I suggest you stringify the object to see what it contains and what might be useful to you
      console.log(JSON.stringify(uploadedFiles[u]));
    }
  }
});
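If you only need the generated name rather than the full path (for example, to build a /files/<name> URL as in the question), you can take the basename of fd; a tiny sketch, where uploadedFile stands for one entry of uploadedFiles above:
var path = require('path');
// `fd` holds the absolute path Skipper saved the file under,
// so basename recovers the generated file name.
var localName = path.basename(uploadedFile.fd);
var url = '/files/' + localName;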

How upload a file to Dropbox with dropbox.js?

ORIGINAL
I'm having problems uploading a file (image) to Dropbox from Node.js using the official dropbox.js.
I want to upload a picture that I have on another server, for example the Dropbox icon (www.dropbox.com/static/images/new_logo.png).
client.writeFile(file, 'www.dropbox.com/static/images/new_logo.png', function(error, stat) {
  if (error) {
    return res.send(error.status); // Something went wrong.
  }
  res.send("File saved as revision " + stat.revisionTag);
});
I know that this only creates a text file containing the URL, but how can I upload the actual picture to Dropbox?
I also tried downloading the file using http.get and then uploading it to Dropbox, but that doesn't work.
Thanks.
UPDATE WITH MORE INFO
First I download the image from a remote URL with this code:
var request = http.get(options, function(res) {
  var imagedata = '';
  res.setEncoding('binary');
  res.on('data', function(chunk) {
    imagedata += chunk;
  });
  res.on('end', function() {
    console.log("Image downloaded!");
    fs.writeFile(local, imagedata, 'binary', function(err) {
      if (err) throw err;
      console.log('File saved.');
    });
  });
});
The file is saved correctly.
Then I tried two things:
Sending the 'imagedata' to Dropbox:
console.log("Image downloaded!");
client.writeFile(file, imagedata, function(error, stat) {
if (error) {
return response.send(error.status); // Something went wrong.
}
response.send("File saved as revision " + stat.revisionTag);
});
And something is uploaded to Dropbox but it's nothing useful.
Then I also tried to read the file from disk and send it to Dropbox, but that doesn't work either:
fs.readFile(file, function(err, data) {
Use dropbox-js 0.9.1-beta1 or above to upload binary files from node.js. You need to pass it Buffer or ArrayBuffer instances. Try this code:
var req = http.get(options, function(res) {
  var chunks = [];
  res.on('data', function(chunk) {
    chunks.push(chunk);
  });
  res.on('end', function() {
    console.log("Image downloaded!");
    var imageData = Buffer.concat(chunks);
    client.writeFile(file, imageData, function(error, stat) {
      if (error) {
        return response.send(error.status);
      }
      response.send("File saved as revision " + stat.revisionTag);
    });
  });
});
Original answer: the dropbox-js README mentions that binary files don't work in node.js just yet.
I had this issue as well. I copied and modified a bit of the old dropbox-node npm package (which is now deprecated), adding the following function to dropbox.js:
Client.prototype.writeFileNodejs = function(path, data, callback) {
  var self = this;
  fs.readFile(data.path, function(err, data) {
    if (err) return callback(err);
    var uri = "" + self.urls.putFile + "/" + (self.urlEncodePath(path));
    if (typeof data === 'function') callback = data, data = undefined;
    var oauth = {
      consumer_key: self.oauth.key,
      consumer_secret: self.oauth.secret,
      token: self.oauth.token,
      token_secret: self.oauth.tokenSecret
    };
    var requestOptions = { uri: uri, oauth: oauth };
    requestOptions.body = data;
    return request['put'](requestOptions, callback ?
      function(err, res, body) {
        if (err) return callback(err);
        var contentType = res.headers['content-type'];
        // check if the response body is in JSON format
        if (contentType === 'application/json' ||
            contentType === 'text/javascript') {
          body = JSON.parse(body);
          if (body.error) {
            var err = new Error(body.error);
            err.statusCode = res.statusCode;
            return callback(err);
          }
        } else if (errors[res.statusCode]) {
          var err = new Error(errors[res.statusCode]);
          err.statusCode = res.statusCode;
          return callback(err);
        }
        // check for metadata in headers
        if (res.headers['x-dropbox-metadata']) {
          var metadata = JSON.parse(res.headers['x-dropbox-metadata']);
        }
        callback(null, body, metadata);
      } : undefined);
  });
};
You'll also need to require request and fs for this:
var request = require('request'),
fs = require('fs');
