I'm trying to send files from Flutter using the Dio package. It works fine when sending a single file to Spring, but with multiple files I get a 400 Bad Request.
I'm using Spring WebFlux.
Note that I tested this method with Postman and it works fine.
I searched for this but found nothing. Any help is appreciated.
@PostMapping(value = "/uploadfiles/{type}", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
@ResponseStatus(value = HttpStatus.OK)
public Mono<String> uploadMultipleFile(@RequestPart("files") Flux<FilePart> filePartFlux,
                                       @PathVariable("type") String type) {
    propertymediaIds = new ArrayList<>();
    Mono<String> then = filePartFlux.flatMap(it -> {
        try {
            Path tempFile = Files.createTempFile("test", it.filename());
            AsynchronousFileChannel channel = AsynchronousFileChannel.open(tempFile, StandardOpenOption.WRITE);
            // Note: this write is subscribed fire-and-forget; the file is read
            // back below before the write is guaranteed to have completed.
            DataBufferUtils.write(it.content(), channel, 0).doOnComplete(() -> {
            }).subscribe();
            try {
                File f = tempFile.toFile();
                byte[] bArray = Files.readAllBytes(f.toPath());
                Propertymedia propertymedia = new Propertymedia();
                propertymedia.setBlob(bArray);
                propertymedia.setBlobContentType(type);
                Mono<ResponseEntity<Propertymedia>> savedpropertymedia = createPropertymedia(propertymedia);
                Mono<String> id = savedpropertymedia.map(user -> user.getBody().getId());
                id.subscribe(v -> {
                    System.out.println(v);
                    if (v != null) {
                        this.propertymediaIds.add(v);
                        System.out.println(this.propertymediaIds);
                    }
                });
            } catch (URISyntaxException e) {
                e.printStackTrace();
            } catch (IOException e) {
                e.printStackTrace();
            }
        } catch (IOException e1) {
            e1.printStackTrace();
        }
        return Mono.just(propertymediaIds);
    }).then(Mono.just("OK"));
    return then;
}
This is for loading the images:
Future<void> loadAssets() async {
  List<Asset> resultList = List<Asset>();
  List<File> files = List<File>();
  String error = 'No Error Detected';
  try {
    resultList = await MultiImagePicker.pickImages(
      maxImages: 6,
      enableCamera: true,
      selectedAssets: images,
      cupertinoOptions: CupertinoOptions(takePhotoIcon: "chat"),
      materialOptions: MaterialOptions(
        actionBarColor: "#abcdef",
        actionBarTitle: "Example App",
        allViewTitle: "All Photos",
        useDetailsView: false,
        selectCircleStrokeColor: "#000000",
      ),
    );
    for (Asset i in resultList) {
      File f = new File(i.getByteData().toString());
      print(f.path);
    }
  } on Exception catch (e) {
    error = e.toString();
  }
  // If the widget was removed from the tree while the asynchronous platform
  // message was in flight, we want to discard the reply rather than calling
  // setState to update our non-existent appearance.
  if (!mounted) return;
  setState(() {
    images = resultList;
    _error = error;
  });
}
This is for sending the images to Spring:
Future<void> uploadImage() async {
  try {
    List<MultipartFile> multipart = List<MultipartFile>();
    for (int i = 0; i < images.length; i++) {
      var path =
          await FlutterAbsolutePath.getAbsolutePath(images[i].identifier);
      multipart.add(
          await MultipartFile.fromFile(path, filename: 'myfile$i.jpg'));
    }
    FormData imageFormData = FormData.fromMap({
      "files": multipart,
    });
    print('Bearer ' + _userService.token);
    Response response =
        await Dio().post("http://10.0.2.2:8080/api/uploadfiles/image",
            options: Options(headers: {
              "Authorization": 'Bearer ' + _userService.token,
              'content-type': 'multipart/form-data'
            }),
            data: imageFormData);
    print("File upload response: $response");
    print('done');
  } catch (e) {
    print("Exception Caught: $e");
  }
}
Related
JavaScript / Node.js importing HTML file
I'm making a Node.js server which sends emails on demand. The variable "output" is what I want to send via email. When I use inline HTML it works fine, but I want to import a complete HTML file instead.
const { EmailClient } = require("@azure/communication-email");

const connectionString = "<ACS_CONNECTION_STRING>";
const sender = "<SENDER_EMAIL>";
const toRecipients = {
  to: [
    { email: "<alice@contoso.com>", displayName: "Alice" },
  ],
};

const client = new EmailClient(connectionString);

const emailContent = {
  subject: "Send email plain text - JS sample",
  plainText: "",
  // html: "<h3>Hi, this works</h3>", // WORKS
  // html: '<object type="text/html" data="file.html"></object>', // Doesn't work
  html: '<link href="file.html" rel="import" />', // Doesn't work
};
async function main() {
  try {
    const emailMessage = {
      sender: sender,
      content: emailContent,
      importance: 'low',
      recipients: toRecipients,
    };
    const sendResult = await client.send(emailMessage);
    if (sendResult && sendResult.messageId) {
      const messageId = sendResult.messageId;
      if (messageId === null || messageId === undefined) {
        console.log("Message Id not found.");
        return;
      }
      console.log("Send email success, MessageId :", messageId);
      let counter = 0;
      const statusInterval = setInterval(async function () {
        counter++;
        try {
          const sendStatusResult = await client.getSendStatus(messageId);
          if (sendStatusResult) {
            console.log(`Email status for {${messageId}} : [${sendStatusResult.status}]`);
            if (sendStatusResult.status.toLowerCase() !== "queued" || counter > 12) {
              clearInterval(statusInterval);
            }
          }
        } catch (e) {
          console.log("Error in checking send mail status: ", e);
        }
      }, 5000);
    } else {
      console.error("Something went wrong when trying to send this email: ", sendResult);
    }
  } catch (e) {
    console.log("################### Exception occurred while sending email #####################", e);
  }
}

main();
Help is much appreciated.
You can use fs.readFileSync to import your HTML file as a string.
const emailContent = {
  ...
  html: fs.readFileSync('./file.html', 'utf8'),
  ...
}
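A self-contained version of that sketch (the ./file.html location is an assumption; adjust the path to wherever your template lives):
const fs = require("fs");
const path = require("path");

// Read the HTML template once at startup; with an encoding argument,
// readFileSync returns a string rather than a Buffer.
const html = fs.readFileSync(path.join(__dirname, "file.html"), "utf8");

const emailContent = {
  subject: "Send email from HTML file - JS sample",
  plainText: "",
  html, // the file's markup, inlined as one string
};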
Background
The JavaScript library for Microsoft Office add-ins allows you to get the raw content of the DOCX file through the getFileAsync() API, which returns a slice of up to 4MB in one go. You keep calling the function using a sliding-window approach until you have read the entire content. I need to upload these slices to the server and then join them back together to recreate the original DOCX file.
My attempt
I'm using axios on the client side and the busboy-based express-chunked-file-upload middleware on my Node server. As I call getFileAsync recursively, I get a raw array of bytes that I then convert to a Blob and append to FormData before posting it to the Node server. The whole flow works and I receive the slice on the server. However, the chunk that gets written to disk on the server is much larger than the blob I uploaded, normally on the order of 3 times the size, so it is obviously not what I sent.
My suspicion is that this may have to do with stream encoding, but the node middleware does not expose any options to set encoding.
Here is the current state of code:
Client-side
public sendActiveDocument(uploadAs: string, sliceSize: number): Promise<boolean> {
  return new Promise<boolean>((resolve) => {
    Office.context.document.getFileAsync(Office.FileType.Compressed,
      { sliceSize: sliceSize },
      async (result) => {
        if (result.status == Office.AsyncResultStatus.Succeeded) {
          // Get the File object from the result.
          const myFile = result.value;
          const state = {
            file: myFile,
            filename: uploadAs,
            counter: 0,
            sliceCount: myFile.sliceCount,
            chunkSize: sliceSize
          } as getFileState;
          console.log("Getting file of " + myFile.size + " bytes");
          const hash = makeId(12)
          // resolve only once all slices have been sent
          this.getSlice(state, hash).then(() => resolve(true))
        } else {
          resolve(false)
        }
      })
  })
}
private async getSlice(state: getFileState, fileHash: string): Promise<boolean> {
  const result = await this.getSliceAsyncPromise(state.file, state.counter)
  if (result.status == Office.AsyncResultStatus.Succeeded) {
    const data = result.value.data;
    if (data) {
      const formData = new FormData();
      formData.append("file", new Blob([data]), state.filename);
      const boundary = makeId(12);
      const start = state.counter * state.chunkSize
      const end = (state.counter + 1) * state.chunkSize
      const total = state.file.size
      return await Axios.post('/upload', formData, {
        headers: {
          "Content-Type": `multipart/form-data; boundary=${boundary}`,
          "file-chunk-id": fileHash,
          "file-chunk-size": state.chunkSize,
          "Content-Range": 'bytes ' + start + '-' + end + '/' + total,
        },
      }).then(async res => {
        if (res.status === 200) {
          state.counter++;
          if (state.counter < state.sliceCount) {
            return await this.getSlice(state, fileHash);
          } else {
            this.closeFile(state);
            return true
          }
        } else {
          return false
        }
      }).catch(err => {
        console.log(err)
        this.closeFile(state)
        return false
      })
    } else {
      return false
    }
  } else {
    console.log(result.status);
    return false
  }
}
private getSliceAsyncPromise(file: Office.File, sliceNumber: number): Promise<Office.AsyncResult<Office.Slice>> {
  return new Promise(function (resolve) {
    file.getSliceAsync(sliceNumber, result => resolve(result))
  })
}
Server-side
This code is totally from the npm package (link above), so I'm not supposed to change anything in here, but still for reference:
makeMiddleware = () => {
  return (req, res, next) => {
    const busboy = new Busboy({ headers: req.headers });
    busboy.on('file', (fieldName, file, filename, _0, _1) => {
      if (this.fileField !== fieldName) { // Current field is not handled.
        return next();
      }
      const chunkSize = req.headers[this.chunkSizeHeader] || 500000; // Default: 500Kb.
      const chunkId = req.headers[this.chunkIdHeader] || 'unique-file-id'; // If not specified, will reuse same chunk id.
      // NOTE: Using the same chunk id for multiple file uploads in parallel will corrupt the result.
      const contentRangeHeader = req.headers['content-range'];
      let contentRange;
      const errorMessage = util.format(
        'Invalid Content-Range header: %s', contentRangeHeader
      );
      try {
        contentRange = parse(contentRangeHeader);
      } catch (err) {
        return next(new Error(errorMessage));
      }
      if (!contentRange) {
        return next(new Error(errorMessage));
      }
      const part = contentRange.start / chunkSize;
      const partFilename = util.format('%i.part', part);
      const tmpDir = util.format('/tmp/%s', chunkId);
      this._makeSureDirExists(tmpDir);
      const partPath = path.join(tmpDir, partFilename);
      const writableStream = fs.createWriteStream(partPath);
      file.pipe(writableStream);
      file.on('end', () => {
        req.filePart = part;
        if (this._isLastPart(contentRange)) {
          req.isLastPart = true;
          this._buildOriginalFile(chunkId, chunkSize, contentRange, filename).then(() => {
            next();
          }).catch(_ => {
            const errorMessage = 'Failed merging parts.';
            next(new Error(errorMessage));
          });
        } else {
          req.isLastPart = false;
          next();
        }
      });
    });
    req.pipe(busboy);
  };
}
Update
So it looks like I have found the problem, at least. busboy appears to be writing my array of bytes as text in the output file: I get 80,75,3,4,20,0,6,0,8,0,0,0,33,0,44,25 (as text) when I upload the array of bytes [80,75,3,4,20,0,6,0,8,0,0,0,33,0,44,25]. Now I need to figure out how to force it to write a binary stream.
Figured it out. Just in case it helps anyone: there was no problem with busboy, office.js, or axios. I just had to convert the incoming chunk of data to a Uint8Array before creating a Blob from it. So instead of:
formData.append("file", new Blob([data]), state.filename);
I do this:
const blob = new Blob([ new Uint8Array(data) ])
formData.append("file", blob, state.filename);
And it worked like a charm.
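A quick way to see why the conversion matters (illustrative; paste into any browser console): the Blob constructor stringifies parts that are not typed arrays, ArrayBuffers, or Blobs.
// A plain number array is coerced to the text "80,75,3,4" inside a Blob:
new Blob([[80, 75, 3, 4]]).size;                 // 9 -- nine characters of text
// A typed array contributes its raw bytes instead:
new Blob([new Uint8Array([80, 75, 3, 4])]).size; // 4 -- four actual bytes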
I am developing a role-based application using Android Studio. Since my project needs admin rights for creating and deleting users, I used the Firebase Admin SDK. I tried to delete multiple accounts but ran into an issue: the response returned is not a valid JSON object. I tried to handle the possible errors in my code, but the response is still not a valid JSON object.
See below.
index.js
exports.deleteUser = functions.https.onCall(async (data, context) => {
  try {
    if (!context.auth) {
      throw new AuthenticationError('Kimlik Doğrulaması Yapılmamış'); // "Not authenticated"
    }
    const uids = JSON.parse(data);
    console.log(uids);
    const callerUid = context.auth.uid;
    const callerUser = await admin.auth().getUser(callerUid);
    if (!callerUser.customClaims.admin && !callerUser.customClaims.superadmin) {
      throw new NotAnAdminError('Bu işlemi sadece yöneticiler gerçekleştirebilir'); // "Only admins may do this"
    }
    const reference = admin.firestore().collection("Users");
    const res = await admin.auth().deleteUsers(uids);
    res.errors.forEach(element => console.log(element));
    const successes = res.successCount;
    const fails = res.failureCount;
    console.log(fails);
    console.log(successes);
    if (fails === 0) {
      // Note: forEach is not awaitable; these deletes are fired without waiting.
      await uids.forEach(element => reference.doc(element).delete());
      return { result: successes + ' Öğrenci Silindi!' }; // "... students deleted"
    } else {
      // Note: HttpsError's first argument must be a canonical code such as 'internal'.
      throw new functions.https.HttpsError('Silme Hatası', 'Bilinmeyen hata, silinemeyen öğrenci sayısı: ' + fails);
    }
  }
  catch (error) {
    if (error.type === 'NotAnAdminError') {
      throw new functions.https.HttpsError('Bu işlemi yapma yetkiniz yok.', error.message);
    } else if (error.type === 'AuthenticationError') {
      throw new functions.https.HttpsError('Kimlik Hatası', error.message);
    } else {
      throw new functions.https.HttpsError('internal ERROR from catch block', error.message);
    }
  }
});
Android Code
private Task<String> deleteUsers(List<CheckableUser> users) {
    List<String> idlist = new ArrayList<>();
    for (CheckableUser user : users) {
        idlist.add(user.getUid());
    }
    // jsonFormatted is the JSON-encoded form of idlist (built elsewhere).
    return mFunctions.getHttpsCallable("deleteUsers").call(jsonFormatted)
            .continueWith(new Continuation<HttpsCallableResult, String>() {
                @Override
                public String then(@NonNull Task<HttpsCallableResult> task) throws Exception {
                    HashMap<String, Object> data = (HashMap<String, Object>) task.getResult().getData();
                    String result = (String) data.get("result");
                    Log.d(TAG, "then: " + result);
                    return result;
                }
            });
}
deleteUsers(users).addOnCompleteListener(new OnCompleteListener<String>() {
    @Override
    public void onComplete(@NonNull Task<String> task) {
        if (task.isSuccessful()) {
            Snackbar snackbar = Snackbar.make(requireView().findViewById(R.id.constraintlayout),
                    "Selected users are deleted", 4000);
            snackbar.show();
        } else {
            debugFirebase(task.getException());
        }
    }
});
private void debugFirebase(Exception e) {
    if (e instanceof FirebaseFunctionsException) {
        FirebaseFunctionsException ffe = (FirebaseFunctionsException) e;
        Log.d(TAG, "debugFirebase: MESSAGE: " + ffe.getMessage());
        Log.d(TAG, "debugFirebase: CODE: " + ffe.getCode());
        Log.d(TAG, "debugFirebase: DETAILS: " + ffe.getDetails());
        Log.e(TAG, "debugFirebase: EXCEPTION: ", ffe);
    }
}
Exception
com.google.firebase.functions.FirebaseFunctionsException: Response is not valid JSON object.
Caused by: org.json.JSONException: Value <!DOCTYPE of type java.lang.String cannot be converted to JSONObject
Your function is declared and exported as "deleteUser", but you are calling it in the Android client as "deleteUsers", which is not the same. The strings need to match.
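A minimal sketch of the two sides lining up (the rename is illustrative; renaming the client call to "deleteUser" instead would work equally well):
// index.js -- the property name on exports is the callable's public name
exports.deleteUsers = functions.https.onCall(async (data, context) => {
  // ... same body as above ...
});
The Android side then resolves it with mFunctions.getHttpsCallable("deleteUsers"). A mismatched name means the client hits a non-existent endpoint and receives an HTML error page, which is exactly the <!DOCTYPE content that the SDK then fails to parse as a JSON object.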
I am using a POST method to get a byte array of an Excel file.
C# server-side implementation:
downloadExcel() {
    ....
    FileResultDto fileResultDto = new FileResultDto
    {
        Data = ExcelHelper.CreateExcelFile(excelFile) // Data contains the byte array
    };
    return new JsonHttpResponseMessage(JsonConvert.SerializeObject(fileResultDto));
}
CreateExcelFile():
public byte[] CreateExcelFile(ExcelFile excelFile)
{
    try
    {
        #region Validation
        if (excelFile == null)
        {
            throw new ArgumentNullException(nameof(excelFile));
        }
        #endregion
        byte[] bytes;
        using (ExcelPackage excelPackage = new ExcelPackage())
        {
            for (int i = 1; i <= excelFile.Worksheets.Count; i++)
            {
                Worksheet worksheet = excelFile.Worksheets[i - 1];
                excelPackage.Workbook.Worksheets.Add(worksheet.Name);
                ExcelWorksheet currentExcelWorksheet = excelPackage.Workbook.Worksheets[i];
                if (excelFile.HasLogoTemplate)
                {
                    byte[] imageBytes = Convert.FromBase64String(LogoBase64);
                    Image image;
                    using (MemoryStream ms = new MemoryStream(imageBytes, 0, imageBytes.Length))
                    {
                        image = Image.FromStream(ms, true);
                    }
                    ExcelPicture picture = currentExcelWorksheet.Drawings.AddPicture("Logo", image);
                    picture.SetPosition(0, 4, 0, 10);
                    currentExcelWorksheet.Row(1).Height = 50;
                }
                SetColumnsWidths(currentExcelWorksheet, worksheet);
                WriteHeaderRow(currentExcelWorksheet, worksheet.HeaderRow);
                WriteCells(currentExcelWorksheet, worksheet.Cells);
            }
            #region Set Excel Stream
            bytes = excelPackage.GetAsByteArray();
            #endregion
        }
        return bytes;
    }
    catch (Exception exception)
    {
        throw new Exception("There was an error on excel export. Exception: ", exception);
    }
}
Front-end implementation:
public downloadExcel(): void {
  this.myRepository.downloadExcel(this.postData).then(result => {
    var byteArray = new Uint8Array(result.data.data);
    var a = window.document.createElement('a');
    a.href = window.URL.createObjectURL(new Blob([byteArray], { type: "application/octet-stream" }));
    a.download = "test.xlsx";
    document.body.appendChild(a);
    a.click();
    document.body.removeChild(a);
  }, error => {
    console.log(error);
  });
}
Apparently the created blob file is corrupted. Any suggestions as to where the problem might be?
Finally, the problem was solved by using 'arraybuffer' as the response type of the HTTP request.
let requestConfiguration: ng.IRequestConfig = {
  cache: ...,
  data: ...,
  headers: ...,
  method: ...,
  url: ...,
  responseType: 'arraybuffer'
};

let promise: ng.IPromise<any> = this.$http(requestConfiguration);
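For context, a minimal way to consume that configuration (a sketch; the handler body is illustrative):
this.$http(requestConfiguration).then((response) => {
  // With responseType 'arraybuffer', response.data is an ArrayBuffer,
  // so the Blob is built from raw bytes rather than a JSON-mangled string.
  let blob = new Blob([response.data], { type: "application/octet-stream" });
  // Hand the blob to the same anchor-element download code shown above.
});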
The same can be done in Angular. This might help you:
downloadExcel() {
  this.downloadAttachment(filename).subscribe(res => {
    let Res = res;
    const downloadedFile = new Blob([Res], { type: Res.type });
    const a = document.createElement('a');
    a.setAttribute('style', 'display:none;');
    document.body.appendChild(a);
    a.download = attachment.filename;
    a.href = URL.createObjectURL(downloadedFile);
    a.target = '_blank';
    a.click();
    document.body.removeChild(a);
  },
  err => {
    throw err;
  })
}

downloadAttachment(filename) {
  this.httpOptions = {
    reportProgress: true,
    responseType: "blob"
  };
  return this.http.get(API_URL, this.httpOptions).pipe(
    map(response => response),
    catchError(this.handleError<any>(isShowError)),
    finalize(() => {
    })
  );
}
C# Code
var res = DownloadAttachment(filename);
if (res == null)
    return Content("filename not present");
return File(res, Utility.GetContentType(filename), filename);
I'm trying to upload images to an S3 bucket as part of the application.
index.js
function upImg(req) {
  if (req.files.img) {         // note: the guard checks req.files.img ...
    var img = req.files.image; // ... but the upload reads req.files.image
    var name = Math.round(Math.random() * 10000).toString(); // Returns a random number of up to 5 digits
    if (myDB.uploadImg(img, name)) {
      return name;
    } else {
      return "";
    }
  } else {
    return "";
  }
}
app.post('/newEV*', isLoggedIn, function(req, res) {
  var myURL = req.path.replace('/newEV', '');
  var imgPath = upImg(req);
  fetch(myURL).then(function (events) {
    var myID;
    var x = 0;
    while (!myID) {
      if (!events[x]) {
        myID = x;
      } else {
        x++;
      }
    }
    myDB.newEvent(myURL, req.body.name, req.body.desc, req.body.loc, imgPath, req.body.link,
        req.body.cap, req.body.date, req.body.time, myID, events);
    res.redirect('/edit' + myURL);
  });
});
myDB file
function signs3(file, name) {
  devs3();
  const s3 = new aws.S3();
  const s3Params = {
    Body: file,
    Bucket: S3_BUCKET,
    Key: name
  };
  s3.putObject(s3Params, function(err, data) {
    if (err) {
      throw err;
    } else {
      console.log("Data from putObject: " + JSON.stringify(data));
    }
  });
}

module.exports = {
  uploadImg: function(file, name) {
    var nName = "imgs/" + name;
    console.log(nName);
    signs3(file, nName);
    return true;
  }
}
I know that the signs3 function works because I use it in other parts of my application to upload JSON files. Whenever I post to the URL, weirdly enough I can see the 'data from putObject' in the console, but what I can't see is the nName. I don't understand this, as the console.log(nName) line should run before the other one. When I look at the bucket, the image hasn't been uploaded (despite my getting an ETag in the console), and the page does not display it (I know this part works too, because it can display images already uploaded to the bucket).
You want to do something like this, subscribing to events on the Request object created when you call putObject.
const req = s3.putObject(s3Params);
req.on('success', res => {
  console.log('upload complete!');
});
req.on('error', res => {
  console.error(res.error);
});
req.send();
Why does this appear to work differently for small files (JSON files) and large files (images)? Because the large files take longer to upload.
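Alternatively, the AWS SDK for JavaScript (v2) exposes a promise interface on the same request object. A sketch of a promise-based variant, reusing devs3, aws, and S3_BUCKET from the code above, that would let uploadImg await completion instead of returning true immediately:
// Resolves once S3 acknowledges the object, so callers can await the
// upload rather than fire and forget.
async function signs3Async(file, name) {
  devs3();
  const s3 = new aws.S3();
  await s3.putObject({ Body: file, Bucket: S3_BUCKET, Key: name }).promise();
}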