Parse-server sharp package in beforeSaveFile - javascript

I want to use the sharp package in the beforeSaveFile trigger, but it doesn't work; beforeSaveFile doesn't change anything.
My code:
Parse.Cloud.define('test', async (req) => {
try {
const Resim = Parse.Object.extend('Resim')
const obj = new Resim()
const { photo } = req.params
let uploadedFile = await new Parse.File(
'galleryFile',
{ base64: photo },
'image/png'
)
obj.set('photo', uploadedFile)
const data = await obj.save()
return data
} catch (error) {
throw error
}
})
Parse.Cloud.beforeSaveFile(async (req) => {
const image = await sharp(req.file).resize(256)
return image
})
Thanks for the help.

I figured it out. This is the solution:
Parse.Cloud.beforeSaveFile(async (req) => {
const file = req.file
const fileData = await file.getData()
const str = fileData.toString('base64')
const imageBuffer = Buffer.from(str, 'base64')
const newImageBuffer = await sharp(imageBuffer)
.resize(800, 800)
.webp({ quality: 70, lossless: true })
.toBuffer()
return new Parse.File(
'image',
{ base64: newImageBuffer.toString('base64') },
'image/webp'
)
})
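As an aside, the intermediate base64 string isn't strictly needed: Buffer.from copies a Buffer as-is and decodes a base64 string, so a slightly shorter sketch of the same trigger (assuming getData() resolves to one of those two shapes) would be:
Parse.Cloud.beforeSaveFile(async (req) => {
  // Works whether getData() resolves to a Buffer or to a base64 string.
  const imageBuffer = Buffer.from(await req.file.getData(), 'base64')
  const resized = await sharp(imageBuffer)
    .resize(800, 800)
    .webp({ quality: 70, lossless: true })
    .toBuffer()
  return new Parse.File('image', { base64: resized.toString('base64') }, 'image/webp')
})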

Related

Uploading image to firebase using expo react-native

I am working on an app using Expo. I want each user to be able to upload an image to Firebase and later publish that image on their profile page.
Using Expo, this is how I upload images:
const pickImage = async () => {
let pickerResult = await ImagePicker.launchImageLibraryAsync({
mediaTypes: ImagePicker.MediaTypeOptions.All,
allowsEditing: true,
aspect: [4, 3],
quality: 1,
});
console.log(pickerResult);
handleImagePicked(pickerResult);
};
the result in the console is:
Object {
"cancelled": false,
"height": 312,
"type": "image",
"uri": "file:///data/user/0/host.exp.exponent/cache/ExperienceData/%2540anonymous%252Fallergyn-app-77bfd368-65fd-43f9-8c34-9c35cef42c25/ImagePicker/daaa229c-c352-4994-ae18-ca2dbb3534ce.jpg",
"width": 416,
}
and this is how I upload to Firebase:
const handleImagePicked = async (pickerResult) => {
try {
if (!pickerResult.cancelled) {
setImage(pickerResult.uri);
await uploadImageAsync(pickerResult.uri);
console.log("done");
}
} catch (e) {
console.log(e);
alert("Upload failed, sorry :(");
} finally {
}
};
async function uploadImageAsync(uri) {
const blob = await new Promise((resolve, reject) => {
const xhr = new XMLHttpRequest();
xhr.onload = function () {
resolve(xhr.response);
};
xhr.onerror = function (e) {
console.log(e);
reject(new TypeError("Network request failed"));
};
xhr.responseType = "blob";
xhr.open("GET", uri, true);
xhr.send(null);
});
const ref = firebase
.storage()
.ref()
.child("images" + Math.random());
const snapshot = await ref.put(blob);
// We're done with the blob, close and release it
blob.close();
return await snapshot.ref.getDownloadURL();
}
This code works in the sense that it saves the path of the image, i.e. "file:///data/user/0/host.exp.exponent/cache/ExperienceData/%2540anonymous%252Fallergyn-app-77bfd368-65fd-43f9-8c34-9c35cef42c25/ImagePicker/daaa229c-c352-4994-ae18-ca2dbb3534ce.jpg", in Firebase under the user collection, using the uid of the user.
I am not sure this is right, because I want the image itself to be uploaded to Firebase. I saw some threads on Stack Overflow about this issue, but they are either too old or unanswered, so I am hoping to get some sort of solution to what I need to do.
if I use
const ref = firebase
.storage()
.ref()
.child("images" + Math.random());
.putFile(uri);
This tells me that putFile is not a function; the same happens with put(uri).
Try this one. This function returns the path of the saved image in Firebase Storage, which you can store in the user's document instead.
const handleImagePicked = async (pickerResult) => {
if (!pickerResult.cancelled) {
setImage(pickerResult.uri);
const result = await uploadImageAsync(pickerResult.uri);
if(result) {
console.log('success');
//save the result path to firestore user document
return;
}
alert("Upload failed, sorry :(");
}
};
export const uploadImageAsync = async (uri: string) => {
let filename = uri;
if (Platform.OS === 'ios') {
filename = uri.replace('file:', '');
}
const ext = filename.split('.').pop();
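// Note: 'id' is not defined in this snippet; it is assumed to be a unique identifier you generate elsewhere (e.g. the user's uid or a uuid).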
const path = `images/${id}.${ext}`;
const ref = firebase.storage().ref(path);
try {
const response = await fetch(filename);
const blob = await response.blob();
await ref.put(blob);
return path;
} catch {
return null;
}
};
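If you later need a public URL to display the image (e.g. on the profile page), here is a minimal follow-up sketch, assuming the same Firebase v8 web SDK as above; getImageUrl is a hypothetical helper name:
// Hypothetical helper: resolve the stored path back to a download URL.
const getImageUrl = (path) => firebase.storage().ref(path).getDownloadURL();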
This worked for me, using rn-fetch-blob:
import { launchImageLibrary } from 'react-native-image-picker';
import RNFetchBlob from 'rn-fetch-blob';
import storage from '@react-native-firebase/storage';
const pickImage = () => {
let options = {
mediaType: 'photo',
quality: 0.5,
};
launchImageLibrary(options, (response) => {
console.log('Response = ', response);
uploadImagePicked(response);
});
};
const uploadImagePicked = (response) => {
if (response.fileName) {
const fileName = response.fileName;
var storageRef = storage().ref(`receiptImages/${fileName}`);
RNFetchBlob.fs.readFile(response.uri , 'base64')
.then(data => {
storageRef.putString(data, 'base64', {contentType:"image/jpg"})
.on(
storage.TaskEvent.STATE_CHANGED,
snapshot => {
console.log("snapshot: " + snapshot.state);
console.log("progress: " + (snapshot.bytesTransferred / snapshot.totalBytes) * 100);
if (snapshot.state === storage.TaskState.SUCCESS) {
console.log("Success");
}
},
error => {
console.log("image upload error: " + error.toString());
},
() => {
storageRef.getDownloadURL()
.then((downloadUrl) => {
console.log("File available at: " + downloadUrl);
})
})
})
.catch(error => {
console.log(error);
})
}
else {
console.log("Skipping image upload");
}
}

Upload Image to azure blob from a URL using azure functions in Nodejs

I have a requirement where the user wants to upload an image from a source URL, let's say "https://homepages.cae.wisc.edu/~ece533/images/airplane.png", using Azure Functions. Right now I am calling fetch with the image URL and converting the response into a blob, but that doesn't seem to be working. Below is the code. Is there a better way to achieve this?
const { BlobServiceClient } = require("@azure/storage-blob");
const fetch = require("node-fetch");
const multipart = require("parse-multipart");
const AZURE_STORAGE_CONNECTION_STRING = process.env["AZURE_STORAGE_CONNECTION_STRING"];
module.exports = async function (context, req) {
context.log("JavaScript HTTP trigger function processed a request.");
const name =
req.query.name ||
(req.body &&
req.body.secure_url);
const responseMessage = name
? "Hello, " + name + ". This HTTP triggered function executed successfully."
: "This HTTP triggered function executed successfully. Pass a name in the query string or in the request body for a personalized response.";
// context.log("requested body: ", req.body);
var images = "\""+ req.body.secure_url.toString() +"\"";
context.log("Image URL : ", images);
var bodyBuffer = Buffer.from(JSON.stringify(req.body));
let header = req.headers["content-type"]
let boundary = header.split(" ")[1]
boundary = header.split("=")[1]
// var boundary = multipart.getBoundary(req.headers['content-type']);
var parts = multipart.Parse(req.body, header);
var requestOptions = {
method: 'GET'
};
fetch(images, requestOptions)
.then((response) => {
context.log("Response Blob : ",response.blob())
response.blob()
}) // Gets the response and returns it as a blob
.then((blob) => { main(blob)
}).catch(error => console.log('error', error));
async function main(blob) {
const blobServiceClient = await BlobServiceClient.fromConnectionString(AZURE_STORAGE_CONNECTION_STRING);
const container = "sepik01-rdp-media-assets-migration";
const containerClient = await blobServiceClient.getContainerClient(container);
const blobName = images.toString().replace(/^.*[\\\/]/, "");
context.log("File Name: ", blobName);
const blockBlobClient = containerClient.getBlockBlobClient(blobName);
// const uploadBlobResponse = await blockBlobClient.upload(parts[0].secure_url, parts[0].length);
const uploadBlobResponse = await blockBlobClient.upload(blob, blob.length);
context.res = { body : responseMessage };
context.done();
}
};
When we use the node-fetch package to send an HTTP request, response.body is returned as a Readable stream. We can then use that stream to upload the data to an Azure blob.
For example:
const fetch = require("node-fetch");
const { BlobServiceClient } = require("@azure/storage-blob");
const mime = require("mime");
const AZURE_STORAGE_CONNECTION_STRING =""
module.exports = async function (context, req) {
const images = "https://homepages.cae.wisc.edu/~ece533/images/airplane.png";
const requestOptions = {
method: "GET",
};
const response = await fetch(images, requestOptions);
if (!response.ok)
throw new Error(`unexpected response ${response.statusText}`);
const blobName = images.toString().replace(/^.*[\\\/]/, "");
const blobServiceClient = await BlobServiceClient.fromConnectionString(
AZURE_STORAGE_CONNECTION_STRING
);
const containerClient = await blobServiceClient.getContainerClient("image");
const blockBlobClient = containerClient.getBlockBlobClient(blobName);
const uploadBlobResponse = await blockBlobClient.uploadStream(
response.body,
4 * 1024 * 1024,
20,
{
blobHTTPHeaders: {
blobContentType: mime.getType(blobName),
},
}
);
context.res = { body: uploadBlobResponse._response.status };
};
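The two numeric arguments to uploadStream are the buffer size (4 MB here) and the maximum concurrency (20). For small images you could also buffer the whole response and call upload instead; a hedged sketch, assuming the same response, containerClient and blobName as above:
// Buffered alternative for small files -- node-fetch v2 exposes response.buffer().
const imageBuffer = await response.buffer();
await blockBlobClient.upload(imageBuffer, imageBuffer.length, {
  blobHTTPHeaders: { blobContentType: mime.getType(blobName) },
});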

How can I access the response headers of a request that is piped to a feedparser

I am trying to parse an RSS feed using the request and feedparser-promised libraries. I am able to parse the feed using the code below.
import Bottleneck from 'bottleneck';
const feedparser = require('feedparser-promised');
const limiter = new Bottleneck({
maxConcurrent: 1,
minTime: 333,
});
const httpOptions = {
uri: val.sourcefeedurl,
resolveWithFullResponse: true,
method: 'GET',
pool: false,
headers: {
'If-None-Match': val.etag,
'If-Modified-Since': val.LastModified,
Connection: 'keep-alive',
ciphers: 'DES-CBC3-SHA',
},
};
const response = await limiter.schedule(() => feedparser.parse(httpOptions));
But since I use the feedparser-promised library, I am not able to cache the ETag and Last-Modified values from the response headers.
I tried modifying feedparser-promised like this:
'use strict';
const request = require('request');
const feedParser = require('./feedParser');
const parse = (requestOptions, feedparserOptions) => {
const metaData = {};
return new Promise((resolve, reject) => {
request.get(requestOptions).on('error', reject).on('response', async resp => {
if (resp.statusCode === 304) {
reject('Source not modified');
} else if (resp.statusCode === 200) {
metaData.etagin = await resp.headers.etag;
metaData.LastModifiedin = await resp.headers['last-modified'];
metaData.LastModifiedLocal = await resp.headers['last-modified'];
// console.log(metaData);
}
}).pipe(feedParser(feedparserOptions).on('error', reject).on('response', resolve));
});
};
module.exports = {
parse
};
Below is the feedParser file
'use strict';
const FeedParserStream = require('feedparser');
module.exports = (feedparserOptions, metaData) => {
// console.log(metaData, 'herre');
const parsedItems = [];
const feedparser = new FeedParserStream(feedparserOptions);
// console.log(feedparser);
feedparser.on('readable', () => {
// console.log(resp);
let item;
while (item = feedparser.read()) {
parsedItems.push(item);
}
return parsedItems;
}).on('end', function next() {
this.emit('response', parsedItems);
});
return feedparser;
};
So my question is: how do I return the response headers along with the parsedItems (as in the code) when resolving the promise?
Help is very much appreciated.
Pass the metaData on the end event, like this:
'use strict';
const FeedParserStream = require('feedparser');
module.exports = (feedparserOptions, metaData) => {
// console.log(metaData, 'herre');
const parsedItems = [];
const feedparser = new FeedParserStream(feedparserOptions);
// console.log(feedparser);
feedparser.on('readable', () => {
// console.log(resp);
let item;
while (item = feedparser.read()) {
parsedItems.push(item);
}
return parsedItems;
}).on('end', function next() {
this.emit('response', { parsedItems, metaData });
});
return feedparser;
};
and update your feedparser-promised parse like this:
'use strict';
const request = require('request');
const feedParser = require('./feedParser');
const parse = (requestOptions, feedparserOptions) => {
const metaData = {};
return new Promise((resolve, reject) => {
request.get(requestOptions).on('error', reject).on('response', async resp => {
if (resp.statusCode === 304) {
reject('Source not modified');
} else if (resp.statusCode === 200) {
metaData.etagin = await resp.headers.etag;
metaData.LastModifiedin = await resp.headers['last-modified'];
metaData.LastModifiedLocal = await resp.headers['last-modified'];
// console.log(metaData);
}
}).pipe(feedParser(feedparserOptions, metaData).on('error', reject).on('response', resolve));
});
};
module.exports = {
parse
};
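With that change, the caller gets both the items and the header metadata when the promise resolves; a hedged usage sketch, reusing the limiter and httpOptions from the question (parse is the modified function exported above):
// Hypothetical usage: parsedItems holds the feed entries, metaData the caching headers.
const { parsedItems, metaData } = await limiter.schedule(() => parse(httpOptions));
// Persist metaData.etagin / metaData.LastModifiedin and send them back as
// If-None-Match / If-Modified-Since on the next poll.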

Convert image path to blob react native

Problem
I am trying to create an app with React Native and Firebase. One of the features I would like for this app is the ability to upload images. I am having some trouble uploading the images to Firebase Storage, though. I am using Expo's image picker to find the path of the image that the user wants to upload, but once I have the path I don't know how to convert it to something I can upload to Firebase.
Can somebody help me convert the path of an image to something I can upload to Firebase Storage with React Native?
What I've tried
I tried using:
_pickImage = async () => {
let result = await ImagePicker.launchImageLibraryAsync({
MediaTypeOptions: 'Images',
quality: 0.4,
_uploadAsByteArray = async (pickerResultAsByteArray, progressCallback) => {
try {
var metadata = {
contentType: 'image/jpeg',
};
var storageRef = firebase.storage().ref();
var ref = storageRef.child('images/'+expoID+'/'+this.state.time)
let uploadTask = ref.put(pickerResultAsByteArray, metadata)
uploadTask.on('state_changed', function (snapshot) {
progressCallback && progressCallback(snapshot.bytesTransferred / snapshot.totalBytes)
var progress = (snapshot.bytesTransferred / snapshot.totalBytes) * 100;
console.log('Upload is ' + progress + '% done');
}, function (error) {
console.log("in _uploadAsByteArray ", error)
}, function () {
var downloadURL = uploadTask.snapshot.downloadURL;
console.log("_uploadAsByteArray ", uploadTask.snapshot.downloadURL)
this.setState({imageUploaded:true})
});
} catch (ee) {
console.log("when trying to load _uploadAsByteArray ", ee)
}
}
convertToByteArray = (input) => {
var binary_string = this.atob(input);
var len = binary_string.length;
var bytes = new Uint8Array(len);
for (var i = 0; i < len; i++) {
bytes[i] = binary_string.charCodeAt(i);
}
return bytes
}
atob = (input) => {
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=';
let str = input.replace(/=+$/, '');
let output = '';
if (str.length % 4 == 1) {
throw new Error("'atob' failed: The string to be decoded is not correctly encoded.");
}
for (let bc = 0, bs = 0, buffer, i = 0;
buffer = str.charAt(i++);
~buffer && (bs = bc % 4 ? bs * 64 + buffer : buffer,
bc++ % 4) ? output += String.fromCharCode(255 & bs >> (-2 * bc & 6)) : 0
) {
buffer = chars.indexOf(buffer);
}
return output;
}
uploadImage(bsfdata){
this.setState({imageUploaded:false})
this._uploadAsByteArray(this.convertToByteArray(bsfdata), (progress) => {
this.setState({ progress:progress })
})
}
base64:true,
});
/* if (!result.cancelled) {
this.setState({ image: result.uri });
let formData = new FormData();
formData.append('photo', {
uri,
name: `photo.${fileType}`,
type: `image/${fileType}`,
});}*/
this.uploadImage(result.base64);
};
}
I've tried it with the commented code included, which doesn't upload anything, and with the code as it is now, which gives me the error "Can currently only create a Blob from other Blobs", and the upload progress never gets above 0%.
If you are using Expo (>= 26), then you can do it easily with the following lines of code:
uploadImage = async(imageUri) => {
const response = await fetch(imageUri);
const blob = await response.blob();
var ref = firebase.storage().ref().child("image.jpg");
return ref.put(blob);
}
Reference: https://youtu.be/KkZckepfm2Q
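ref.put(blob) returns an upload task you can await; here is a minimal follow-up sketch (assuming the Firebase v8 web SDK) if you also want the download URL to store on the user's profile:
// Hypothetical usage of uploadImage() above.
const snapshot = await uploadImage(pickerResult.uri);
const downloadUrl = await snapshot.ref.getDownloadURL();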
Refer to this link: https://github.com/dailydrip/react-native-firebase-storage/blob/master/src/App.js#L43-L69
The following block of code works fine:
uploadImage(uri, mime = 'application/octet-stream') {
return new Promise((resolve, reject) => {
const uploadUri = Platform.OS === 'ios' ? uri.replace('file://', '') : uri
let uploadBlob = null
const imageRef = FirebaseClient.storage().ref('images').child('image_001')
fs.readFile(uploadUri, 'base64')
.then((data) => {
return Blob.build(data, { type: `${mime};BASE64` })
})
.then((blob) => {
uploadBlob = blob
return imageRef.put(blob, { contentType: mime })
})
.then(() => {
uploadBlob.close()
return imageRef.getDownloadURL()
})
.then((url) => {
resolve(url)
})
.catch((error) => {
reject(error)
})
})
}
You need to install the rn-fetch-blob module:
npm install --save rn-fetch-blob
Then, do the following:
import RNFetchBlob from 'rn-fetch-blob';
const Blob = RNFetchBlob.polyfill.Blob;
const fs = RNFetchBlob.fs;
window.XMLHttpRequest = RNFetchBlob.polyfill.XMLHttpRequest;
window.Blob = Blob;
function uploadImage(path) {
const imageFile = RNFetchBlob.wrap(path);
// 'path/to/image' is where you wish to put your image in
// the database, if you would like to put it in the folder
// 'subfolder' inside 'mainFolder' and name it 'myImage', just
// replace it with 'mainFolder/subfolder/myImage'
const ref = firebase.storage().ref('path/to/image');
var uploadBlob = null;
Blob.build(imageFile, { type: 'image/jpg;' })
.then((imageBlob) => {
uploadBlob = imageBlob;
return ref.put(imageBlob, { contentType: 'image/jpg' });
})
.then(() => {
uploadBlob.close();
return ref.getDownloadURL();
})
.then((url) => {
// do something with the url if you wish to
})
.catch(() => {
dispatch({
type: UPDATE_PROFILE_INFO_FAIL,
payload: 'Unable to upload profile picture, please try again'
});
});
}
Please do ask if there's any part of the code you don't understand. To upload multiple images, simply wrap this code in a for loop, or, if you want to make sure that every image uploads without any error, use Promise.all(), as in the sketch below.
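A minimal sketch, assuming the uploadImage(path) helper defined above:
// Upload several images concurrently and reject if any single upload fails.
const uploadAll = (paths) => Promise.all(paths.map((p) => uploadImage(p)));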
Not sure who this might help, but if you're using MediaLibrary to load images from the gallery, the uri comes in the format uri = file:///storage/emulated/0/DCIM/Camera/filename.jpg.
In this case, using fetch(uri) didn't help me get the blob.
But if you use fetch(uri.replace("file:///", "file:/")) and then follow @sriteja Sugoor's answer, you'll be able to upload the file blob.
const Blob = RNFetchBlob.polyfill.Blob;
const fs = RNFetchBlob.fs;
let uploadBlob;
await fs
.readFile(params?.file.path, 'base64')
.then((data) => {
return Blob.build(data, {type: `BASE64`});
})
.then((blob) => {
uploadBlob = blob;
console.log(uploadBlob, 'uploadBlob');
});
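From there, uploadBlob can be pushed the same way as in the earlier rn-fetch-blob answers; a hedged continuation (the storage path is illustrative):
// Hypothetical continuation: upload the blob built above and release it afterwards.
const ref = firebase.storage().ref('path/to/image');
await ref.put(uploadBlob, { contentType: 'image/jpg' });
uploadBlob.close();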

Upload entire directory tree to S3 using AWS sdk in node js

I currently upload single objects to S3 like so:
var options = {
Bucket: bucket,
Key: s3Path,
Body: body,
ACL: s3FilePermissions
};
S3.putObject(options,
function (err, data) {
//console.log(data);
});
But when I have a large resources folder, for example, I use the AWS CLI tool.
I was wondering: is there a native way to do the same thing with the AWS SDK (upload entire folders to S3)?
Old-school recursive way I whipped up in a hurry. It only uses core Node modules and the standard AWS SDK.
var AWS = require('aws-sdk');
var path = require("path");
var fs = require('fs');
const uploadDir = function(s3Path, bucketName) {
let s3 = new AWS.S3();
function walkSync(currentDirPath, callback) {
fs.readdirSync(currentDirPath).forEach(function (name) {
var filePath = path.join(currentDirPath, name);
var stat = fs.statSync(filePath);
if (stat.isFile()) {
callback(filePath, stat);
} else if (stat.isDirectory()) {
walkSync(filePath, callback);
}
});
}
walkSync(s3Path, function(filePath, stat) {
let bucketPath = filePath.substring(s3Path.length+1);
let params = {Bucket: bucketName, Key: bucketPath, Body: fs.readFileSync(filePath) };
s3.putObject(params, function(err, data) {
if (err) {
console.log(err)
} else {
console.log('Successfully uploaded '+ bucketPath +' to ' + bucketName);
}
});
});
};
uploadDir("path to your folder", "your bucket name");
Special thanks to Ali from this post for helping get the filenames.
async/await + TypeScript
If you need a solution that uses modern JavaScript syntax and is compatible with TypeScript, I came up with the following code. The recursive getFiles is borrowed from this answer (after all these years, recursion still gives me a headache, lol).
import { promises as fs, createReadStream } from 'fs';
import * as path from 'path';
import { S3 } from 'aws-sdk';
async function uploadDir(s3Path: string, bucketName: string) {
const s3 = new S3();
// Recursive getFiles from
// https://stackoverflow.com/a/45130990/831465
async function getFiles(dir: string): Promise<string | string[]> {
const dirents = await fs.readdir(dir, { withFileTypes: true });
const files = await Promise.all(
dirents.map((dirent) => {
const res = path.resolve(dir, dirent.name);
return dirent.isDirectory() ? getFiles(res) : res;
})
);
return Array.prototype.concat(...files);
}
const files = (await getFiles(s3Path)) as string[];
const uploads = files.map((filePath) =>
s3
.putObject({
Key: path.relative(s3Path, filePath),
Bucket: bucketName,
Body: createReadStream(filePath),
})
.promise()
);
return Promise.all(uploads);
}
await uploadDir(path.resolve('./my-path'), 'bucketname');
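One caveat: path.relative uses the platform separator, so on Windows the generated keys would contain backslashes. A hedged helper (not part of the original answer) to build POSIX-style keys:
// Build a forward-slash key regardless of the platform separator.
const toS3Key = (root, filePath) => path.relative(root, filePath).split(path.sep).join('/');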
Here is a cleaned-up/debugged/working version of @Jim's solution:
function uploadArtifactsToS3() {
const artifactFolder = `logs/${config.log}/test-results`;
const testResultsPath = './test-results';
const walkSync = (currentDirPath, callback) => {
fs.readdirSync(currentDirPath).forEach((name) => {
const filePath = path.join(currentDirPath, name);
const stat = fs.statSync(filePath);
if (stat.isFile()) {
callback(filePath, stat);
} else if (stat.isDirectory()) {
walkSync(filePath, callback);
}
});
};
walkSync(testResultsPath, async (filePath) => {
let bucketPath = filePath.substring(testResultsPath.length - 1);
let params = {
Bucket: process.env.SOURCE_BUCKET,
Key: `${artifactFolder}/${bucketPath}`,
Body: fs.readFileSync(filePath)
};
try {
await s3.putObject(params).promise();
console.log(`Successfully uploaded ${bucketPath} to s3 bucket`);
} catch (error) {
console.error(`error in uploading ${bucketPath} to s3 bucket`);
throw new Error(`error in uploading ${bucketPath} to s3 bucket`);
}
});
}
I was just contemplating this problem the other day, and was thinking something like this:
...
var async = require('async'),
fs = require('fs'),
path = require("path");
var directoryName = './test',
directoryPath = path.resolve(directoryName);
var files = fs.readdirSync(directoryPath);
async.map(files, function (f, cb) {
var filePath = path.join(directoryPath, f);
var options = {
Bucket: bucket,
Key: s3Path,
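// NOTE: as written, every file is uploaded under the same key and overwrites the previous one; you would likely want a per-file key, e.g. path.join(s3Path, f).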
Body: fs.readFileSync(filePath),
ACL: s3FilePermissions
};
S3.putObject(options, cb);
}, function (err, results) {
if (err) console.error(err);
console.log(results);
});
Here's a version that wraps the upload method in a Promise. This allows you to perform an action once all uploads are complete, via Promise.all().then(...):
const path = require('path');
const fs = require('fs');
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
const directoryToUpload = 'directory-name-here';
const bucketName = 'name-of-s3-bucket-here';
// get file paths
const filePaths = [];
const getFilePaths = (dir) => {
fs.readdirSync(dir).forEach(function (name) {
const filePath = path.join(dir, name);
const stat = fs.statSync(filePath);
if (stat.isFile()) {
filePaths.push(filePath);
} else if (stat.isDirectory()) {
getFilePaths(filePath);
}
});
};
getFilePaths(directoryToUpload);
// upload to S3
const uploadToS3 = (dir, path) => {
return new Promise((resolve, reject) => {
const key = path.split(`${dir}/`)[1];
const params = {
Bucket: bucketName,
Key: key,
Body: fs.readFileSync(path),
};
s3.putObject(params, (err) => {
if (err) {
reject(err);
} else {
console.log(`uploaded ${params.Key} to ${params.Bucket}`);
resolve(path);
}
});
});
};
const uploadPromises = filePaths.map((path) =>
uploadToS3(directoryToUpload, path)
);
Promise.all(uploadPromises)
.then((result) => {
console.log('uploads complete');
console.log(result);
})
.catch((err) => console.error(err));
You might try node-s3-client.
UPDATE: Available on npm here.
From the "sync a directory to S3" docs:
UPDATE: Added client initialization code.
var s3 = require('s3'); // node-s3-client is published on npm as "s3"
var client = s3.createClient({
maxAsyncS3: 20, // this is the default
s3RetryCount: 3, // this is the default
s3RetryDelay: 1000, // this is the default
multipartUploadThreshold: 20971520, // this is the default (20 MB)
multipartUploadSize: 15728640, // this is the default (15 MB)
s3Options: {
accessKeyId: "YOUR ACCESS KEY",
secretAccessKey: "YOUR SECRET ACCESS KEY"
}
});
var params = {
localDir: "some/local/dir",
deleteRemoved: true, // default false, whether to remove s3 objects
// that have no corresponding local file.
s3Params: {
Bucket: "s3 bucket name",
Prefix: "some/remote/dir/",
// other options supported by putObject, except Body and ContentLength.
// See: http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#putObject-property
},
};
var uploader = client.uploadDir(params);
uploader.on('error', function(err) {
console.error("unable to sync:", err.stack);
});
uploader.on('progress', function() {
console.log("progress", uploader.progressAmount, uploader.progressTotal);
});
uploader.on('end', function() {
console.log("done uploading");
});
This works for me (you'll need to add the walk-sync package):
const walkSync = require("walk-sync");
const Path = require("path");
const fs = require("fs");
// s3 is assumed to be an initialized AWS.S3 client and Logger a logging helper defined elsewhere.
async function asyncForEach(array, callback) {
for (let index = 0; index < array.length; index++) {
await callback(array[index], index, array);
}
}
const syncS3Directory = async (s3Path, endpoint) => {
await asyncForEach(walkSync(s3Path, {directories: false}), async (file) => {
const filePath = Path.join(s3Path, file);
const fileContent = fs.readFileSync(filePath);
const params = {
Bucket: endpoint,
Key: file,
Body: fileContent,
ContentType: "text/html",
};
let s3Upload = await s3.upload(params).promise();
s3Upload ? undefined : Logger.error("Error synchronizing the bucket");
});
console.log("S3 bucket synchronized!");
};
const AWS = require("aws-sdk");
const fs = require("fs");
const path = require("path");
const async = require("async");
const readdir = require("recursive-readdir");
// AWS CRED
const ID = "<accessKeyId>";
const SECRET = "<secretAccessKey>";
const rootFolder = path.resolve(__dirname, "../");
const uploadFolder = "./sources";
// The name of the bucket that you have created
const BUCKET_NAME = "<Bucket_Name>";
const s3 = new AWS.S3({
accessKeyId: ID,
secretAccessKey: SECRET
});
function getFiles(dirPath) {
return fs.existsSync(dirPath) ? readdir(dirPath) : [];
}
async function uploadToS3(uploadPath) {
const filesToUpload = await getFiles(path.resolve(rootFolder, uploadPath));
console.log(filesToUpload);
return new Promise((resolve, reject) => {
async.eachOfLimit(
filesToUpload,
10,
async.asyncify(async file => {
const Key = file.replace(`${rootFolder}/`, "");
console.log(`uploading: [${Key}]`);
return new Promise((res, rej) => {
s3.upload(
{
Key,
Bucket: BUCKET_NAME,
Body: fs.readFileSync(file)
},
err => {
if (err) {
return rej(new Error(err));
}
res({ result: true });
}
);
});
}),
err => {
if (err) {
return reject(new Error(err));
}
resolve({ result: true });
}
);
});
}
uploadToS3(uploadFolder)
.then(() => {
console.log("upload complete!");
process.exit(0);
})
.catch(err => {
console.error(err.message);
process.exit(1);
});
