I'm trying to accomplish the following task: I need to download an image from a URL, then upload it to S3 storage and return the location of the uploaded file. I'm using async/await functions to do the task, but it returns Promise { <pending> } and after a few seconds logs the location; I want to return the location after the promise is resolved. Here is my code:
// Space config
const spaceEndPoint = new AWS.Endpoint("fra1.digitaloceanspaces.com");
const s3 = new AWS.S3({
  endpoint: spaceEndPoint,
  accessKeyId: "xxxxxxxxx",
  secretAccessKey: "xxxxxxxxxxxxxxxxx",
});

// Download image from url
const downloadImage = async (url) => {
  try {
    const file = axios
      .get(url, {
        responseType: "stream",
      })
      .then((res) => res.data)
      .catch((err) => console.log(err));
    return file;
  } catch (err) {
    console.log(err);
  }
};

// Upload to space
const upload = async (fileUrl) => {
  // Get file name from url
  const fileName = path.basename(fileUrl);
  // Path to save tmp file
  const localFilePath = path.resolve(__dirname, "../downloads", fileName);
  // Download file
  const file = await downloadImage(fileUrl);
  // Write file to disk
  await file.pipe(fs.createWriteStream(localFilePath));
  // Upload params
  const params = {
    Bucket: "sunday",
    Body: fs.createReadStream(localFilePath),
    Key: path.basename(fileName),
    ContentType: "application/octet-stream",
    ACL: "public-read",
  };
  const { Location } = await s3.upload(params).promise();
  return Location;
};

console.log(
  upload(
    "https://i.pinimg.com/474x/e9/62/7c/e9627ce6fe731ba49597d3a83e21e398.jpg"
  ).then((data) => data)
);
// Result:
Promise { <pending> }
https://sunday.fra1.digitaloceanspaces.com/e9627ce6fe731ba49597d3a83e21e398.jpg
So I want to return the location when the promise is resolved.
Thanks in advance for your help!
Your function upload is async and thus always returns a promise, which must be awaited too. await your upload call. If you're in an environment that doesn't support top-level await, use .then to log the result instead, or move the outer logging code into an async helper function.
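For example, a minimal sketch of that fix, reusing the upload function from the question (wrapped in an async IIFE for environments without top-level await):
// Await the promise returned by upload() before logging its result
(async () => {
  try {
    const location = await upload(
      "https://i.pinimg.com/474x/e9/62/7c/e9627ce6fe731ba49597d3a83e21e398.jpg"
    );
    console.log(location); // logs the Location string instead of Promise { <pending> }
  } catch (err) {
    console.log(err);
  }
})();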
Related
I'm trying to delete an image from Firebase Storage in React Native by its downloadURL. Also, I'm using the web v9 SDK.
Here are the most essential chunks of code:
import {
  deleteObject,
  getStorage,
  ref
} from "firebase/storage";
...
// DownloadURL
const photo = "https://firebasestorage.googleapis.com.... ";
const storage = getStorage();
...
const storageRef = ref(storage, photo);
deleteObject(storageRef)
  .then(() => {
    console.log("File deleted successfully");
  })
  .catch((error) => {
    console.log(error.message);
  });
My code is inspired by here, and I got this error:
[Unhandled promise rejection: FirebaseError: Firebase Storage: The
operation 'deleteObject' cannot be performed on a root reference,
create a non-root reference using child, such as .child('file.png').
(storage/invalid-root-operation)]
I'm a little confused.
Later edit:
My function which uploads the image and gets the download URL:
// uri is returned from "expo-image-picker"
const uploadImageAsync = async (uri) => {
  // manipulateAsync imported from 'expo-image-manipulator'
  const manipResult = await manipulateAsync(uri, [], {
    compress: 0.6,
    format: "jpeg",
  });
  const response = await fetch(manipResult.uri);
  const blob = await response.blob();
  const imageName = Date.now().toString();
  const fileRef = ref(storage, `avatar/${user.uid}/${imageName}.jpg`);
  const result = await uploadBytes(fileRef, blob);
  getDownloadURL(fileRef).then((snapshot) => {
    // downloadURL is added to Redux state to be used later
    dispatch(addPhoto(snapshot));
  });
};
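For what it's worth, a hedged sketch of what the error message points at (not from the original post): it complains that the reference resolved to the bucket root, so one option, assuming the storage path used in uploadImageAsync is saved alongside the download URL, is to build the reference to delete from that path rather than from the https URL:
// Hypothetical: photoPath is the same `avatar/${user.uid}/${imageName}.jpg` path used at upload time
const photoPath = `avatar/${user.uid}/${imageName}.jpg`;
const storageRef = ref(getStorage(), photoPath); // non-root reference
deleteObject(storageRef)
  .then(() => console.log("File deleted successfully"))
  .catch((error) => console.log(error.message));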
I am writing a function that downloads and converts a PDF into individual JPG files by page. I am using the imagemagick library to do the conversion. I am having trouble with my processPDF() function: it immediately returns undefined. I put a console.log statement immediately before the function returns, and it logs exactly the value I expect, yet that value doesn't seem to make it outside of the function for some reason.
import im from 'imagemagick'
import { promises as fs } from 'fs'
import path from 'path'
import _ from 'lodash'
import axios from 'axios'
import { v4 as uuid } from 'uuid'

async function processPDF(pdfPath) {
  let basename = path.basename(pdfPath, '.pdf')
  let outputPath = "./img/" + basename + ".jpg";
  console.log(`Converting ${pdfPath}`)
  // Take PDF file and generate individual JPG files
  await im.convert(["-density", 300, pdfPath, outputPath], async (err) => {
    if (err) {
      console.log(err)
      throw `Couldn't Process ${pdfPath}`
    }
    else {
      // Get every file in Temporary Image Directory
      let files = await fs.readdir(`./img/`)
      // Append directory into filenames
      files = files.map(file => {
        return "./img/" + file
      })
      // We only want the files that match the source pdf's name
      files = files.filter((file) => {
        return file.includes(basename)
      })
      console.log(`Getting ${basename} Buffer Data`)
      // For each file, read and return the buffer data along with the path
      let images = await Promise.all(files.map(async file => {
        const contents = await fs.readFile(file)
        return { path: file, buffer: contents }
      }))
      // Since we read the files asynchronously, reorder the files
      images = _.orderBy(images, (image) => {
        let regex = /\d*.jpg/
        let res = image.path.match(regex)[0]
        res = path.basename(res, '.jpg')
        return res
      })
      let output = { pdf: pdfPath, images }
      // Returns a value
      console.log(output)
      // Returns undefined???
      return output
    }
  })
}

export async function downloadAndProcessPDF(url) {
  // Fetch PDF from server
  let { data } = await axios.get(url, {
    responseType: 'arraybuffer',
    headers: {
      'Content-Type': 'application/json',
      'Accept': 'application/pdf'
    }
  }).catch(e => {
    console.log(e);
    throw `Can't retrieve ${url}`
  })
  // Generate a unique ID for the pdf; since this is called asynchronously, it may run many times simultaneously
  let id = "./pdf/" + uuid() + ".pdf"
  await fs.writeFile(id, data);
  // tell processPDF to process the pdf in the ./pdf directory with the given filename
  let pdfData = await processPDF(id);
  // Returns undefined???
  console.log(pdfData)
  return pdfData
}
If I had to take a wild guess, I'd think that im.convert is the function that is giving me trouble. Throughout my source code I'm using promises to handle asynchronous tasks, yet im.convert() uses a callback function. I'm not super familiar with how concurrency works between promises and callbacks, so I think that's probably the issue.
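As a hedged sketch of that idea: because im.convert does not return a promise, awaiting it doesn't wait for the callback, and the return output inside the callback returns from the callback rather than from processPDF. Wrapping the call in a Promise (assuming the same imagemagick API used above) would let the post-processing run after the conversion finishes and let processPDF return its result:
// Hypothetical helper: wrap im.convert's callback API in a Promise
function convertAsync(args) {
  return new Promise((resolve, reject) => {
    im.convert(args, (err, stdout) => {
      if (err) reject(err)
      else resolve(stdout)
    })
  })
}
// Inside processPDF the conversion could then be awaited directly, e.g.
// await convertAsync(["-density", 300, pdfPath, outputPath])
// with the readdir/filter/orderBy logic placed after it, so `return output` actually returns from processPDF.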
I am trying to migrate to S3. Right now I am serving the images through URLs to other hostings. Can I download an image, change its size while I'm at it (to save on size and serve it faster), and upload it to S3 without saving a copy on my machine?
I made this for a project I am working on where I want to use a CDN and save data. It might be useful to others as well; edits and suggestions are welcome.
// configure your env file
require("dotenv").config({ path: __dirname + "/../.env" });
const fetch = require("node-fetch");
const sharp = require("sharp");
const AWS = require("aws-sdk");

// endpoint example for DO (using the S3 API)
const spacesEndpoint = new AWS.Endpoint("sfo3.digitaloceanspaces.com");
const s3 = new AWS.S3({
  endpoint: spacesEndpoint,
  accessKeyId: process.env.DO_SPACES_KEY,
  secretAccessKey: process.env.DO_SPACES_SECRET,
});

// Resize an image (file path or buffer) in memory and return a buffer plus metadata
function ShrinkSize(input, imageFit = "fill", width = 235, height = 320) {
  const resizeOptions = {
    fit: imageFit,
  };
  return sharp(input)
    .resize(width, height, resizeOptions)
    .withMetadata()
    .toBuffer({ resolveWithObject: true });
}

// Upload a buffer to the bucket and return the object key once the upload has finished
async function uploadImage(buffer, path, name, fileType) {
  const key = `${path}/${name}.${fileType}`;
  const params = {
    Bucket: "your-bucket",
    Key: key,
    Body: buffer,
    ACL: "public-read",
    ContentType: `image/${fileType}`,
    Metadata: {
      "x-amz-meta-my-key": "your-value",
    },
  };
  try {
    await s3.putObject(params).promise();
  } catch (err) {
    console.log(err, err.stack);
    throw new Error(`Failed to upload ${key}`);
  }
  console.log(`Successfully uploaded to ${key}, returning key`);
  return key;
}

async function DownloadShrinkUpload(url, path, name, objectFit = "cover") {
  const res = await fetch(url);
  const resBuffer = await res.buffer();
  const shrinkedImage = await ShrinkSize(resBuffer, objectFit);
  const resKey = await uploadImage(
    shrinkedImage.data,
    path,
    name,
    shrinkedImage.info.format
  );
  return resKey;
}

// example
DownloadShrinkUpload(
  "https://upload.wikimedia.org/wikipedia/commons/d/de/Wikipedia_Logo_1.0.png",
  "images",
  "wikipedia_logo"
).then((key) => console.log(key));
I have a bunch of VHD files stored on a private server, which are accessible through a URL.
I am trying to upload these VHD files directly to my Azure storage account using the Azure JavaScript npm libraries. The VHDs have to be uploaded as page blobs. I tried using the uploadPagesFromURL() method of the PageBlobClient, but with no success. My code looks roughly like this:
async function uploadVHD(accessToken, srcUrl) {
  try {
    // Get credentials from accessToken
    const creds = new StorageSharedKeyCredential(storageAccount.name, storageAccount.key);
    // Get blobServiceClient
    const blobServiceClient = new BlobServiceClient(`https://${storageAccount.name}.blob.core.windows.net`, creds);
    // Create container
    const containerClient = blobServiceClient.getContainerClient("vhd-images");
    await containerClient.createIfNotExists();
    const src = srcUrl.replace('https://', 'https://username:password#');
    // Upload to blob storage
    const pageBlobClient = containerClient.getPageBlobClient("Test.vhd");
    // Get fileSize of vhd
    const fileSize = (await axiosRequest(src, { method: "HEAD" })).headers["content-length"];
    const uploadResponse = await pageBlobClient.uploadPagesFromURL(src, 0, 0, fileSize);
    return uploadResponse;
  } catch (error) {
    return error;
  }
}
It is not possible to upload the page blob from your URL directly. You need to read the data from the URL first, then upload it using the uploadPages method.
axios.get(URL, {
    responseType: 'arraybuffer'
  })
  .then(async (response) => {
    const data = response.data;
    console.log(data.length);
    // upload the downloaded data as a page blob with the uploadPages method
    const uploadResponse = await pageBlobClient.uploadPages(data, 0, data.length);
  })
  .catch((error) => {
    // handle error
  });
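For completeness, a hedged sketch of how this could slot into the uploadVHD function above. It assumes the page blob is created with the VHD's size before any pages are written (page blob sizes and page ranges must be 512-byte aligned, which VHD files normally are) and that the file is small enough for a single call; larger VHDs would have to be written in chunks, since uploadPages has a per-call size limit:
// Download the VHD into memory (assumes `src` already carries any credentials it needs)
const response = await axios.get(src, { responseType: "arraybuffer" });
const data = Buffer.from(response.data);
// Page blobs must be created with a fixed size before pages can be written
await pageBlobClient.create(data.length);
// Write the downloaded bytes as pages starting at offset 0
const uploadResponse = await pageBlobClient.uploadPages(data, 0, data.length);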
I am creating a file backup between Google Drive and AWS S3, where I create a readable stream promise by downloading the file using the Google Drive get API and piping the data to AWS S3.
As I have many files, each promise is added to a queue, and new promises only enter when one resolves.
I'm struggling to resolve the promise only when the file has finished uploading to AWS S3, rather than when the file has finished downloading.
I thought using .on('finish', () => {resolve()}) would do this, but it doesn't seem to be working.
Here is my code sample:
// download stream of NON gdocs files and pipe to destination
const getGFileContent = async (fileObj) => {
  let fileExt = fileObj.path.join('/').concat('/', fileObj.name)
  return drive.files.get({fileId: fileObj.id, mimeType: fileObj.mimeType, alt: 'media'}, {responseType: 'stream'})
    .then(res => {
      return new Promise((resolve, reject) => {
        res.data
          .pipe(uploadS3(fileExt))
          .on('end', () => {console.log(`Done downloading file: ${fileExt}`)})
          .on('finish', () => {resolve(console.log(`File Backup Complete: ${fileExt}`))})
          .on('error', err => {reject(console.error(`Error downloading file: ${err}`))})
      })
    })
}

// upload a file to AWS S3 by passing the file stream from getGFileContent into the 'body' parameter of the upload
const uploadS3 = (filePath) => {
  let pass = new stream.PassThrough()
  let params = {
    Bucket: awsBucketName, // bucket-name
    Key: filePath, // file will be saved as bucket-name/[uniquekey.csv]
    Body: pass // file data passed through stream
  }
  new aws.S3().upload(params).promise()
    .then(() => console.log(`Successfully uploaded to S3: ${filePath}`))
    .catch(err => console.log(`Error, unable to upload to S3: ${err}`))
  return pass
}
The first thing that comes to mind is to make the uploadS3 function async and await the upload to finish before returning the PassThrough stream. But this wouldn't work: it would then return a Promise, and .pipe() accepts only a stream object.
Instead of that, you could refactor your code so that getGFileContent returns a readable stream promise.
Then, make uploadS3 accept a readable stream as a parameter and return an S3 upload promise.
To wrap it up, add an async backupFile function, which awaits both the GDrive stream and the upload promise before continuing. This also keeps the functions tidy and clean, each having its own single responsibility.
Example code:
const AWS = require('aws-sdk');
const fs = require('fs');

AWS.config.update({
  accessKeyId: '----',
  secretAccessKey: '----',
});
// create the client after the credentials have been set
const s3 = new AWS.S3();

const backupFile = async (file) => {
  const fileStream = await getGFileStream(file);
  try {
    await uploadStreamToS3(fileStream);
    console.log(`S3 Backup of ${fileStream.path} completed`)
  } catch (err) {
    console.log(`error during file upload ${err}`);
  }
}

const getGFileStream = async (fileObj) => {
  // TODO: logic to find and get the file. Returns a readableStream promise
  const fileStream = fs.createReadStream('./largeFile.zip');
  console.log('File ${...} read from Google Drive');
  return fileStream;
}

const uploadStreamToS3 = (fileStream) => {
  const params = {Bucket: 'test-bucket', Key: 'key', Body: fileStream}
  console.log(`Starting to upload ${fileStream.path} to S3`);
  return s3.upload(params).promise();
}

backupFile({id: 'mockTestFile'});
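Note that uploadStreamToS3 returns s3.upload(params).promise(), which resolves only once the upload to S3 has finished, so awaiting it inside backupFile gives exactly the "resolve only when the file has finished uploading" behaviour the question asks for.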