I'm trying to create a Cloudflare Worker that receives an image URL and returns the image's width and height. But I get ReferenceError: Image is not defined for new Image(). Is there a workaround to make the code below work?
addEventListener('fetch', event => {
event.respondWith(handleRequest(event.request))
})
async function handleRequest(request) {
let imageUrl = new URL(request.url).searchParams.get('imageUrl')
let response = await fetch(imageUrl)
let blob = await response.blob()
let image = new Image() // <------ ReferenceError: Image is not defined
return new Promise((resolve, reject) => {
image.onload = function() {
resolve(new Response(JSON.stringify({
width: image.width,
height: image.height
}), {
headers: {
'content-type': 'application/json'
}
}))
}
image.onerror = reject
image.src = URL.createObjectURL(blob)
})
}
It seems other functions for images have a similar problem. Example: ReferenceError: createImageBitmap is not defined. So any possible solution would be great.
Image is only available in the browser.
If you want to get the width and height of an image in Node.js, try this:
const https = require('https'); // the example URL below is https, so use the https module
const fs = require('fs');
const gm = require('gm'); // requires GraphicsMagick (or ImageMagick) to be installed
async function handleRequest() {
const filename = "image.png"
const fileURL = "https://any-page.dom/"+filename
const file = fs.createWriteStream(filename);
return new Promise((resolve, reject) => {
const request = https.get(fileURL, function(response) {
response.pipe(file);
// after download completed close filestream
file.on("finish", () => {
console.log("Download Completed");
file.close();
gm(filename)
.size(function (err, size) {
if (!err) {
console.log('width = ' + size.width);
console.log('height = ' + size.height);
resolve(new Response(JSON.stringify({
width: size.width,
height: size.height
}), {
headers: {
'content-type': 'application/json'
}
}))
} else {
reject(err)
}
});
});
});
})
}
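If the code has to stay inside the Worker (where neither Image nor gm is available), one possible workaround is to read the dimensions straight from the image header bytes. Here is a rough sketch for PNG only; it assumes the fetched file really is a PNG (JPEG would need more involved parsing or a small library):
addEventListener('fetch', event => {
    event.respondWith(handleRequest(event.request))
})
async function handleRequest(request) {
    const imageUrl = new URL(request.url).searchParams.get('imageUrl')
    const response = await fetch(imageUrl)
    const view = new DataView(await response.arrayBuffer())
    // PNG layout: 8-byte signature, then the IHDR chunk whose data starts at byte 16,
    // with width and height stored as big-endian 32-bit integers.
    const width = view.getUint32(16)
    const height = view.getUint32(20)
    return new Response(JSON.stringify({ width, height }), {
        headers: { 'content-type': 'application/json' }
    })
}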
Related
I can download any file that contains text using this function, but I also want to download images like .png and .jpg.
How do I do that using fetch?
async function fetchGithubFileContent(url) {
let res = await fetch(url).then((res) => res.text())
window.electronAPI.writeFile({ content: res, fileName: "res.js" })
}
async function writeFile({ content, fileName }) {
fs.writeFile(path.join(__dirname, fileName), content, () => console.log(`successfully saved ${fileName}`))
}
fetchGithubFileContent("https://raw.githubusercontent.com/Tetrax-10/Nord-Spotify/master/src/nord.js")
If it can't be done with fetch, is it possible with axios?
I am answering my own question, as I solved it after an hour.
import { Buffer } from "buffer"
async function writeFile({ content, fileName }) {
fs.writeFile(path.join(__dirname, fileName), content, () => console.log(`successfully saved ${fileName}`))
}
async function writeImage({ binaryData, fileName }) {
let formatedBinaryData = binaryData.replace(/^data:image\/\w+;base64,/, "")
let buffer = Buffer.from(formatedBinaryData, "base64")
await writeFile({ content: buffer, fileName: fileName })
}
async function imageUrlToBase64(url) {
let response = await fetch(url)
let blob = await response.blob()
return new Promise((onSuccess, onError) => {
try {
const reader = new FileReader()
reader.onload = function () {
onSuccess(this.result)
}
reader.readAsDataURL(blob)
} catch (error) {
onError(error)
}
})
}
async function downloadGithubImage(url) {
let base64 = await imageUrlToBase64(url)
let fileName = url.split("/").pop()
window.electronAPI.writeImage({ binaryData: base64, fileName: fileName })
}
downloadGithubImage("https://raw.githubusercontent.com/Tetrax-10/Spicetify-Themes/master/assets/home.png")
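A possible simplification of the same idea (a sketch, assuming a Uint8Array survives the preload bridge intact, which fs.writeFile can write directly) is to skip the base64 round trip and hand the raw bytes to the existing writeFile helper:
// Hypothetical variant, not part of the original code
async function downloadGithubImageRaw(url) {
    let response = await fetch(url)
    let bytes = new Uint8Array(await response.arrayBuffer())
    let fileName = url.split("/").pop()
    // fs.writeFile accepts a Uint8Array, so no base64 encode/decode is needed
    window.electronAPI.writeFile({ content: bytes, fileName: fileName })
}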
Another method is to use Node streams (a better approach):
import https from "https"
import { pipeline } from "stream/promises"
import path from "path"
import fs from "fs"
async function download(url) {
return new Promise(async (onSuccess) => {
https.get(url, async (res) => {
let fileName = url.split("/").pop()
const fileWriteStream = fs.createWriteStream(path.join(__dirname, fileName), {
autoClose: true,
flags: "w",
})
await pipeline(res, fileWriteStream)
onSuccess("success")
})
})
}
await download("https://raw.githubusercontent.com/Tetrax-10/Spicetify-Themes/master/assets/artist-2.png")
I am seeing the following error when trying to extract an image's metadata information with the Sharp module: "Input file contains unsupported image format".
This is only happening for certain signed image urls, particularly ones that contain xmp information in the metadata.
I am hoping someone can help me spot where the issue might be occurring in this code snippet.
Here is the exact code snippet I am using (insert the signed image URL where specified in the doStuff function to test):
const sharp = require("sharp");
const fs = require('fs');
const fetch = require('node-fetch');
async function storeUrlToLocal(sourceUrl) {
const destPath = './';
const request = {
method: 'GET',
encoding: null,
};
const response = await fetch(sourceUrl, request);
if (response.status >= 400)
throw new Error(`Failed to fetch data from ${sourceUrl}, status returned = ${response.status}`);
const localPath = `${destPath}test.png`;
const fileStream = fs.createWriteStream(localPath);
return new Promise((resolve, reject) => {
response.body.pipe(fileStream);
response.body.on("error", reject);
response.body.on("end", async () => {
const fileExists = fs.existsSync(localPath);
console.log(`All the data in the file has been read ${localPath} = ${fileExists}`);
resolve(localPath);
});
response.body.on("finish",() => {
console.log('All writes are now complete.');
});
}).catch(error => {
console.log(error);
});
}
async function doStuff() {
const localFilePath = await storeUrlToLocal('<INSERT_IMAGE_URL_HERE>');
// Read image file and extract metadata
let manipulator;
let imageMetadata;
try {
manipulator = sharp(localFilePath, { limitInputPixels: 5000000000 });
console.log('Manipulator = ', manipulator);
imageMetadata = await manipulator.metadata();
console.log("ImageMetadata = ", imageMetadata);
} catch (error) {
console.log(`Image Metadata Extraction Error: ${error.message}`);
throw error;
}
}
doStuff();
This code snippet above fails with the "Input file contains unsupported image format" on the line that extracts metadata (imageMetadata = await manipulator.metadata();)
So the strange thing is, I am able to properly extract the metadata (with no errors) with this same code if I add a short sleep after this line: const fileStream = fs.createWriteStream(localPath);
So this code snippet (all I'm doing here is adding a short sleep after fs.createWriteStream) allows the image metadata to be extracted without issue:
const sharp = require("sharp");
const fs = require('fs');
const fetch = require('node-fetch');
async function storeUrlToLocal(sourceUrl) {
const destPath = './';
const request = {
method: 'GET',
encoding: null,
};
const response = await fetch(sourceUrl, request);
if (response.status >= 400)
throw new Error(`Failed to fetch data from ${sourceUrl}, status returned = ${response.status}`);
const localPath = `${destPath}test.png`;
const fileStream = fs.createWriteStream(localPath);
function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
await sleep(1000);
return new Promise((resolve, reject) => {
response.body.pipe(fileStream);
response.body.on("error", reject);
response.body.on("end", async () => {
const fileExists = fs.existsSync(localPath);
console.log(`All the data in the file has been read ${localPath} = ${fileExists}`);
resolve(localPath);
});
response.body.on("finish",() => {
console.log('All writes are now complete.');
});
}).catch(error => {
console.log(error);
});
}
async function doStuff() {
const localFilePath = await storeUrlToLocal('<INSERT_IMAGE_URL_HERE>');
// Read image file and extract metadata
let manipulator;
let imageMetadata;
try {
manipulator = sharp(localFilePath, { limitInputPixels: 5000000000 });
console.log('Manipulator = ', manipulator);
imageMetadata = await manipulator.metadata();
console.log("ImageMetadata = ", imageMetadata);
} catch (error) {
console.log(`Image Metadata Extraction Error: ${error.message}`);
throw error;
}
}
doStuff();
Why would this sleep resolve my issue? I don't see any asynchronous calls that I would need to wait on. Perhaps fs.createWriteStream didn't have enough time to complete its operation? But I do not have the option to await the call to fs.createWriteStream, as it is not async.
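For comparison, here is a sketch of a version of storeUrlToLocal that resolves only after the write stream reports "finish" (the assumption being that the file may not be fully flushed to disk yet when response.body emits "end"):
async function storeUrlToLocal(sourceUrl) {
    const response = await fetch(sourceUrl, { method: 'GET' });
    if (response.status >= 400)
        throw new Error(`Failed to fetch data from ${sourceUrl}, status returned = ${response.status}`);
    const localPath = './test.png';
    const fileStream = fs.createWriteStream(localPath);
    return new Promise((resolve, reject) => {
        response.body.pipe(fileStream);
        response.body.on('error', reject);
        fileStream.on('error', reject);
        // 'finish' fires on the write stream once all data has been flushed to the file
        fileStream.on('finish', () => resolve(localPath));
    });
}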
I tried to create an image object from a 170 MB image file uploaded in the Chrome browser.
However, an error occurs.
What's the cause?
Is there a size constraint on creating an image object?
const loadFromFile = (file: File) => {
return new Promise((resolve, reject) => {
const reader = new CustomFileReader()
reader.onload = () => resolve(reader.result)
reader.onerror = (e) => reject(e)
reader.readAsDataURL(file)
})
}
const createImage = (dataUrl) => {
return new Promise((resolve, reject) => {
const img = new Image()
img.onload = () => resolve(img)
img.onerror = (error) => { // an error event is raised here
    console.log('image load error', error)
    reject(error)
}
img.src = dataUrl
})
}
const load = async (file: File) => {
const dataUrl = await loadFromFile(file) // successful
const image = await createImage(dataUrl) // error
}
This is the image used for the test: https://upload.wikimedia.org/wikipedia/commons/thumb/6/68/La_crucifixi%C3%B3n%2C_by_Juan_de_Flandes%2C_from_Prado_in_Google_Earth.jpg/2560px-La_crucifixi%C3%B3n%2C_by_Juan_de_Flandes%2C_from_Prado_in_Google_Earth.jpg
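One variant that might be worth trying, on the assumption that the very large base64 data URL (a 170 MB file becomes a string of well over 200 MB) is what the browser rejects, is to hand the File to an object URL instead of reading it as a data URL:
const createImageFromFile = (file: File) => { // hypothetical helper, not part of the code above
    return new Promise((resolve, reject) => {
        const img = new Image()
        img.onload = () => {
            URL.revokeObjectURL(img.src) // release the object URL once the image has decoded
            resolve(img)
        }
        img.onerror = (error) => reject(error)
        img.src = URL.createObjectURL(file) // no base64 string is ever materialised
    })
}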
Please excuse my ignorance about Node.
I need to read an image from a URL and resize it with sharp.
Currently I have it set up to read a local file.
For example, I want to read this image:
url= "https://miami.pfsrealty.com/wp-content/uploads/2020/02/Miami-y-su-bahia-con-nubes-al-atardecer-Compressed.jpg"
And my current code is this:
return new Promise(async (resolve, reject) => {
const fileSystem = require('fs');
const image = fileSystem.readFileSync(directoryPath, 'base64');
const sharp = require('sharp');
const height: number = parseInt(heightString);//parameter
const width: number = parseInt(widthString);//parameter
let img = Buffer.from(image, 'base64'); // new Buffer() is deprecated
await sharp(img)
.resize(height, width)
.toBuffer()
.then(resizedImageBuffer => {
const resizedImageData = resizedImageBuffer.toString('base64');
resolve(resizedImageData);
})
.catch(error => {
// error handling
reject(error);
});
});
How should the call be?
Thanks!
Try this:
const sharp = require('sharp');
const fs = require('fs');
function readAndSave(url, width = 300, height = 300) {
const filename = url.replace(/^.*[\\\/]/, '');
require('axios').get(url, { responseType: 'arraybuffer' })
.then((response) => {
return Buffer.from(response.data) // response.data is already binary; no encoding argument is needed
}).then((buffer) => {
return new Promise((resolve, reject) => {
sharp(buffer)
.resize(width, height) // sharp's resize signature is (width, height)
.toBuffer()
.then(resizedImageBuffer => {
const resizedImageData = resizedImageBuffer.toString('base64');
const buf = Buffer.from(resizedImageData, 'base64');
fs.writeFile(`./${filename}`, buf, function (err) {
if (err) throw err;
});
resolve()
})
.catch(error => {
// error handling
reject(error);
});
})
}).catch(error => {
console.log('error', error)
});
}
readAndSave('https://miami.pfsrealty.com/wp-content/uploads/2020/02/Miami-y-su-bahia-con-nubes-al-atardecer-Compressed.jpg');
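A slightly leaner sketch of the same idea (same axios and sharp assumptions), writing the resized buffer out directly instead of taking it through an extra base64 round trip:
const sharp = require('sharp');
const fs = require('fs');
const axios = require('axios');

async function readAndSave(url, width = 300, height = 300) {
    const filename = url.replace(/^.*[\\\/]/, '');
    const response = await axios.get(url, { responseType: 'arraybuffer' });
    const resized = await sharp(Buffer.from(response.data))
        .resize(width, height) // sharp expects (width, height)
        .toBuffer();
    await fs.promises.writeFile(`./${filename}`, resized); // write the buffer as-is
}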
I am progressively loading a file into a buffer. The buffer is valid, but the browser crashes when the ArrayBuffer finishes loading the file into it. What I need is to send the pieces of the buffer (buf = this.concatBuffers(buf, buffer);) to the axios PUT request so I can progressively upload the file to S3, rather than loading it into a single variable returned by the promise (as memory gets exceeded).
How do I modify the link between readFileAsBuffer and the uploadFileToS3 method to do this?
This is my code so you can follow the process.
concatTypedArrays = (a, b) => {
const c = new a.constructor(a.length + b.length);
c.set(a, 0);
c.set(b, a.length);
return c;
};
concatBuffers = (a, b) =>
this.concatTypedArrays(
new Uint8Array(a.buffer || a),
new Uint8Array(b.buffer || b),
).buffer;
readFileAsBuffer = file =>
new Promise((resolve, reject) => {
const fileReader = new FileReader();
fileReader.file = file;
let buf = new ArrayBuffer();
const fileChunks = new FileChunker(file, 2097152);
fileReader.readAsArrayBuffer(fileChunks.blob());
fileReader.onload = e => {
this.onProgress(fileChunks);
const buffer = e.target.result;
buf = this.concatBuffers(buf, buffer);
if (fileChunks.hasNext()) {
fileChunks.next();
fileReader.readAsArrayBuffer(fileChunks.blob());
return;
}
resolve(buf);
};
fileReader.onerror = err => {
reject(err);
};
});
uploadFileToS3 = fileObject => {
new Promise((resolve, reject) => {
const decodedURL = decodeURIComponent(fileObject.signedURL);
this.readFileAsBuffer(fileObject.fileRef).then(fileBuffer => {
console.log(fileBuffer);
axios
.put(decodedURL, fileBuffer, {
headers: {
'Content-Type': fileObject.mime,
'Content-MD5': fileObject.checksum,
'Content-Encoding': 'UTF-8',
'x-amz-acl': 'private',
},
onUploadProgress: progressEvent => {
const { loaded, total } = progressEvent;
const uploadPercentage = parseInt(
Math.round((loaded * 100) / total),
10,
);
this.setState({ uploadProgress: uploadPercentage });
console.log(`${uploadPercentage}%`);
if (uploadPercentage === 100) {
console.log('complete');
}
},
})
.then(response => {
resolve(response.data);
})
.catch(error => {
reject(error);
});
});
});
};
uploadAllFilesToS3 = () => {
const { files } = this.state;
new Promise((resolve, reject) => {
Object.keys(files).map(idx => {
this.uploadFileToS3(files[idx])
.then(response => {
this.setState({ files: [] });
resolve(response.data);
})
.catch(error => {
reject(error);
});
});
});
};
calcFileMD5 = file =>
new Promise((resolve, reject) => {
const fileReader = new FileReader();
fileReader.file = file;
const spark = new SparkMD5.ArrayBuffer();
const fileChunks = new FileChunker(file, 2097152);
fileReader.readAsArrayBuffer(fileChunks.blob());
fileReader.onload = e => {
this.onProgress(fileChunks);
const buffer = e.target.result;
spark.append(buffer);
if (fileChunks.hasNext()) {
fileChunks.next();
fileReader.readAsArrayBuffer(fileChunks.blob());
return;
}
const hash = spark.end();
const checksumAWS = Buffer.from(hash, 'hex').toString('base64');
resolve(checksumAWS);
};
fileReader.onerror = err => {
reject(err);
};
});
I ended up not needing to create my own Buffer of the file; instead, if I post the file reference returned by the input directly to axios (or XHR), the request automatically chunks the upload.
Initially I could only make it work with XMLHttpRequest, but I quickly found a way to wrap this in axios, which neatens the logic.
XMLHttpRequest
const xhr = createCORSRequest('PUT', url);
if (!xhr) {
console.log('CORS not supported');
} else {
xhr.onload = function(){
if(xhr.status == 200) {
console.log('completed');
} else {
console.log('Upload error: ' + xhr.status);
}
};
xhr.onerror = function(err) {
console.log(err)
};
xhr.upload.onprogress = function(progressEvent){
console.log(progressEvent);
};
xhr.setRequestHeader('Content-Type', file.type);
xhr.setRequestHeader('Content-MD5', md5_base64_binary);
xhr.setRequestHeader('Content-Encoding', 'UTF-8');
xhr.setRequestHeader('x-amz-acl', 'private');
xhr.send(file);
}
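createCORSRequest is not shown above; a minimal version (an assumption based on the usual CORS pattern, not the original helper) could look like this:
function createCORSRequest(method, url) {
    const xhr = new XMLHttpRequest();
    if ('withCredentials' in xhr) {
        // Modern browsers support CORS on XMLHttpRequest directly
        xhr.open(method, url, true);
        return xhr;
    }
    return null; // CORS not supported
}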
Or using axios;
uploadFileToS3 = fileObject => {
return new Promise((resolve, reject) => {
const { enqueueSnackbar } = this.props;
const decodedURL = decodeURIComponent(fileObject.signedURL);
axios
.put(decodedURL, fileObject.fileRef, {
headers: {
'Content-Type': fileObject.mime,
'Content-MD5': fileObject.checksum,
'Content-Encoding': 'UTF-8',
'x-amz-acl': 'private',
},
onUploadProgress: progressEvent => {
const { loaded, total } = progressEvent;
const uploadPercentage = parseInt(
Math.round((loaded * 100) / total),
10,
);
this.setState({ uploadProgress: uploadPercentage });
},
})
.then(response => {
resolve(response.data);
})
.catch(error => {
reject(error);
});
});
};
Have you tried uploading your file using FormData? Let the browser deal with reading the file.
const data = new FormData()
data.append('file', file)
axios.put(decodedURL, data, ....)
Another option is to use the axios transformRequest property (https://github.com/axios/axios#request-config) and do the file reading there.
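Sketched out, that could look something like the following (an assumption about how it would be wired up, reusing the names from the answer above; returning the File unchanged lets the browser handle the actual reading):
axios.put(decodedURL, fileObject.fileRef, {
    headers: { 'Content-Type': fileObject.mime },
    transformRequest: [(data, headers) => {
        // Read or wrap the file here if needed;
        // returning the File unchanged lets the browser handle the reading
        return data;
    }],
});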