I switched from using a 2D context to a WebGL context, and I'm trying to figure out a way to wait until the pixels have actually been drawn onto the screen.
With the 2D context I would do something like this:
const handlePaintFrame = () => {
  const ctx: CanvasRenderingContext2D | null | undefined = videoCanvasRef?.current?.getContext("2d");
  if (video.current && videoCanvasRef.current && ctx) {
    if (chroma?.keyColor) {
      drawChromaFrame(ctx, width, height);
    } else {
      ctx.drawImage(video.current, 0, 0, width, height);
    }
    const rendered = () => {
      setTimeout(() => {
        onReady("video");
      }, 20);
    };
    const startRender = () => {
      requestAnimationFrame(rendered);
    };
    const img = new Image();
    img.onload = () => {
      requestAnimationFrame(startRender);
    };
    img.onerror = (e: any) => {
      console.log("Image error:", { e });
    };
    img.src = videoCanvasRef.current.toDataURL();
  }
};
Currently, my draw code for the WebGL context looks something like this:
const drawTexture = () => {
  // render
  if (gl && video.current) {
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, video.current);
    drawScene(locationRef.current, buffersRef.current);
  }
};

const handlePaintFrame = () => {
  if (video.current) {
    if (chroma?.keyColor) {
      // drawChromaFrame(ctx, width, height);
    } else {
      drawTexture();
      // TODO: We need to wait until texture is drawn
      setTimeout(() => {
        onReady("video");
      }, 100);
    }
  }
};
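One way to replace the timeout, assuming `gl` is a WebGL2 context (an assumption; the snippet doesn't say which version it is), is a fence sync: insert a fence right after the draw commands and poll it, so `onReady` fires once the GPU reports the commands have completed. A minimal sketch:

const waitForGpu = (gl2: WebGL2RenderingContext, onDone: () => void) => {
  // Fence placed in the command queue after drawTexture()'s commands.
  const sync = gl2.fenceSync(gl2.SYNC_GPU_COMMANDS_COMPLETE, 0);
  if (!sync) return onDone(); // fall back if the fence can't be created
  gl2.flush(); // make sure the commands (and the fence) are submitted
  const poll = () => {
    const status = gl2.clientWaitSync(sync, 0, 0);
    if (status === gl2.ALREADY_SIGNALED || status === gl2.CONDITION_SATISFIED) {
      gl2.deleteSync(sync);
      onDone();
    } else {
      requestAnimationFrame(poll); // not signaled yet; check again next frame
    }
  };
  poll();
};

// Usage in handlePaintFrame:
// drawTexture();
// waitForGpu(gl, () => onReady("video"));

On WebGL1 the blunt alternative is `gl.finish()`, which blocks until all submitted commands complete, at the cost of stalling the main thread.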
I'm new to working with PNGs, and I get the error below: unrecognised content at end of stream
cy.readFile("cypress/e2e/Testdata/sample.png", "binary").then((image1) => {
  cy.readFile("cypress/downloads/sample.png", "binary").then((image2) => {
    const img1 = PNG.sync.read(image1);
    const img2 = PNG.sync.read(image2);
    const { width, height } = img1;
    const diff = new PNG({ width, height });
    pixelmatch(img1.data, img2.data, diff.data, width, height, {
      threshold: 0.1,
    });
    fs.writeFileSync("diff.png", PNG.sync.write(diff));
    expect(diff.width).to.equal(0);
    expect(diff.height).to.equal(0);
  });
});
You cannot mix browser commands like cy.readFile() with Node commands like fs.writeFileSync().
You will have to create a Cypress task to run this code:
const { defineConfig } = require("cypress");
const fs = require("fs");
const { PNG } = require("pngjs");
const pixelmatch = require("pixelmatch");

module.exports = defineConfig({
  e2e: {
    setupNodeEvents(on, config) {
      on("task", {
        png({ image1Path, image2Path }) {
          const image1 = fs.readFileSync(image1Path);
          const image2 = fs.readFileSync(image2Path);
          const img1 = PNG.sync.read(image1);
          const img2 = PNG.sync.read(image2);
          const { width, height } = img1;
          const diff = new PNG({ width, height });
          pixelmatch(img1.data, img2.data, diff.data, width, height, {
            threshold: 0.1,
          });
          fs.writeFileSync("diff.png", PNG.sync.write(diff));
          return diff;
        },
      });
    },
  },
});
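Calling the task from the spec then looks like this (a sketch; the paths are the ones from the question, and note the returned diff is JSON-serialized on its way back from Node, so assert on plain fields rather than calling PNG methods on it):

cy.task("png", {
  image1Path: "cypress/e2e/Testdata/sample.png",
  image2Path: "cypress/downloads/sample.png",
}).then((diff) => {
  // The PNG instance loses its prototype crossing the task boundary,
  // but plain fields like width and height come through.
  expect(diff.width).to.be.greaterThan(0);
});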
I have a problem with the color of a layer changing in OpenLayers on zoom.
During initialization, I draw a heat map with atmospheric-pressure data, and at first everything is fine.
But after zooming or panning the map, the layer's color becomes more transparent.
The layer is a LayerGroup to which the background (substrate) layer is added first, followed by the heat-map layer itself.
class HeatMapLayer extends WeatherLayer {
  #style;
  #colorRange;
  #layer;
  #substrateLayer;

  constructor(ol_map, options = {}) {
    super(ol_map);
    const { colorRange, style } = options;
    this.#colorRange = colorRange;
    this.#style = style;
    this.#substrateLayer = new SubstrateLayer(ol_map);
  }

  #createGeoJSONSource = features => new VectorSource({
    features: this.readGeoJSONFeatures(features),
  });

  #createLayer = features => new VectorImage({
    source: this.#createGeoJSONSource(features),
    style: this.#style,
  });

  loadData = ({ features }) => {
    this.isLoaded = true;
    const layers = this.layer.getLayers();
    if (!layers.getLength()) {
      this.#layer = this.#createLayer(features);
      this.#layer.on("prerender", function (evt) {
        evt.context.globalCompositeOperation = "multiply";
      });
      this.#layer.on("postrender", function (evt) {
        evt.context.globalCompositeOperation = "source-over";
      });
      const substrateLayer = this.#substrateLayer.getLayer(this.#layer.getSource().getFeatures());
      layers.push(substrateLayer);
      layers.push(this.#layer);
    } else {
      const [substrateLayer, layer] = layers.getArray();
      substrateLayer.setSource(null);
      const source = this.#createGeoJSONSource(features);
      substrateLayer.setSource(this.#substrateLayer.getSource(source.getFeatures()));
      layer.setSource(null);
      layer.setSource(source);
    }
  };

  create = ({ intervals, opacity }) => {
    this.colorScale = scaleLinear()
      .domain(intervals)
      .range(this.#colorRange);
    return super.create(this.layerTypes.vectorGroup, { opacity });
  };

  destroy = () => {
    this.#substrateLayer.destroy();
  };
}

export default HeatMapLayer;
class SubstrateLayer {
  #layer;

  #layerPrerenderHandler = ({ context }) => {
    context.globalCompositeOperation = "color";
    context.fillStyle = "white";
    context.globalCompositeOperation = "source-over";
  };

  #layerPostrenderHandler = ({ context }) => {
    context.globalCompositeOperation = "color";
    context.fillStyle = "white";
    context.globalCompositeOperation = "source-over";
  };

  #createSubstrateLayer = features => new VectorLayer({
    source: new VectorSource({
      features,
    }),
    style: new Style({
      fill: new Fill({
        color: '#888',
      }),
    }),
  });

  #setListeners = () => {
    // this.#layer.on('prerender', this.#layerPrerenderHandler)
    this.#layer.on('postrender', this.#layerPostrenderHandler);
  };

  #removeListeners = () => {
    // this.#layer.un('prerender', this.#layerPrerenderHandler)
    this.#layer.un('postrender', this.#layerPostrenderHandler);
  };

  setVisible = value => this.#layer.setVisible(value);

  getSource = features => new VectorSource({
    features,
  });

  getLayer = features => {
    if (!this.#layer) {
      if (features) {
        this.#layer = this.#createSubstrateLayer(features);
        this.#setListeners();
        return this.#layer;
      }
    }
  };

  destroy = () => {
    this.#removeListeners();
  };
}

export default SubstrateLayer;
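Note that the pre/postrender handlers above set globalCompositeOperation and immediately reset it without drawing anything in between, so they have no visible effect. For comparison, a hedged sketch of the usual OpenLayers compositing pattern, where the fill happens while the non-default operation is still active:

layer.on('postrender', (evt) => {
  const ctx = evt.context;
  ctx.save();
  // Draw while the non-default composite operation is in effect.
  ctx.globalCompositeOperation = 'color';
  ctx.fillStyle = 'white';
  ctx.fillRect(0, 0, ctx.canvas.width, ctx.canvas.height);
  ctx.restore(); // back to 'source-over' and the previous canvas state
});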
I am trying to export an SVG element as an image blob using a canvas. When I invoke canvas.toDataURL("image/png") it returns an empty string, and I am not able to figure out why this happens.
const svg: Node = document.getElementById("flowchart-container").children[0];
var icanvas = document.getElementById("stage");
const ctx: CanvasRenderingContext2D = icanvas.getContext("2d");
var width = (svg as HTMLElement).clientWidth;
var height = (svg as HTMLElement).clientHeight;
const image: HTMLImageElement = document.createElement("img");
image.crossOrigin = "anonymous";
const imageBlob: Blob = new Blob(
  [new XMLSerializer().serializeToString(svg)],
  {
    type: "image/svg+xml",
  }
);
const blobUrl: string = URL.createObjectURL(imageBlob);
image.src = blobUrl;
image.onload = (): void => {
  //ctx.clearRect(0, 0, width, height);
  ctx.drawImage(image, 0, 0, width, height);
  window.URL.revokeObjectURL(blobUrl);
  try {
    const imageData: string = icanvas.toDataURL("image/png");
    const imageFileToBeExported: Blob | File = this.dataURLtoFile(
      imageData,
      "flowchart.png"
    );
    promiseResolve(imageFileToBeExported);
  } catch (error) {
    console.log('Failed to Create Image');
    console.log(error);
    promiseReject(error);
  }
};
The above code goes inside the createSVG method; the caller is the display method.
public createSVG(
  promiseResolve: (data: File | Blob) => void,
  promiseReject: (error: unknown) => void
): void {
  // Code for Creating File
}

public async getImage(): Promise<any> {
  let imageBlob: File | Blob;
  try {
    imageBlob = await generateImageAsynchronously();
  } catch (error) {
    console.log('Failed to create Image');
  }
}

public generateImageAsynchronously(): Promise<File | Blob> {
  return new Promise(
    (
      resolver: (data: File | Blob) => void,
      reject: (error: unknown) => void
    ): void => {
      this.createSVG(resolver, reject);
    }
  );
}
Finally, the call is invoked from the method below:
public display() {
  this.getImage().then(response => {
    console.log('Resolving Promise');
    this.imageBlob = response;
  });
}
This is a common issue when using .onload: basically, you're returning before the data has been processed.
You can try wrapping it in an async function like this:
async function waitForLoading() {
  const waitForLoad = await processImage();
  return waitForLoad;
}

function processImage() {
  // Arrow-function executor so `this.dataURLtoFile` keeps its binding.
  return new Promise((promiseResolve, promiseReject) => {
    const svg: Node = document.getElementById("flowchart-container").children[0];
    var icanvas = document.getElementById("stage");
    const ctx: CanvasRenderingContext2D = icanvas.getContext("2d");
    var width = (svg as HTMLElement).clientWidth;
    var height = (svg as HTMLElement).clientHeight;
    const image: HTMLImageElement = document.createElement("img");
    image.crossOrigin = "anonymous";
    const imageBlob: Blob = new Blob(
      [new XMLSerializer().serializeToString(svg)],
      {
        type: "image/svg+xml",
      }
    );
    const blobUrl: string = URL.createObjectURL(imageBlob);
    image.src = blobUrl;
    image.onload = (): void => {
      //ctx.clearRect(0, 0, width, height);
      ctx.drawImage(image, 0, 0, width, height);
      window.URL.revokeObjectURL(blobUrl);
      try {
        const imageData: string = icanvas.toDataURL("image/png");
        const imageFileToBeExported: Blob | File = this.dataURLtoFile(
          imageData,
          "flowchart.png"
        );
        promiseResolve(imageFileToBeExported);
      } catch (error) {
        console.log('Failed to Create Image');
        console.log(error);
        promiseReject(error);
      }
    };
  });
}
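A short usage note (the caller name here is hypothetical): since processImage now resolves only after onload fires, the exported blob is guaranteed to exist when the await returns.

async function exportFlowchart(): Promise<void> {
  // `file` is the PNG File/Blob produced inside image.onload.
  const file = await processImage();
  console.log("Exported:", file);
}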
I've created a function that takes care of resizing an image; now I want to return the resized image from the function.
I'm working with React and know I can fix the problem by using state, but I don't like that solution.
I've tried returning from every different scope, but I still get an undefined response. Also, when I return the reader itself, I get the old data.
The interface:
interface IHTMLInputEvent extends Event {
  target: HTMLInputElement & EventTarget;
}
The resize function:
function resizeImage(file: IHTMLInputEvent, width: number) {
  const fileName = file.target.files[0].name;
  const reader = new FileReader();
  reader.readAsDataURL(file.target.files[0]);
  reader.onload = (event) => {
    const img = new Image();
    img.src = event.target.result.toString();
    img.onload = () => {
      const elem = document.createElement('canvas');
      const scaleFactor = width / img.width;
      elem.width = width;
      elem.height = img.height * scaleFactor;
      const ctx = elem.getContext('2d');
      ctx.drawImage(img, 0, 0, width, img.height * scaleFactor);
      ctx.canvas.toBlob((blob) => {
        return new File([blob], fileName, { type: 'image/jpeg', lastModified: Date.now() });
      }, 'image/jpeg', 1);
    };
  };
}
The function for handling the upload (so far):
function handleUpload(e: IHTMLInputEvent) {
  const resizedImage = resizeImage(e, 600);
}
The input field:
<input
  className={classes.inputForUpload}
  accept='image/*'
  type='file'
  ref={uploadImage}
  onChange={(e: IHTMLInputEvent) => handleUpload(e)}
/>
I would like to return the newly created image.
You can solve this using a Promise:
function resizeImage(file: IHTMLInputEvent, width: number) {
  return new Promise((resolve, reject) => {
    const fileName = file.target.files[0].name;
    const reader = new FileReader();
    reader.readAsDataURL(file.target.files[0]);
    reader.onload = (event) => {
      const img = new Image();
      img.src = event.target.result.toString();
      img.onload = () => {
        const elem = document.createElement('canvas');
        const scaleFactor = width / img.width;
        elem.width = width;
        elem.height = img.height * scaleFactor;
        const ctx = elem.getContext('2d');
        ctx.drawImage(img, 0, 0, width, img.height * scaleFactor);
        ctx.canvas.toBlob((blob) => {
          resolve(new File([blob], fileName, {
            type: 'image/jpeg',
            lastModified: Date.now()
          }));
        }, 'image/jpeg', 1);
      };
    };
  });
}
With async/await:
async function handleUpload(e: IHTMLInputEvent) {
  const resizedImage = await resizeImage(e, 600);
  // do your stuff here
}
JSX:
<input
  className={classes.inputForUpload}
  accept='image/*'
  type='file'
  ref={uploadImage}
  onChange={async (e: IHTMLInputEvent) => await handleUpload(e)}
/>
May I ask why you don't like the solution of using state? This seems like a pretty standard use case.
Your state could look something like this:
state = {
  imageDescription: '',
  imageUrl: null
};
Your action handler would simply setState upon success like so:
img.onload = () => {
  ...
  this.setState({ imageDescription: fileName, imageUrl: img.src })
};
Finally your render function would look something like this:
render() {
  const { imageDescription, imageUrl } = this.state;
  return (
    <Fragment>
      <input
        className={classes.inputForUpload}
        accept='image/*'
        type='file'
        ref={uploadImage}
        onChange={(e: IHTMLInputEvent) => handleUpload(e)}
      />
      <img src={imageUrl} alt={imageDescription} />
    </Fragment>
  )
}
P.S. you can delete handleUpload and call resizeImage directly.
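A hedged sketch of that P.S., combining the Promise-based resizeImage from the previous answer with this state approach (the object-URL handling is my own addition):

onChange={async (e: IHTMLInputEvent) => {
  const file = await resizeImage(e, 600);
  // Show the resized file by pointing the <img> at an object URL for it.
  this.setState({ imageDescription: file.name, imageUrl: URL.createObjectURL(file) });
}}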
I'm trying to get the cropped image (a blob) with the react-image-crop module. However, I have a problem with the canvas; I guess it needs to be done asynchronously, but I'm not sure, and I'm not sure I've chosen the right approach either.
I've tried to do the same as written in the react-image-crop documentation at https://www.npmjs.com/package/react-image-crop.
I'm also trying to do the same as Stefan in this topic:
Get cropped image via react-image-crop module
state = {
  image: '',
  crop: {
    aspect: 4 / 3,
    x: 10,
    y: 10,
    width: 80,
    height: 80,
  },
  imgSrc: null
}

getCroppedImg = (image, pixelCrop, fileName) => {
  const canvas = document.createElement('canvas');
  canvas.width = pixelCrop.width;
  canvas.height = pixelCrop.height;
  const ctx = canvas.getContext('2d');
  ctx.drawImage(
    image,
    pixelCrop.x,
    pixelCrop.y,
    pixelCrop.width,
    pixelCrop.height,
    0,
    0,
    pixelCrop.width,
    pixelCrop.height
  );
  // As a blob
  return new Promise((resolve, reject) => {
    canvas.toBlob(blob => {
      blob.name = fileName;
      resolve(blob);
    }, 'image/jpeg');
  });
}

handleImageUpload = e => {
  const uploadData = new FormData();
  uploadData.append("image", this.state.imgSrc);
  service.handleUpload(uploadData)
    .then(response => {
      this.setState({ image: response.secure_url });
    })
    .catch(err => {
      console.log("Error while uploading the file: ", err);
    });
}

handleImagePreview = e => {
  this.setState({ image: URL.createObjectURL(e.target.files[0]), imgSrc: e.target.files[0] })
}

handleOnCropComplete = (crop, pixelCrop) => {
  this.getCroppedImg(this.state.imgSrc, pixelCrop, 'preview.jpg')
    .then((res) => {
      const blobUrl = URL.createObjectURL(res);
      console.log(blobUrl);
    })
}

render() {
  return (
    <>
      <input required onChange={this.handleImagePreview} type="file" />
      <div className="crop-div">
        <ReactCrop
          src={this.state.image}
          crop={this.state.crop}
          onChange={this.handleOnCropChange}
          onComplete={this.handleOnCropComplete} />
        <button className="submit-btn" onClick={this.handleImageUpload}>Crop the image</button>
      </div>
    </>
  )
}
After resizing the cropped area on the image, I get this error:
"TypeError: Failed to execute 'drawImage' on 'CanvasRenderingContext2D': The provided value is not of type '(CSSImageValue or HTMLImageElement or SVGImageElement or HTMLVideoElement or HTMLCanvasElement or ImageBitmap or OffscreenCanvas)'"
The image that I'm passing is a File object.
A File object isn't among the types the error message lists, so you have to create one of the listed types from the object you have.
In this case we'll use an HTMLImageElement.
getCroppedImg = (imageFile, pixelCrop, fileName) => {
  const canvas = document.createElement('canvas');
  canvas.width = pixelCrop.width;
  canvas.height = pixelCrop.height;
  const ctx = canvas.getContext('2d');
  var image = new Image();
  var promise = new Promise((resolve, reject) => {
    image.onload = function () {
      ctx.drawImage(
        image,
        pixelCrop.x,
        pixelCrop.y,
        pixelCrop.width,
        pixelCrop.height,
        0,
        0,
        pixelCrop.width,
        pixelCrop.height
      );
      URL.revokeObjectURL(image.src); // free the object URL once drawn
      resolve();
    };
    image.src = URL.createObjectURL(imageFile);
  }).then(function () {
    return new Promise((resolve, reject) => {
      canvas.toBlob(blob => {
        blob.name = fileName;
        resolve(blob);
      }, 'image/jpeg');
    });
  });
  return promise;
}
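Usage from the question's crop handler stays essentially the same, since getCroppedImg still resolves with the blob; the File object is simply loaded into an image internally. A sketch with an added await:

handleOnCropComplete = async (crop, pixelCrop) => {
  // this.state.imgSrc is the File from the <input>; getCroppedImg
  // now turns it into an HTMLImageElement before drawing.
  const blob = await this.getCroppedImg(this.state.imgSrc, pixelCrop, 'preview.jpg');
  console.log(URL.createObjectURL(blob));
};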