Change layer color on zoom in OpenLayers - javascript

I had a problem with changing the color of a layer in Openlayers on zoom.
During initialization, I draw a heat map with data on atmospheric pressure. And at first everything is fine.
But after zooming or moving around the map, the color of the layer becomes more transparent.
The layer is a LayerGroup in which the background layer is first added, and then the layer itself with the heat map is added.
class HeatMapLayer extends WeatherLayer {
// Style applied to the heat-map vector layer.
#style;
// Color stops used to build the linear color scale in create().
#colorRange;
// VectorImage layer holding the heat-map features.
#layer;
// Opaque background layer rendered underneath the heat map so the
// "multiply" blend composites against a stable base.
#substrateLayer;
/**
 * @param {ol/Map} ol_map - map the layer group is attached to.
 * @param {{colorRange?: Array, style?: Function|Object}} options
 */
constructor(ol_map, options = {}) {
super(ol_map);
const { colorRange, style } = options;
this.#colorRange = colorRange;
this.#style = style;
this.#substrateLayer = new SubstrateLayer(ol_map);
}
// Wraps GeoJSON features in a VectorSource.
#createGeoJSONSource = features => new VectorSource({
features: this.readGeoJSONFeatures(features),
});
// Builds the heat-map layer and installs the blend-mode hooks once,
// at creation time, so they cannot be attached twice.
#createLayer = features => {
const layer = new VectorImage({
source: this.#createGeoJSONSource(features),
style: this.#style,
});
layer.on("prerender", ({ context }) => {
context.globalCompositeOperation = "multiply";
});
layer.on("postrender", ({ context }) => {
// Restore the default so subsequently drawn layers are unaffected.
context.globalCompositeOperation = "source-over";
});
return layer;
};
/**
 * Loads (or reloads) GeoJSON features into the layer group.
 * First call creates substrate + heat-map layers and pushes them into
 * the group; later calls only swap the sources.
 */
loadData = ({ features }) => {
this.isLoaded = true;
const layers = this.layer.getLayers();
// Use the public Collection API (getLength/getArray) instead of the
// private `values_` / `array_` internals, which are not stable
// across OpenLayers releases.
if (layers.getLength() === 0) {
this.#layer = this.#createLayer(features);
const substrateLayer = this.#substrateLayer.getLayer(this.#layer.getSource().getFeatures());
// Order matters: the substrate must render under the heat map.
layers.push(substrateLayer);
layers.push(this.#layer);
} else {
const [substrateLayer, layer] = layers.getArray();
const source = this.#createGeoJSONSource(features);
substrateLayer.setSource(this.#substrateLayer.getSource(source.getFeatures()));
layer.setSource(source);
}
}
/**
 * Creates the layer group and the value-to-color scale.
 * @param {{intervals: number[], opacity: number}} params
 */
create = ({ intervals, opacity }) => {
this.colorScale = scaleLinear()
.domain(intervals)
.range(this.#colorRange);
return super.create(this.layerTypes.vectorGroup, { opacity });
};
destroy = () => {
this.#substrateLayer.destroy();
};
}
export default HeatMapLayer;
class SubstrateLayer {
// Lazily created VectorLayer that serves as the heat map's backdrop.
#layer;
// NOTE(review): this handler sets a blend mode and fill style but never
// issues a fill, so only the final "source-over" assignment has any
// effect — presumably a fillRect was intended; confirm before enabling.
#layerPrerenderHandler = ({ context }) => {
context.globalCompositeOperation = "color";
context.fillStyle = "white";
context.globalCompositeOperation = "source-over";
}
// NOTE(review): same as the prerender handler — no fill is performed,
// so this only resets the composite operation to the default.
#layerPostrenderHandler = ({ context }) => {
context.globalCompositeOperation = "color";
context.fillStyle = "white";
context.globalCompositeOperation = "source-over";
};
// Builds the backdrop layer with a uniform grey fill.
#createSubstrateLayer = features => new VectorLayer({
source: new VectorSource({
features,
}),
style: new Style({
fill: new Fill({
color: '#888',
})
}),
});
#setListeners = () => {
// this.#layer.on('prerender', this.#layerPrerenderHandler)
this.#layer.on('postrender', this.#layerPostrenderHandler);
};
#removeListeners = () => {
// this.#layer.un('prerender', this.#layerPrerenderHandler)
this.#layer.un('postrender', this.#layerPostrenderHandler);
};
setVisible = value => this.#layer.setVisible(value)
// Fresh source for the given features (used when reloading data).
getSource = features => new VectorSource({
features,
});
/**
 * Lazily creates and returns the substrate layer.
 * Fix: the original returned undefined once the layer already existed;
 * subsequent calls now return the cached instance.
 */
getLayer = features => {
if (!this.#layer && features) {
this.#layer = this.#createSubstrateLayer(features);
this.#setListeners();
}
return this.#layer;
};
destroy = () => {
// Guard: destroy() may be called before the layer was ever built,
// in which case there are no listeners to remove.
if (this.#layer) {
this.#removeListeners();
}
}
}
export default SubstrateLayer;

Related

unrecognised content at end of stream with PNG

I'm new to working with PNG files, and I get the error below: unrecognised content at end of stream.
// NOTE(review): this snippet fails because it mixes Cypress browser
// commands (cy.readFile) with Node-only APIs (fs.writeFileSync and the
// pngjs/pixelmatch buffer work) — move the comparison into a cy.task
// so it runs in the Node process (see the config below).
cy.readFile("cypress/e2e/Testdata/sample.png", "binary").then((image1) => {
cy.readFile("cypress/downloads/sample.png", "binary").then((image2) => {
// Decode both PNGs into raw RGBA pixel buffers.
const img1 = PNG.sync.read(image1);
const img2 = PNG.sync.read(image2);
const { width, height } = img1;
const diff = new PNG({ width, height });
// NOTE(review): pixelmatch's return value (the mismatched-pixel count)
// is discarded here; that count is what should be asserted on.
pixelmatch(img1.data, img2.data, diff.data, width, height, {
threshold: 0.1,
});
fs.writeFileSync("diff.png", PNG.sync.write(diff));
// NOTE(review): `diff` was constructed with img1's width/height, so
// these assertions can never pass for a non-empty image.
expect(diff.width).to.equal(0);
expect(diff.height).to.equal(0);
});
});
You cannot mix browser commands like cy.readFile() with Node commands like fs.writeFileSync().
You will have to create a Cypress task to run this code.
module.exports = defineConfig({
e2e: {
setupNodeEvents(on, config) {
on('task', {
png({image1Path, image2Path}) {
const image1 = fs.readFileSync(image1Path)
const image2 = fs.readFileSync(image2Path)
const img1 = PNG.sync.read(image1);
const img2 = PNG.sync.read(image2);
const { width, height } = img1;
const diff = new PNG({ width, height });
pixelmatch(img1.data, img2.data, diff.data, width, height, {
threshold: 0.1,
});
fs.writeFileSync("diff.png", PNG.sync.write(diff));
return diff
},
})
},
},
})

MediaPipe Hands gives typeError: hands.Hands is not a constructor

I am trying to build a React widget using Googles MediaPipe Hands library.
I get the following error:
typeError: hands.Hands is not a constructor
Here is my code!
// Renders the MediaPipe Hands demo: streams the webcam into a canvas
// and overlays detected hand landmarks on each frame.
const LandmarkExtractionComponent = (): JSX.Element => {
useEffect(() => {
const videoElement = document.getElementById("input_video") as HTMLVideoElement;
const canvasElement = document.getElementById("output_canvas") as HTMLCanvasElement;
const canvasCtx = canvasElement.getContext("2d");
if (!canvasCtx) {
return;
}
// Draws the camera frame plus connector/landmark overlays for every
// detected hand.
const onResults = (results: mpHands.Results): void => {
canvasCtx.save();
canvasCtx.clearRect(0, 0, canvasElement.width, canvasElement.height);
canvasCtx.drawImage(results.image, 0, 0, canvasElement.width, canvasElement.height);
if (results.multiHandLandmarks) {
for (const landmarks of results.multiHandLandmarks) {
drawingUtils.drawConnectors(canvasCtx, landmarks, mpHands.HAND_CONNECTIONS, {
color: "#00FF00",
lineWidth: 5
});
drawingUtils.drawLandmarks(canvasCtx, landmarks, { color: "#FF0000", lineWidth: 2 });
}
}
canvasCtx.restore();
};
// Fixes vs. the original:
//  - the locateFile callback interpolated `file`, but its parameter was
//    named `_file`, so every asset URL contained "undefined";
//  - the CDN scope is "@mediapipe", not "#mediapipe".
const hands = new mpHands.Hands({
locateFile: (file: string) => {
return `https://cdn.jsdelivr.net/npm/@mediapipe/hands/${file}`;
}
});
hands.setOptions({
maxNumHands: 1,
modelComplexity: 1,
minDetectionConfidence: 0.5,
minTrackingConfidence: 0.5
});
hands.onResults(onResults);
const camera = new cameraUtils.Camera(videoElement, {
onFrame: async () => {
await hands.send({ image: videoElement });
},
width: 1280,
height: 720
});
camera.start();
// Cleanup on unmount so the camera and solver are not leaked.
// NOTE(review): Camera#stop exists in recent @mediapipe/camera_utils
// versions — confirm against the installed version.
return () => {
camera.stop();
hands.close();
};
}, []);
return (
<div>
<UserInterfaceComponent />
</div>
);
};
This is just the basic example used by MediaPipe!
Hope someone can help...
Try loading the Hands constructor from the CDN script instead of the npm package.

Wait until WebGL has drawn onto screen

I switched from using 2D context to using WebGL context. I'm trying to figure out a way to wait until pixels have been drawn onto the screen.
With 2D context I would do something like this
// 2D-context version: draws the current video frame (optionally chroma
// keyed), then chains an Image load and two requestAnimationFrame hops
// before signalling that the frame is visible on screen.
const handlePaintFrame = () => {
const ctx: CanvasRenderingContext2D | any = videoCanvasRef?.current?.getContext("2d");
if (video.current && videoCanvasRef.current && ctx) {
if (chroma?.keyColor) {
// Chroma-keyed drawing path (replaces the key color while drawing).
drawChromaFrame(ctx, width, height);
} else {
ctx.drawImage(video.current, 0, 0, width, height);
}
// Final step: one more frame plus a short delay before reporting ready.
const rendered = () => {
setTimeout(() => {
onReady("video");
}, 20);
};
const startRender = () => {
requestAnimationFrame(rendered);
};
// Round-tripping the canvas through a data URL — presumably to force
// the canvas to be fully rasterized before `onload` fires; the rAF
// chain then waits for composition. TODO(review): confirm this is
// still needed; a double requestAnimationFrame alone may suffice.
const img = new Image();
img.onload = () => {
requestAnimationFrame(startRender);
};
img.onerror = (e: any) => {
console.log("Image error:", { e });
};
img.src = videoCanvasRef.current.toDataURL();
}
};
Currently my draw code for the WebGL context is something like this
// Uploads the current video frame into the bound texture and renders
// the scene with it.
const drawTexture = () => {
  if (gl && video.current) {
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, video.current);
    drawScene(locationRef.current, buffersRef.current);
  }
};
const handlePaintFrame = () => {
  if (video.current) {
    if (chroma?.keyColor) {
      // TODO: port drawChromaFrame to the WebGL pipeline.
      // drawChromaFrame(ctx, width, height);
    } else {
      drawTexture();
      // WebGL commands are queued, so a fixed timeout only guesses at
      // completion. gl.finish() blocks until the queued commands have
      // executed, and the double requestAnimationFrame fires after the
      // browser has composited the frame to the screen.
      gl?.finish();
      requestAnimationFrame(() => {
        requestAnimationFrame(() => {
          onReady("video");
        });
      });
    }
  }
};

The markers appear only after the second click — hasn't the state been updated with the fetched items yet?

Why do the markers on the map appear only after the second click on the button? I download data from an API, and after clicking the button the data is placed in this.state.items.
// Items fetched from the API on button click; each item carries X/Y
// coordinates used to place a map marker.
state = {
items:[]
}
// Fetches the items, then places one marker per item on a fresh map.
// Fixes vs. the original: the fetch URL literal was unterminated, and
// the marker loop ran synchronously before the async setState had
// populated this.state.items (which is why markers only appeared on
// the second click).
handleFetch = () => {
  // The map itself does not depend on the fetched data.
  const map = new mapboxgl.Map({
    container: this.mapContainer,
    style: 'mapbox://styles/mapbox/streets-v11',
    center: [this.state.lng, this.state.lat],
    zoom: this.state.zoom,
  });
  fetch('http://xxx')
    .then(response => {
      if (response.ok) {
        return response;
      }
      throw Error(response.status);
    })
    .then(response => response.json())
    .then(data => {
      // setState is asynchronous: add the markers in its completion
      // callback so this.state.items is guaranteed to be populated.
      this.setState({ items: data }, () => {
        for (const item of this.state.items) {
          new mapboxgl.Marker()
            .setLngLat(new mapboxgl.LngLat(item.X, item.Y))
            .addTo(map);
        }
      });
    })
    .catch(error => console.log(error));
}
render() {
<button onClick={this.handleFetch}>show markers</button>
return (
)}
By the look of your state it seems to be a class component.
The for loop should be called in a callback to wait for the items to be set before to loop over it because the fetch is asynchronous.
Here is the callback function:
// Drops a marker on the map for every fetched item in state.
const loopOverItems = () => {
  for (const item of this.state.items) {
    const position = new mapboxgl.LngLat(item.X, item.Y);
    new mapboxgl.Marker().setLngLat(position).addTo(map);
  }
}
and here is the last then where you set your items:
.then(data => this.setState({items: data}, loopOverItems));

THREE.js I want to object children assign variable, and loop list but not working

I want to bind each child of an Object3D loaded from a URL to a variable, and render the children's names as a list. As you can see in the render method, I map over the array to build li tags, but the component renders no data. On the button click event, the vis state is assigned the array of children, and console.log(vis) shows the correct data, but the {this.state.vis.length > 0 ? ...} check in the render method still shows nothing. I also want a checkbox per child that toggles its visible property between true and false.
// Module-level Three.js state shared between loadObj() and the
// ObjectLoader component below.
let scene;
let viscera = [];
// Loads an OBJ model from `url`, adds it to the scene, and collects
// every node of the loaded object graph into `viscera`.
function loadObj(url) {
const objLoader = new THREE.OBJLoader();
objLoader.load(
url,
function(object) {
console.log(object);
let mesh = object;
scene.add(object);
mesh.position.set(0, 2, 0);
mesh.scale.set(2, 2, 2);
object.traverse(child => {
// NOTE(review): every node traverse() visits is an Object3D, so this
// check always passes; filter on `child.isMesh` if only meshes are
// wanted in `viscera`.
if (child instanceof THREE.Object3D) {
viscera.push(child);
// Looked up but currently unused — the visibility toggling below is
// commented out.
const liver = child.getObjectByName('Liver_Liver.001');
const stomach = child.getObjectByName('Stomach_Stomach.001');
// if (liver && stomach) {
// liver.visible = false;
// stomach.visible = false;
// }
}
})
console.log(viscera);
},
// Progress callback: logs percentage loaded.
function(xhr) {
console.log((xhr.loaded / xhr.total) * 100 + "% loaded");
},
// called when loading has errors
function(error) {
console.log("An error happened" + error);
});
}
class ObjectLoader extends Component {
constructor(props) {
super(props);
this.state = {
vis: viscera,
get: false
}
}
componentDidMount() {
const width = this.mount.clientWidth;
const height = this.mount.clientHeight;
scene = new THREE.Scene();
this.camera = new THREE.PerspectiveCamera(75, width / height, 0.1, 2000);
this.camera.position.z = 8;
this.renderer = new THREE.WebGLRenderer({ antialias: true });
this.renderer.setClearColor("#263238");
this.renderer.setSize(width, height);
this.mount.appendChild(this.renderer.domElement);
// const geometry = new THREE.BoxGeometry(5, 5, 5);
// const material = new THREE.MeshBasicMaterial({
// color: "#0F0",
// wireframe: true
// });
// this.cube = new THREE.Mesh(geometry, material);
// scene.add(this.cube);
const controls = new OrbitControls(this.camera, this.renderer.domElement);
controls.enableDamping = true;
controls.dampingFactor = 0.25;
controls.enableZoom = true;
//LIGHTS
const keyLight = new THREE.DirectionalLight(new THREE.Color('hsl(30, 100%, 75%)'), 1.0);
keyLight.position.set(-100, 0, 100);
const fillLight = new THREE.DirectionalLight(new THREE.Color('hsl(240, 100%, 75%)'), 0.75);
fillLight.position.set(100, 0, 100);
const backLight = new THREE.DirectionalLight(0xffffff, 1.0);
backLight.position.set(100, 0, -100).normalize();
scene.add(keyLight);
scene.add(fillLight);
scene.add(backLight);
// load Object
this.animate();
loadObj(this.props.url);
}
componentWillUnmount() {
this.stop();
this.mount.removeChild(this.renderer.domElement);
}
getViscera = (vis) => {
this.props.getViscera(vis);
}
start = () => {
if (!this.frameId) {
this.frameId = requestAnimationFrame(this.animate);
}
};
stop = () => {
cancelAnimationFrame(this.frameId);
};
animate = () => {
this.renderScene();
this.frameId = window.requestAnimationFrame(this.animate);
};
renderScene = () => {
if (this.renderer) this.renderer.render(scene, this.camera);
};
// handleViscera = () => {
// for (let i = 0; i < viscera.length; i++) {
// console.log(viscera[i].name);
// }
// }
onLoad = () => {
this.renderScene();
//start animation
this.start();
};
onProgress = xhr => {
console.log((xhr.loaded / xhr.total) * 100 + "% loaded");
};
// Function called when download errors
onError = error => {
console.log("An error happened" + error);
};
// getChildren = () => {
// this.setState({
// get: true
// })
// if (this.state.vis && this.state.get === true) {
// return (
// <ul>
// {this.state.vis.map(item => {
// console.log(item);
// <li key={item}>{item.name}</li>
// })}
// </ul>
// )
// }
// }
render() {
return (
<div className="objLoader">
<div
style={{ width: "500px", height: "500px" }}
ref={mount => {
this.mount = mount;
}}
/>
<h2>Children</h2>
<button className="children-btn" onClick={() => this.getViscera(this.state.vis)}>Get Children</button>
{
this.state.vis.length > 0 ?
<ul>
{this.state.vis.map(v => {
<li>{v.name}</li>
})}
</ul>
: <div>no data</div>
}
</div>
)
}
}

Categories

Resources