React Native d3 azimuthal equal-area rotate not smooth - javascript

I am using the d3 azimuthal equal-area projection in React Native, based on this example. It works fine, and I update the rotate values with a PanGestureHandler, which also works, but the rotation is not smooth and the map takes time to update.
Here is the repo for this project.
This is the code where I update the rotate values:
const countryPaths = useMemo(() => {
const clipAngle = 150;
const projection = d3
.geoAzimuthalEqualArea()
// .rotate([0, -90])
.rotate([rotateX, rotateY])
.fitSize([mapExtent, mapExtent], {
type: 'FeatureCollection',
features: COUNTRIES,
})
.clipAngle(clipAngle)
.translate([dimensions.width / 2, mapExtent / 2]);
const geoPath = d3.geoPath().projection(projection);
const windowPaths = COUNTRIES.map(geoPath);
return windowPaths;
}, [dimensions, rotateX, rotateY]);
Here is my complete code:
App.js
import React, {useState, useMemo, useEffect, useRef} from 'react';
import {
StyleSheet,
View,
Dimensions,
Animated,
PanResponder,
Text,
SafeAreaView,
} from 'react-native';
import Map from './components/Map';
import COLORS from './constants/Colors';
import movingAverage from './functions/movingAverage';
import * as d3 from 'd3';
import covidData_raw from './assets/data/who_data.json';
export default function App(props) {
const dimensions = Dimensions.get('window');
const [stat, setStat] = useState('avg_confirmed');
const [date, setDate] = useState('2020-04-24');
//Data Manipulation
const covidData = useMemo(() => {
const countriesAsArray = Object.keys(covidData_raw).map((key) => ({
name: key,
data: covidData_raw[key],
}));
const windowSize = 7;
const countriesWithAvg = countriesAsArray.map((country) => ({
name: country.name,
data: [...movingAverage(country.data, windowSize)],
}));
const onlyCountriesWithData = countriesWithAvg.filter(
(country) => country.data.findIndex((d, _) => d[stat] >= 10) != -1,
);
return onlyCountriesWithData;
}, []);
const maxY = useMemo(() => {
return d3.max(covidData, (country) => d3.max(country.data, (d) => d[stat]));
}, [stat]);
const colorize = useMemo(() => {
const colorScale = d3
.scaleSequentialSymlog(d3.interpolateReds)
.domain([0, maxY]);
return colorScale;
});
return (
<SafeAreaView>
<View>
<Map
dimensions={dimensions}
data={covidData}
date={date}
colorize={colorize}
stat={stat}
/>
</View>
</SafeAreaView>
);
}
const styles = StyleSheet.create({
container: {
flex: 1,
backgroundColor: COLORS.primary,
alignItems: 'center',
justifyContent: 'center',
},
rotateView: {
width: 300,
height: 300,
backgroundColor: 'black',
shadowOpacity: 0.2,
},
});
Map.js
import React, {useMemo, useState, useEffect} from 'react';
import {StyleSheet, View, Animated, PanResponder} from 'react-native';
//LIBRARIES
import Svg, {G, Path, Circle} from 'react-native-svg';
import * as d3 from 'd3';
import {
PanGestureHandler,
PinchGestureHandler,
State,
} from 'react-native-gesture-handler';
//CONSTANTS
import {COUNTRIES} from '../constants/CountryShapes';
import COLORS from '../constants/Colors';
//COMPONENTS
import Button from './Button';
const Map = (props) => {
const [countryList, setCountryList] = useState([]);
const [translateX, setTranslateX] = useState(0);
const [translateY, setTranslateY] = useState(0);
const [lastTranslateX, setLastTranslateX] = useState(0);
const [lastTranslateY, setLastTranslateY] = useState(0);
const [buttonOpacity, _] = useState(new Animated.Value(0));
const [scale, setScale] = useState(1);
const [prevScale, setPrevScale] = useState(1);
const [lastScaleOffset, setLastScaleOffset] = useState(0);
const [rotateX, setrotateX] = useState();
const [rotateY, setrotateY] = useState();
const {dimensions, data, date, colorize, stat} = props;
//Gesture Handlers
const panStateHandler = (event) => {
if (event.nativeEvent.oldState === State.UNDETERMINED) {
setLastTranslateX(translateX);
setLastTranslateY(translateY);
}
if (event.nativeEvent.oldState === State.ACTIVE) {
Animated.timing(buttonOpacity, {
toValue: 1,
duration: 1000,
useNativeDriver: true,
}).start();
}
};
const panGestureHandler = (event) => {
console.log('event', event.nativeEvent);
setrotateX(event.nativeEvent.x);
setrotateY(event.nativeEvent.y);
setTranslateX(-event.nativeEvent.translationX / scale + lastTranslateX);
setTranslateY(-event.nativeEvent.translationY / scale + lastTranslateY);
};
const pinchStateHandler = (event) => {
if (event.nativeEvent.oldState === State.UNDETERMINED) {
setLastScaleOffset(-1 + scale);
}
if (event.nativeEvent.oldState === State.ACTIVE) {
Animated.timing(buttonOpacity, {
toValue: 1,
duration: 1000,
useNativeDriver: true,
}).start();
}
};
const pinchGestureHandler = (event) => {
if (
event.nativeEvent.scale + lastScaleOffset >= 1 &&
event.nativeEvent.scale + lastScaleOffset <= 5
) {
setPrevScale(scale);
setScale(event.nativeEvent.scale + lastScaleOffset);
setTranslateX(
translateX -
(event.nativeEvent.focalX / scale -
event.nativeEvent.focalX / prevScale),
);
setTranslateY(
translateY -
(event.nativeEvent.focalY / scale -
event.nativeEvent.focalY / prevScale),
);
}
};
//Initialize Map Transforms
const initializeMap = () => {
setTranslateX(0);
setTranslateY(0);
setScale(1);
setPrevScale(1);
setLastScaleOffset(0);
Animated.timing(buttonOpacity, {
toValue: 0,
duration: 1000,
useNativeDriver: true,
}).start();
};
//Create Map Paths
const mapExtent = useMemo(() => {
return dimensions.width > dimensions.height / 2
? dimensions.height / 2
: dimensions.width;
}, [dimensions]);
const countryPaths = useMemo(() => {
const clipAngle = 150;
const projection = d3
.geoAzimuthalEqualArea()
// .rotate([0, -90])
.rotate([rotateX, rotateY])
.fitSize([mapExtent, mapExtent], {
type: 'FeatureCollection',
features: COUNTRIES,
})
.clipAngle(clipAngle)
.translate([dimensions.width / 2, mapExtent / 2]);
const geoPath = d3.geoPath().projection(projection);
const windowPaths = COUNTRIES.map(geoPath);
return windowPaths;
}, [dimensions, rotateX, rotateY]);
useEffect(() => {
setCountryList(
countryPaths.map((path, i) => {
const curCountry = COUNTRIES[i].properties.name;
const isCountryNameInData = data.some(
(country) => country.name === curCountry,
);
const curCountryData = isCountryNameInData
? data.find((country) => country.name === curCountry)['data']
: null;
const isDataAvailable = isCountryNameInData
? curCountryData.some((data) => data.date === date)
: false;
const dateIndex = isDataAvailable
? curCountryData.findIndex((x) => x.date === date)
: null;
return (
<Path
key={COUNTRIES[i].properties.name}
d={path}
stroke={COLORS.greyLight}
strokeOpacity={0.3}
strokeWidth={0.6}
fill={
isDataAvailable
? colorize(curCountryData[dateIndex][stat])
: COLORS.greyLight
}
opacity={isDataAvailable ? 1 : 0.4}
/>
);
}),
);
}, [rotateX, rotateY]);
return (
<View>
<PanGestureHandler
onGestureEvent={(e) => panGestureHandler(e)}
onHandlerStateChange={(e) => panStateHandler(e)}>
<PinchGestureHandler
onGestureEvent={(e) => pinchGestureHandler(e)}
onHandlerStateChange={(e) => pinchStateHandler(e)}>
<Svg
width={dimensions.width}
height={dimensions.height / 2}
style={styles.svg}>
<G
// transform={`scale(${scale}) translate(${-translateX},${-translateY})`}
>
<Circle
cx={dimensions.width / 2}
cy={mapExtent / 2}
r={mapExtent / 2}
fill={COLORS.lightPrimary}
/>
{countryList.map((x) => x)}
</G>
</Svg>
</PinchGestureHandler>
</PanGestureHandler>
</View>
);
};
const styles = StyleSheet.create({
svg: {},
rotateView: {
width: 100,
height: 400,
backgroundColor: 'black',
shadowOffset: {height: 1, width: 1},
shadowOpacity: 0.2,
},
});
export default Map;

How I fixed this issue:
I changed the countries JSON to countries-110m.json.
I deleted rotateX and rotateY and replaced them with translateX and translateY.
The new rotate code is: .rotate([-translateX, translateY])
If you have any doubts, please check my complete source code on GitHub.
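For reference, here is a minimal sketch of what the updated countryPaths memo could look like after that change, assuming translateX and translateY hold the accumulated pan deltas from the gesture handler (not the raw touch coordinates):

const countryPaths = useMemo(() => {
  const clipAngle = 150;
  const projection = d3
    .geoAzimuthalEqualArea()
    // dragging horizontally spins the globe, dragging vertically tilts it
    .rotate([-translateX, translateY])
    .fitSize([mapExtent, mapExtent], {
      type: 'FeatureCollection',
      features: COUNTRIES,
    })
    .clipAngle(clipAngle)
    .translate([dimensions.width / 2, mapExtent / 2]);
  const geoPath = d3.geoPath().projection(projection);
  return COUNTRIES.map(geoPath);
}, [dimensions, translateX, translateY]);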

Related

How to create a compass that points to specific coordinates (React-Native)

Here is what I have for now:
import {
Alert,
Animated,
Easing,
Linking,
StyleSheet,
Text,
View,
} from "react-native";
import React, { useEffect, useState } from "react";
import * as Location from "expo-location";
import * as geolib from "geolib";
import { COLORS } from "../../assets/Colors/Colors";
export default function DateFinder() {
const [hasForegroundPermissions, setHasForegroundPermissions] =
useState(null);
const [userLocation, setUserLocation] = useState(null);
const [userHeading, setUserHeading] = useState(null);
const [angle, setAngle] = useState(0);
useEffect(() => {
const AccessLocation = async () => {
function appSettings() {
console.warn("Open settigs pressed");
if (Platform.OS === "ios") {
Linking.openURL("app-settings:");
} else RNAndroidOpenSettings.appDetailsSettings();
}
const appSettingsALert = () => {
Alert.alert(
"Allow Wassupp to Use your Location",
"Open your app settings to allow Wassupp to access your current position. Without it, you won't be able to use the love compass",
[
{
text: "Cancel",
onPress: () => console.warn("Cancel pressed"),
},
{ text: "Open settings", onPress: appSettings },
]
);
};
const foregroundPermissions =
await Location.requestForegroundPermissionsAsync();
if (
foregroundPermissions.canAskAgain == false ||
foregroundPermissions.status == "denied"
) {
appSettingsALert();
}
setHasForegroundPermissions(foregroundPermissions.status === "granted");
if (foregroundPermissions.status == "granted") {
const location = await Location.watchPositionAsync(
{
accuracy: Location.Accuracy.BestForNavigation,
activityType: Location.ActivityType.Fitness,
distanceInterval: 0,
},
(location) => {
setUserLocation(location);
}
);
const heading = await Location.watchHeadingAsync((heading) => {
setUserHeading(heading.trueHeading);
});
}
};
AccessLocation().catch(console.error);
}, []);
useEffect(() => {
if (userLocation != null) {
setAngle(getBearing() - userHeading);
rotateImage(angle);
}
}, [userLocation]);
const textPosition = JSON.stringify(userLocation);
const getBearing = () => {
const bearing = geolib.getGreatCircleBearing(
{
latitude: userLocation.coords.latitude,
longitude: userLocation.coords.longitude,
},
{
latitude: 45.47200370608976,
longitude: -73.86246549592089,
}
);
return bearing;
};
const rotation = new Animated.Value(0);
console.warn(angle);
const rotateImage = (angle) => {
Animated.timing(rotation, {
toValue: angle,
duration: 1000,
easing: Easing.bounce,
useNativeDriver: true,
}).start();
};
//console.warn(rotation);
return (
<View style={styles.background}>
<Text>{textPosition}</Text>
<Animated.Image
source={require("../../assets/Compass/Arrow_up.png")}
style={[styles.image, { transform: [{ rotate: `${angle}deg` }] }]}
/>
</View>
);
}
const styles = StyleSheet.create({
background: {
backgroundColor: COLORS.background_Pale,
flex: 1,
// justifyContent: "flex-start",
//alignItems: "center",
},
image: {
flex: 1,
// height: null,
// width: null,
//alignItems: "center",
},
scrollView: {
backgroundColor: COLORS.background_Pale,
},
});
I think the math I'm doing must be wrong, because the arrow points in random directions and spins like crazy instead of pointing to the coordinates I gave it. Also, I can't seem to use the rotateImage function in a way that animates the rotation so I could use it to animate the image/compass. If anyone could help me out I'd really appreciate it; I've been stuck on this for literally weeks.
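A minimal sketch of one way to stabilize this, assuming the bearing from geolib and the heading from expo-location are both in degrees. The Animated.Value is kept in a useRef (useRef would need to be imported from React) so it is not recreated on every render, and the normalized angle drives the animation:

// Sketch only: replaces the second useEffect and the inline rotate transform.
const rotation = useRef(new Animated.Value(0)).current;

useEffect(() => {
  if (userLocation != null && userHeading != null) {
    // normalize bearing - heading into [0, 360) so the arrow doesn't spin wildly
    const nextAngle = (((getBearing() - userHeading) % 360) + 360) % 360;
    setAngle(nextAngle);
    Animated.timing(rotation, {
      toValue: nextAngle,
      duration: 500,
      easing: Easing.linear,
      useNativeDriver: true,
    }).start();
  }
}, [userLocation, userHeading]);

// map the animated value to a degree string for the image transform
const rotate = rotation.interpolate({
  inputRange: [0, 360],
  outputRange: ['0deg', '360deg'],
});

// ...and in the JSX:
// <Animated.Image
//   source={require("../../assets/Compass/Arrow_up.png")}
//   style={[styles.image, { transform: [{ rotate }] }]}
// />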

react-native error: expected conv2d_input to have shape [null,32,32,1] but got array with shape [1,32,32,3]

I am making an app using React Native Expo.
We are trying to recognize objects from the camera with a trained model.
To use the existing Python code in React Native Expo, I tried to port it to JS, but it wasn't as easy as I thought. Please tell me how to solve it.
python
while True:
    # READ IMAGE
    success, imgOrignal = cap.read()
    img = np.asarray(imgOrignal)
    img = cv2.resize(img, (32, 32))
    img = preprocessing(img)
    cv2.imshow("Processed Image", img)
    img = img.reshape(1, 32, 32, 1)
    cv2.putText(imgOrignal, "CLASS: ", (20, 35), font, 0.75, (0, 0, 255), 2, cv2.LINE_AA)
    cv2.putText(imgOrignal, "PROBABILITY: ", (20, 75), font, 0.75, (0, 0, 255), 2, cv2.LINE_AA)
    predictions = model.predict(img)
    classIndex = np.argmax(predictions[0])
    probabilityValue = np.amax(predictions)
    if probabilityValue > threshold:
        cv2.putText(imgOrignal, str(classIndex) + " " + str(getCalssName(classIndex)), (120, 35), font, 0.75, (0, 0, 255), 2, cv2.LINE_AA)
        cv2.putText(imgOrignal, str(round(probabilityValue * 100, 2)) + "%", (180, 75), font, 0.75, (0, 0, 255), 2, cv2.LINE_AA)
    cv2.imshow("Result", imgOrignal)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
cv2.destroyAllWindows()
react-native-expo
/* eslint-disable react-hooks/exhaustive-deps */
import React, { createRef, useCallback, useEffect, useState } from "react";
import { Platform, SafeAreaView, StyleSheet } from "react-native";
import * as tf from "@tensorflow/tfjs";
import {
bundleResourceIO,
cameraWithTensors,
} from "@tensorflow/tfjs-react-native";
import { Camera, CameraType } from "expo-camera";
const CAMERA_SIZE = { height: 640, width: 480 };
const App = () => {
const [signDetector, setSignDetector] = useState<tf.LayersModel>();
const TensorCamera = cameraWithTensors(Camera);
const tensorCameraRef = createRef<any>();
const textureDims =
Platform.OS === "ios"
? { height: 1920, width: 1080 }
: { height: 1200, width: 1600 };
useEffect(() => {
loadModel();
}, []);
const loadModel = useCallback(async () => {
const isAllowedCamera = fn_requestPermisison();
if (!isAllowedCamera) return;
console.log("[+] Application started");
//Wait for tensorflow module to be ready
await tf.ready();
console.log("[+] Loading custom mask detection model");
//Replce model.json and group1-shard.bin with your own custom model
const modelJson = await require("./assets/AIDatas/model.json");
const modelWeight = await require("./assets/AIDatas/group1-shard1of1.bin");
const newSignDetector = await tf.loadLayersModel(
bundleResourceIO(modelJson, modelWeight)
);
console.log("[+] Loading pre-trained face detection model");
console.log("summary", signDetector?.summary());
setSignDetector(newSignDetector);
console.log("[+] Model Loaded");
}, []);
const fn_estimateBlazeFace = async (tensorImage: any) => {
if (signDetector) {
const predictions: any = await signDetector.predict(tensorImage, {
batchSize: 3,
});
console.log(`[+] [${predictions}]`);
}
return true;
};
const fn_onReadyTensorCamera = async (
images: IterableIterator<tf.Tensor3D>
) => {
const loop = async () => {
if (signDetector !== undefined) {
const nextImageTensor = tf.image.resizeBilinear(
images.next().value,
[32, 32],
true
);
const expandedImageTensor = tf.expandDims(nextImageTensor, 0);
console.log("expandedImageTensor", expandedImageTensor);
await fn_estimateBlazeFace(expandedImageTensor)
.then((isEstimate) => {
console.log("result", isEstimate);
})
.catch((error) => {
console.log(error);
});
setTimeout(() => {
requestAnimationFrame(loop);
}, 20000);
}
};
loop();
};
const fn_requestPermisison = async () => {
const { status } = await Camera.requestCameraPermissionsAsync();
if (status !== "granted") {
console.log("The camera's permission request was not approved.");
return false;
}
return true;
};
return (
<SafeAreaView style={styles.container}>
<TensorCamera
ref={tensorCameraRef}
style={styles.camera}
type={CameraType.back}
cameraTextureHeight={textureDims.height}
cameraTextureWidth={textureDims.width}
resizeHeight={CAMERA_SIZE.height}
resizeWidth={CAMERA_SIZE.width}
resizeDepth={3}
autorender={true}
useCustomShadersToResize={false}
onReady={fn_onReadyTensorCamera}
/>
</SafeAreaView>
);
};
const styles = StyleSheet.create({
container: {
flex: 1,
},
camera: {
position: "absolute",
width: CAMERA_SIZE.width,
height: CAMERA_SIZE.height,
},
});
export default App;
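The shape error comes from the channel dimension: the Python pipeline converts each frame to grayscale (one channel) before reshaping to (1, 32, 32, 1), while the tensor camera delivers three RGB channels, producing [1, 32, 32, 3]. Below is a minimal sketch of matching that preprocessing in tfjs before calling predict; preprocessFrame is a hypothetical helper, and the plain /255 normalization is an assumption since the Python preprocessing() step (including any histogram equalization) is not shown:

// Sketch only: collapse the RGB channels to one grayscale channel so the
// input becomes [1, 32, 32, 1], matching img.reshape(1, 32, 32, 1) in Python.
const preprocessFrame = (imageTensor: tf.Tensor3D) =>
  tf.tidy(() => {
    const resized = tf.image.resizeBilinear(imageTensor, [32, 32], true);
    const gray = resized.mean(2, true);   // average R, G, B -> shape [32, 32, 1]
    const normalized = gray.div(255);     // assumption: model was trained on [0, 1] inputs
    return normalized.expandDims(0);      // shape [1, 32, 32, 1]
  });

// usage inside the camera loop, replacing the plain resize + expandDims:
// const inputTensor = preprocessFrame(images.next().value);
// await fn_estimateBlazeFace(inputTensor);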

React time lines

I have a big problem with the react-timelines package (https://openbase.com/js/react-timelines).
I want something like this photo (having 3 <p> tags with different classNames), but in the default case of this package I can't do it!
I think I should use something like createElement and textContent in JS, but I don't know how!
My code:
import React, { Component } from "react";
import Timeline from "react-timelines";
import "react-timelines/lib/css/style.css";
import { START_YEAR, NUM_OF_YEARS, NUM_OF_TRACKS } from "./constant";
import { buildTimebar, buildTrack } from "./builder";
import { fill } from "./utils";
const now = new Date("2021-01-01");
const timebar = buildTimebar();
// eslint-disable-next-line no-alert
const clickElement = (element) =>
alert(`Clicked element\n${JSON.stringify(element, null, 2)}`);
class App extends Component {
constructor(props) {
super(props);
const tracksById = fill(NUM_OF_TRACKS).reduce((acc, i) => {
const track = buildTrack(i + 1);
acc[track.id] = track;
return acc;
}, {});
this.state = {
open: false,
zoom: 2,
// eslint-disable-next-line react/no-unused-state
tracksById,
tracks: Object.values(tracksById),
};
}
handleToggleOpen = () => {
this.setState(({ open }) => ({ open: !open }));
};
handleToggleTrackOpen = (track) => {
this.setState((state) => {
const tracksById = {
...state.tracksById,
[track.id]: {
...track,
isOpen: !track.isOpen,
},
};
return {
tracksById,
tracks: Object.values(tracksById),
};
});
};
render() {
const { open, zoom, tracks } = this.state;
const start = new Date(`${START_YEAR}`);
const end = new Date(`${START_YEAR + NUM_OF_YEARS}`);
return (
<div className="app">
<Timeline
scale={{
start,
end,
zoom,
}}
isOpen={open}
toggleOpen={this.handleToggleOpen}
clickElement={clickElement}
timebar={timebar}
tracks={tracks}
now={now}
enableSticky
scrollToNow
/>
</div>
);
}
}
export default App;
builder.js:
export const buildElement = ({ trackId, start, end, i }) => {
const bgColor = nextColor();
const color = colourIsLight(...hexToRgb(bgColor)) ? "#000000" : "#ffffff";
return {
id: `t-${trackId}-el-${i}`,
title: "Bye Title: Hello Type: String",
start,
end,
style: {
backgroundColor: `#${bgColor}`,
color,
borderRadius: "12px",
width: "auto",
height: "120px",
textTransform: "capitalize",
},
};
};
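One possible direction, shown as a minimal sketch: build the element title as a React node with React.createElement (three <p> tags with their own class names), assuming the Timeline renders whatever value is passed as title as React children, which is worth verifying against the package source. The class names here are hypothetical, and nextColor, colourIsLight, and hexToRgb are the existing helpers from builder.js:

import React from "react";

export const buildElement = ({ trackId, start, end, i }) => {
  const bgColor = nextColor();
  const color = colourIsLight(...hexToRgb(bgColor)) ? "#000000" : "#ffffff";
  return {
    id: `t-${trackId}-el-${i}`,
    // three <p> tags with different classNames instead of a plain string title
    title: React.createElement(
      React.Fragment,
      null,
      React.createElement("p", { className: "element-title" }, "Bye Title"),
      React.createElement("p", { className: "element-type" }, "Hello Type"),
      React.createElement("p", { className: "element-value" }, "String")
    ),
    start,
    end,
    style: {
      backgroundColor: `#${bgColor}`,
      color,
      borderRadius: "12px",
      width: "auto",
      height: "120px",
    },
  };
};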

Zoom restriction to a certain image size in react

Hello everyone, we are building an app for a company. My task is to create an image crop feature for profile pictures, and I am using the react-easy-crop library. I have implemented nearly all the features; the only missing piece is restricting zoom to a certain pixel size. Our company doesn't want users to be able to zoom in below 600 x 600 pixels, meaning that when users zoom in, the cropped image can't drop below 600 x 600 pixels. Here is my code structure (be aware this is not the full code; I have reduced it for complexity reasons):
import React, { useEffect, useRef, useState, FC, useCallback, SetStateAction } from 'react';
import Cropper from 'react-easy-crop';
import Resizer from 'react-image-file-resizer';
import { Text, Button, DragAndDrop } from '@shared/components';
import { Modal } from '@shared/components/Modal/lazy';
import { getOrientation } from 'get-orientation';
import { COLOR_NAMES } from '@src/settings/colors.constants';
import { Icon } from '@src/shared/components/icon';
import { Centered } from '@src/shared/components/layout/centered';
import { useDispatch } from 'react-redux';
import { Flex } from 'rebass';
import { Dispatch } from 'redux';
import { ZoomAndApplyContainer, CropContainer } from '@app/shared/components/crop-image/styled';
import { FileValue } from '@shared/components/upload-single-document/interfaces';
import { UploadSingleImageProps } from '@app/shared/components/upload-single-image/interfaces';
import { CoverPicEditComponent, ImageUploadContainer, PicEditComponent } from '@app/shared/components/upload-single-image/styled';
import { MODAL_NAMES, MODAL_TYPES } from '@app/shared/store/constants/modal.constants';
import { ShowModalAC, HideModalAC } from '@app/shared/store/actions/modal.actions';
import { NumberOfBytesInOneMB, TOASTER_APPEARANCES } from '@app/shared/constants';
import { SetToastsActionCreator } from '@app/shared/store/actions/toast.actions';
import { validateFileType } from '@app/utils/validations';
import { PRIMARY } from '../button/button.constants';
import { FormikFieldErrorMessage } from '../formik-field/styled';
const readFile: any = (file: any) =>
new Promise((resolve: any) => {
const reader: any = new FileReader();
reader.addEventListener('load', () => resolve(reader.result), false);
reader.readAsDataURL(file);
});
const createImage: any = (url: any) =>
new Promise((resolve: any, reject: any) => {
const image: any = new Image();
image.addEventListener('load', () => resolve(image));
image.addEventListener('error', (error: any) => reject(error));
image.setAttribute('crossOrigin', 'anonymous'); // needed to avoid cross-origin issues on CodeSandbox
image.src = url;
});
const getRadianAngle: any = (degreeValue: any) => (degreeValue * Math.PI) / 180;
const ORIENTATION_TO_ANGLE: any = {
3: 180,
6: 90,
8: -90,
};
/**
* This function was adapted from the one in the ReadMe of https://github.com/DominicTobias/react-image-crop
* @param {File} image - Image File url
* @param {Object} pixelCrop - pixelCrop Object provided by react-easy-crop
* @param {number} rotation - optional rotation parameter
*/
const getCroppedImg: any = async (imageSrc: any, pixelCrop: any, rotation: any = 0) => {
const image: any = await createImage(imageSrc);
const canvas: any = document.createElement('canvas');
const ctx: any = canvas.getContext('2d');
const maxSize: any = Math.max(image.width, image.height);
const safeArea: any = 2 * ((maxSize / 2) * Math.sqrt(2));
// set each dimensions to double largest dimension to allow for a safe area for the
// image to rotate in without being clipped by canvas context
canvas.width = safeArea;
canvas.height = safeArea;
// translate canvas context to a central location on image to allow rotating around the center.
ctx.translate(safeArea / 2, safeArea / 2);
ctx.rotate(getRadianAngle(rotation));
ctx.translate(-safeArea / 2, -safeArea / 2);
// draw rotated image and store data.
ctx.drawImage(image, safeArea / 2 - image.width * 0.5, safeArea / 2 - image.height * 0.5);
const data: any = ctx.getImageData(0, 0, safeArea, safeArea);
// set canvas width to final desired crop size - this will clear existing context
canvas.width = pixelCrop.width;
canvas.height = pixelCrop.height;
// paste generated rotate image with correct offsets for x,y crop values.
ctx.putImageData(data, Math.round(0 - safeArea / 2 + image.width * 0.5 - pixelCrop.x), Math.round(0 - safeArea / 2 + image.height * 0.5 - pixelCrop.y));
// As Base64 string
// return canvas.toDataURL('image/jpeg');
// As a blob
return new Promise((resolve: any) => {
canvas.toBlob((file: any) => {
resolve(file);
}, 'image/jpeg');
});
};
const getRotatedImage: any = async (imageSrc: any, rotation: number = 0) => {
const image: any = await createImage(imageSrc);
const canvas: any = document.createElement('canvas');
const ctx: any = canvas.getContext('2d');
const orientationChanged: boolean = rotation === 90 || rotation === -90 || rotation === 270 || rotation === -270;
if (orientationChanged) {
canvas.width = image.height;
canvas.height = image.width;
} else {
canvas.width = image.width;
canvas.height = image.height;
}
ctx.translate(canvas.width / 2, canvas.height / 2);
ctx.rotate((rotation * Math.PI) / 180);
ctx.drawImage(image, -image.width / 2, -image.height / 2);
return new Promise((resolve: any) => {
canvas.toBlob((file: any) => {
resolve(URL.createObjectURL(file));
}, 'image/jpeg');
});
};
export const UploadSingleImage: FC<UploadSingleImageProps> = ({
setFieldValue,
setFieldTouched,
name,
extensionName,
width = '600',
height = '600',
errorMessage,
isDisabled,
fileId,
extension,
title,
validationRules,
isCoverPhoto,
customContainerCss,
onChange,
editIconName = 'edit',
onUploaded,
}: UploadSingleImageProps) => {
const [value, setValue]: [FileValue, React.Dispatch<SetStateAction<FileValue>>] = useState(null);
const [imgSrc, setImgSrc]: any = useState(null);
const [maxZoom, setMaxZoom]: any = useState(1);
const [rotation, setRotation]: any = useState(0);
const [crop, setCrop]: any = useState({ x: 0, y: 0 });
const [imageSendingFail, setImageSendingFail]: [boolean, React.Dispatch<SetStateAction<boolean>>] = useState(true);
const [zoom, setZoom]: any = useState(1);
const [croppedAreaPixels, setCroppedAreaPixels]: any = useState(null);
const showCroppedImage: any = useCallback(async () => {
try {
const cropedImage: any = await getCroppedImg(imgSrc, croppedAreaPixels, rotation);
Resizer.imageFileResizer(
cropedImage,
600,
600,
'JPEG',
100,
0,
(file: any) => {
onChange(file, setValue);
dispatch(HideModalAC(MODAL_NAMES.IMAGE_CROP_MODAL));
setImgSrc(null);
},
'blob'
);
} catch (e) {
console.error(e);
}
}, [imgSrc, croppedAreaPixels, rotation]);
const imageInput: React.MutableRefObject<HTMLInputElement> = useRef();
const dispatch: Dispatch = useDispatch();
const toast: any = useCallback((toasts: any) => dispatch(SetToastsActionCreator(toasts)), [dispatch]);
const onCropComplete: any = useCallback((croppedArea: any, croppedAreaPixel: any) => {
setCroppedAreaPixels(croppedAreaPixel);
}, []);
const handleFileDrop: any = (e: any) => {
const files: any = e.dataTransfer.files;
if (files && files.length === 1) {
validateImage(files[0]);
}
};
const onFileChange: any = async (e: any) => {
if (e.target.files && e.target.files.length === 1) {
const file: any = e.target.files[0];
validateImage(file);
}
};
const onClick: any = (e: any) => {
setZoom(1);
e.target.value = '';
};
const validateImage: (file: File) => void = async (file: any) => {
setImageSendingFail(false);
// const imageDataUrl: any = await readFile(file);
// setImgSrc(imageDataUrl);
if (setFieldTouched) {
setFieldTouched(name);
}
if (validateFileType(toast, validationRules?.fileTypes, file)) {
let imageDataUrl: any = await readFile(file);
const img: any = createImage(imageDataUrl);
if (!validationRules || validateImg(toast, validationRules, img, file)) {
const orientation: any = await getOrientation(file);
const rotationPortion: any = ORIENTATION_TO_ANGLE[orientation];
if (rotation) {
imageDataUrl = await getRotatedImage(imageDataUrl, rotationPortion);
}
setImgSrc(imageDataUrl);
dispatch(ShowModalAC(MODAL_NAMES.IMAGE_CROP_MODAL));
} else {
imageInput.current.value = '';
setImageSendingFail(true);
}
}
};
useEffect(() => {
if (fileId && extension) {
setValue({ fileId, extension });
}
}, [fileId, extension]);
useEffect(() => {
if (setFieldValue) {
setFieldValue(name, value?.fileId);
setFieldValue(extensionName, value?.extension);
}
if (onUploaded && value?.fileId) {
onUploaded(name, value);
}
}, [value?.fileId, value?.extension]);
return (
<Flex justifyContent={'center'} alignItems={'center'} flexDirection={'column'} css={{ height: '100%' }} name={name}>
{imgSrc && (
<Modal bodyCss={{ padding: 0 }} bodyHeight='90%' width={'70%'} height={'85%'} borderRadius={'4px'} modalId={MODAL_NAMES.IMAGE_CROP_MODAL} headingText={'Resmi Düzenle'} type={MODAL_TYPES.NORMAL}>
<CropContainer>
<div style={{ width: '80%', height: '70%', position: 'relative' }}>
<Cropper
image={imgSrc}
crop={crop}
zoom={zoom}
rotation={rotation}
aspect={1 / 1}
onCropChange={setCrop}
onCropComplete={onCropComplete}
onZoomChange={setZoom}
restrictPosition={false}
onRotationChange={setRotation}
minZoom={0.5}
maxZoom={maxZoom}
onMediaLoaded={(imageSize: any) => {
if (imageSize.naturalWidth > 600) {
setMaxZoom(600 / Math.max(imageSize.height, imageSize.width));
} else {
setMaxZoom(1);
}
console.log(imageSize);
}}
/>
</div>
<ZoomAndApplyContainer>
<input type='range' value={zoom} min={0.5} max={maxZoom} step={0.05} onChange={(e: any) => setZoom(e.target.value)} />
<input type='range' value={rotation} min={0} max={360} step={10} onChange={(e: any) => setRotation(e.target.value)} />
<Button button={PRIMARY} onClick={showCroppedImage}>
Upload
</Button>
</ZoomAndApplyContainer>
</CropContainer>
</Modal>
)}
<DragAndDrop handleDrop={handleFileDrop}>
<ImageUploadContainer customContainerCss={customContainerCss} url={fileId && extension && `${fileId}${extension}`} width={width} height={height} errorMessage={errorMessage}>
{value?.fileId && value?.extension && isCoverPhoto && !isDisabled && (
<CoverPicEditComponent>
<label htmlFor={name}>
<Icon margin={'auto'} name={'upload-white'} />
<Text color={COLOR_NAMES.REMAX_WHITE} customWeight={1}>
Yeni kapak fotoğrafı yükle
</Text>
</label>
</CoverPicEditComponent>
)}
{value?.fileId && value?.extension && !isCoverPhoto && !isDisabled && (
<PicEditComponent className='edit-icon-container' htmlFor={name}>
<Icon name={editIconName} />
</PicEditComponent>
)}
{!(value?.fileId && value?.extension) && (
<Centered css={{ flexDirection: 'column' }}>
<Text customSize={[2, 3, 3]} lineHeight={1} customWeight={1} color={COLOR_NAMES.REMAX_TEXT_GREY_7f7f7f} css={{ textAlign: 'center', width: '145px', paddingBottom: '12px' }}>
{title}
</Text>
<label htmlFor={name}>
<Text customSize={[2, 3, 3]} customWeight={1} color={COLOR_NAMES.REMAX_BLUE_SELECTED_1451EF} textDecoration={'underline'}>
Dosya Seç
</Text>
</label>
</Centered>
)}
<input id={name} ref={imageInput} name={name} type='file' onChange={onFileChange} onClick={onClick} />
</ImageUploadContainer>
</DragAndDrop>
{/* If there is no image and the upload failed, show an error message */}
{!value?.fileId && imageSendingFail && errorMessage && <FormikFieldErrorMessage elipsis={true}>{errorMessage}</FormikFieldErrorMessage>}
</Flex>
);
};
Here is my solution for the problem, this callback inside the Cropper:
onMediaLoaded={(imageSize: any) => {
if (imageSize.naturalWidth > 600) {
setMaxZoom(600 / Math.max(imageSize.height, imageSize.width));
} else {
setMaxZoom(1);
}
}}
But this code doesn't ensure a max zoom restriction that produces 600 x 600 in the end (it works for some images but not for others). I would really appreciate any suggestions; thanks in advance.
const onCropComplete: any = useCallback((croppedArea: any, croppedAreaPixel: any) => {
if(croppedAreaPixel.width < requiredWidth || croppedAreaPixel.height< requiredHeight){
/** do something here to disallow zoom
* this is a workaround
*/
}
setCroppedAreaPixels(croppedAreaPixel);
}, []);
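Alternatively, here is a minimal sketch of the maxZoom calculation, under the assumption that with aspect 1 the cropped area in natural pixels is roughly min(naturalWidth, naturalHeight) / zoom, so zoom must stay at or below min(naturalWidth, naturalHeight) / 600 (REQUIRED_CROP_SIZE is a hypothetical constant):

// Sketch only: cap zoom so the square crop never covers fewer than 600 x 600 natural pixels.
const REQUIRED_CROP_SIZE = 600;

onMediaLoaded={(mediaSize: any) => {
  const smallestSide = Math.min(mediaSize.naturalWidth, mediaSize.naturalHeight);
  if (smallestSide > REQUIRED_CROP_SIZE) {
    // e.g. a 1200 x 1800 image allows zooming up to 2x before the crop drops below 600px
    setMaxZoom(smallestSide / REQUIRED_CROP_SIZE);
  } else {
    // the shorter side is already at or below 600px, so don't allow zooming in at all
    setMaxZoom(1);
  }
}}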

Deck.GL: How to adjust setState correctly so that ScatterplotLayer and TripsLayer render synchronously

(1) In deck.gl I want to achieve a combined visual effect by overlaying two layers. Currently the TripsLayer animates automatically thanks to the animation loop, but the ScatterplotLayer only animates when I drag the timeline progress bar at the bottom, as shown below. In other words, after overlaying they still behave like two unrelated layers: in the ScatterplotLayer (which uses DataFilterExtension) the circle radius is still controlled by the progress bar, so it only changes when dragged instead of rendering automatically like the TripsLayer.
I want them to render simultaneously based on the common timestamp attribute. In other words, when the TripsLayer trajectory moves, the ScatterplotLayer circle radius should change accordingly. I tried adding animation to the ScatterplotLayer before, but it had no effect; it may be that setState is not set up correctly. Thank you very much for your help.
A screenshot of the current effect and the complete code follow:
import React, {Component, Fragment} from 'react';
import {render} from 'react-dom';
import {StaticMap} from 'react-map-gl';
import {AmbientLight, PointLight, LightingEffect} from '@deck.gl/core';
import DeckGL from '@deck.gl/react';
import {PolygonLayer} from '@deck.gl/layers';
import {TripsLayer} from '@deck.gl/geo-layers';
import {ScatterplotLayer} from '@deck.gl/layers';
import {DataFilterExtension} from '@deck.gl/extensions';
import {MapView} from '@deck.gl/core';
import RangeInput from './range-input';
// Set your mapbox token here
const MAPBOX_TOKEN = process.env.MapboxAccessToken; // eslint-disable-line
// Source data CSV
const DATA_URL1 = {
TRIPS:
'./package1.json' // eslint-disable-line
};
const DATA_URL =
'./data2.csv'; // eslint-disable-line
const MAP_VIEW = new MapView({
// 1 is the distance between the camera and the ground
farZMultiplier: 100
});
const ambientLight = new AmbientLight({
color: [122, 122, 122],
intensity: 1.0
});
const pointLight = new PointLight({
color: [255, 255, 255],
intensity: 2.0,
position: [127.05, 37.5, 8000]
});
const lightingEffect = new LightingEffect({ambientLight, pointLight});
const material = {
ambient: 0.1,
diffuse: 0.6,
shininess: 32,
specularColor: [60, 64, 70]
};
const DEFAULT_THEME = {
buildingColor: [74, 80, 87],
trailColor0: [253, 128, 93],
trailColor1: [23, 184, 190],
material,
effects: [lightingEffect]
};
const INITIAL_VIEW_STATE = {
longitude: 126.9779692,
latitude: 37.566535,
zoom: 6,
pitch: 0,
bearing: 0
};
const landCover = [[[-74.0, 40.7], [-74.02, 40.7], [-74.02, 40.72], [-74.0, 40.72]]];
const MS_PER_DAY = 8.64e7; // milliseconds in a day
const dataFilter = new DataFilterExtension({filterSize: 1});
export default class App extends Component {
constructor(props) {
super(props);
this.state1 = {
time: 0
};
const timeRange = this._getTimeRange(props.data);
this.state = {
timeRange,
filterValue: timeRange,
hoveredObject: null,
};
this._onHover = this._onHover.bind(this);
this._renderTooltip = this._renderTooltip.bind(this);
}
componentWillReceiveProps(nextProps) {
if (nextProps.data !== this.props.data) {
const timeRange = this._getTimeRange(nextProps.data);
this.setState({timeRange, filterValue: timeRange});
}
}
componentDidMount() {
this._animate();
}
componentWillUnmount() {
if (this._animationFrame) {
window.cancelAnimationFrame(this._animationFrame);
}
}
_animate() {
const {
loopLength = 1000, // unit corresponds to the timestamp in source data
animationSpeed = 20 // unit time per second
} = this.props;
const timestamp = Date.now() / 1000;
const loopTime = loopLength / animationSpeed;
this.setState({
time: ((timestamp % loopTime) / loopTime) * loopLength
});
this._animationFrame = window.requestAnimationFrame(this._animate.bind(this));
}
_getTimeRange(data) {
if (!data) {
return null;
}
return data.reduce(
(range, d) => {
const t = d.timestamp / MS_PER_DAY;
range[0] = Math.min(range[0], t);
range[1] = Math.max(range[1], t);
return range;
},
[Infinity, -Infinity]
);
}
_onHover({x, y, object}) {
this.setState({x, y, hoveredObject: object});
}
_renderLayers() {
const {
buildings = DATA_URL1.BUILDINGS,
trips = DATA_URL1.TRIPS,
trailLength = 30,
theme = DEFAULT_THEME
} = this.props;
const {data} = this.props;
const {filterValue} = this.state;
return [
data &&
new ScatterplotLayer({
id: 'earthquakes',
data,
opacity: 0.8,
radiusScale: 1,
radiusMinPixels: 1,
wrapLongitude: true,
getPosition: d => [d.longitude, d.latitude, -d.depth * 1000],
getRadius: d => d.VisitingTime * 200,
getFillColor: d => {
const r = Math.sqrt(Math.max(d.depth, 0));
return [255 - r * 15, r * 5, r * 10];
},
getFilterValue: d => d.timestamp / MS_PER_DAY, // in days
filterRange: [filterValue[0], filterValue[1]],
filterSoftRange: [
filterValue[0] * 0.9 + filterValue[1] * 0.1,
filterValue[0] * 0.1 + filterValue[1] * 0.9
],
extensions: [dataFilter],
pickable: true,
onHover: this._onHover
}),
new PolygonLayer({
id: 'ground',
data: landCover,
getPolygon: f => f,
stroked: false,
getFillColor: [0, 0, 0, 0]
}),
new TripsLayer({
id: 'trips',
data: trips,
getPath: d => d.path,
getTimestamps: d => d.timestamps,
getColor: d => (d.vendor === 0 ? theme.trailColor0 : theme.trailColor1),
opacity: 0.3,
widthMinPixels: 2,
rounded: true,
trailLength,
currentTime: this.state.time,
shadowEnabled: false
}),
new PolygonLayer({
id: 'buildings',
data: buildings,
extruded: true,
wireframe: false,
opacity: 0.5,
getPolygon: f => f.polygon,
getElevation: f => f.height,
getFillColor: theme.buildingColor,
material: theme.material
})
];
}
_renderTooltip() {
const {x, y, hoveredObject} = this.state;
return (
hoveredObject && (
<div className="tooltip" style={{top: y, left: x}}>
<div>
<b>Time: </b>
<span>{new Date(hoveredObject.timestamp).toUTCString()}</span>
</div>
<div>
<b>VisitingTime: </b>
<span>{hoveredObject.VisitingTime}</span>
</div>
<div>
<b>Depth: </b>
<span>{hoveredObject.depth} km</span>
</div>
</div>
)
);
}
_formatLabel(t) {
const date = new Date(t * MS_PER_DAY);
return `${date.getUTCFullYear()}/${date.getUTCMonth() + 1}`;
}
render() {
const {
viewState,
mapStyle = 'mapbox://styles/mapbox/light-v9',
theme = DEFAULT_THEME
} = this.props;
const {timeRange, filterValue} = this.state;
return (
<Fragment>
<DeckGL
views={MAP_VIEW}
layers={this._renderLayers()}
effects={theme.effects}
initialViewState={INITIAL_VIEW_STATE}
viewState={viewState}
controller={true}
>
<StaticMap
reuseMaps
mapStyle={mapStyle}
preventStyleDiffing={true}
mapboxApiAccessToken={MAPBOX_TOKEN}
/>
{this._renderTooltip}
</DeckGL>
{timeRange && (
<RangeInput
min={timeRange[0]}
max={timeRange[1]}
value={filterValue}
formatLabel={this._formatLabel}
onChange={({value}) => this.setState({filterValue: value})}
/>
)}
</Fragment>
);
}
}
export function renderToDOM(container) {
require('d3-request').csv(DATA_URL, (error, response) => {
if (!error) {
const data = response.map(row => ({
timestamp: new Date(`${row.DateTime} UTC`).getTime(),
latitude: Number(row.Latitude),
longitude: Number(row.Longitude),
depth: Number(row.Depth),
VisitingTime: Number(row.VisitingTime)
}));
render(<App data={data} />, container);
}
});
}
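A minimal sketch of one way to drive the ScatterplotLayer filter from the same animation clock as the TripsLayer, by mapping the looping time value onto the data's day range and filtering a narrow window around it. The loopLength value mirrors the one in _animate(), and the one-day window width is an assumption:

// Sketch only: inside _renderLayers(), derive the filter from this.state.time
// instead of the slider value, so the circles follow the TripsLayer animation.
const {timeRange} = this.state;
const loopLength = 1000;                        // same loop length used in _animate()
const progress = this.state.time / loopLength;  // 0 .. 1 over one animation loop
const currentDay = timeRange[0] + progress * (timeRange[1] - timeRange[0]);
const windowSize = 1;                           // show points within half a day either side
const animatedFilter = [currentDay - windowSize / 2, currentDay + windowSize / 2];

new ScatterplotLayer({
  id: 'earthquakes',
  data,
  getPosition: d => [d.longitude, d.latitude, -d.depth * 1000],
  getRadius: d => d.VisitingTime * 200,
  getFilterValue: d => d.timestamp / MS_PER_DAY, // in days
  filterRange: animatedFilter,                   // a new range every frame
  extensions: [dataFilter],
});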
(2) In the following code and screenshots I put setState in setInterval and added a timer that calls a method named tick() every second, but the layer still did not move. You can see in the browser's developer tools that time is updated in real time, but filterValue is not updated (filterValue is called to retrieve the value each object will be filtered by; it returns either a number, if filterSize is 1, or an array; here I filter by timestamp). There may be a small step that is not set correctly. Thank you very much for your advice.
import React, {Component, Fragment} from 'react';
import {render} from 'react-dom';
import {StaticMap} from 'react-map-gl';
import {AmbientLight, PointLight, LightingEffect} from '@deck.gl/core';
import DeckGL from '@deck.gl/react';
import {PolygonLayer} from '@deck.gl/layers';
import {TripsLayer} from '@deck.gl/geo-layers';
import {ScatterplotLayer} from '@deck.gl/layers';
import {DataFilterExtension} from '@deck.gl/extensions';
import {MapView} from '@deck.gl/core';
// Set your mapbox token here
const MAPBOX_TOKEN = process.env.MapboxAccessToken; // eslint-disable-line
// Source data CSV
const DATA_URL1 = {
TRIPS:
'./package1.json' // eslint-disable-line
};
const DATA_URL =
'./data2.csv'; // eslint-disable-line
const MAP_VIEW = new MapView({
// 1 is the distance between the camera and the ground
farZMultiplier: 100
});
const ambientLight = new AmbientLight({
color: [122, 122, 122],
intensity: 1.0
});
const pointLight = new PointLight({
color: [255, 255, 255],
intensity: 2.0,
position: [127.05, 37.5, 8000]
});
const lightingEffect = new LightingEffect({ambientLight, pointLight});
const material = {
ambient: 0.1,
diffuse: 0.6,
shininess: 32,
specularColor: [60, 64, 70]
};
const DEFAULT_THEME = {
buildingColor: [74, 80, 87],
trailColor0: [253, 128, 93],
trailColor1: [23, 184, 190],
material,
effects: [lightingEffect]
};
const INITIAL_VIEW_STATE = {
longitude: 126.9779692,
latitude: 37.566535,
zoom: 6,
pitch: 0,
bearing: 0
};
const landCover = [[[-74.0, 40.7], [-74.02, 40.7], [-74.02, 40.72], [-74.0, 40.72]]];
const MS_PER_DAY = 8.64e7; // milliseconds in a day
const dataFilter = new DataFilterExtension({filterSize: 1});
export default class App extends Component {
constructor(props) {
super(props);
this.state = {date: new Date()};
const timeRange = this._getTimeRange(props.data);
this.state = {
timeRange,
filterValue: timeRange,
};
}
componentWillReceiveProps(nextProps) {
if (nextProps.data !== this.props.data) {
const timeRange = this._getTimeRange(nextProps.data);
this.setState({timeRange, filterValue: timeRange});
}
}
componentDidMount() {
this._animate();
this.timerID = setInterval(
() => this.tick(),
1000
);
}
componentWillUnmount() {
if (this._animationFrame) {
window.cancelAnimationFrame(this._animationFrame);
clearInterval(this.timerID);
}
}
tick() {
this.setState({
date: new Date()
});
}
_animate() {
const {
loopLength = 1000, // unit corresponds to the timestamp in source data
animationSpeed = 20 // unit time per second
} = this.props;
const timestamp = Date.now() / 1000;
const loopTime = loopLength / animationSpeed;
this.setState({
time: ((timestamp % loopTime) / loopTime) * loopLength
});
this._animationFrame = window.requestAnimationFrame(this._animate.bind(this));
}
_getTimeRange(data) {
if (!data) {
return null;
}
return data.reduce(
(range, d) => {
const t = d.timestamp / MS_PER_DAY;
range[0] = Math.min(range[0], t);
range[1] = Math.max(range[1], t);
return range;
},
[Infinity, -Infinity]
);
}
_renderLayers() {
const {
buildings = DATA_URL1.BUILDINGS,
trips = DATA_URL1.TRIPS,
trailLength = 30,
theme = DEFAULT_THEME
} = this.props;
const {data} = this.props;
const {filterValue} = this.state;
return [
data &&
new ScatterplotLayer({
id: 'ScatterplotLayer',
data,
opacity: 0.8,
radiusScale: 1,
radiusMinPixels: 1,
wrapLongitude: true,
rounded: true,
getTimestamps: d => d.timestamps,
getPosition: d => [d.longitude, d.latitude],
getRadius: d => d.VisitingTime * 200,
getFillColor: d => {
const r = Math.sqrt(Math.max(d.depth, 0));
return [255 - r * 15, r * 5, r * 10];
},
getFilterValue: d => d.timestamp / MS_PER_DAY, // in days
currentTime: this.state.time,
filterRange: [filterValue[0], filterValue[1]],
filterSoftRange: [
filterValue[0] * 0.9 + filterValue[1] * 0.1,
filterValue[0] * 0.1 + filterValue[1] * 0.9
],
extensions: [dataFilter]
}),
new PolygonLayer({
id: 'ground',
data: landCover,
getPolygon: f => f,
stroked: false,
getFillColor: [0, 0, 0, 0]
}),
new TripsLayer({
id: 'trips',
data: trips,
getPath: d => d.path,
getTimestamps: d => d.timestamps,
getColor: d => (d.vendor === 0 ? theme.trailColor0 : theme.trailColor1),
opacity: 0.3,
widthMinPixels: 2,
rounded: true,
trailLength,
currentTime: this.state.time,
shadowEnabled: false
}),
new PolygonLayer({
id: 'buildings',
data: buildings,
extruded: true,
wireframe: false,
opacity: 0.5,
getPolygon: f => f.polygon,
getElevation: f => f.height,
getFillColor: theme.buildingColor,
material: theme.material
})
];
}
render() {
const {
viewState,
mapStyle = 'mapbox://styles/mapbox/light-v9',
theme = DEFAULT_THEME
} = this.props;
return (
<Fragment>
<DeckGL
views={MAP_VIEW}
layers={this._renderLayers()}
effects={theme.effects}
initialViewState={INITIAL_VIEW_STATE}
viewState={viewState}
controller={true}
>
<StaticMap
reuseMaps
mapStyle={mapStyle}
preventStyleDiffing={true}
mapboxApiAccessToken={MAPBOX_TOKEN}
/>
</DeckGL>
</Fragment>
);
}
}
export function renderToDOM(container) {
require('d3-request').csv(DATA_URL, (error, response) => {
if (!error) {
const data = response.map(row => ({
timestamp: new Date(`${row.DateTime} UTC`).getTime(),
latitude: Number(row.Latitude),
longitude: Number(row.Longitude),
depth: Number(row.Depth),
VisitingTime: Number(row.VisitingTime)
}));
render(<App data={data} />, container);
}
});
}
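A minimal sketch of updating filterValue together with time inside _animate(), so the DataFilterExtension actually receives a new range each frame; mapping the loop time onto the day range (and the one-day window) is an assumption about how the filter window should move:

// Sketch only: advance both time and filterValue every frame so the
// ScatterplotLayer's filterRange changes along with the TripsLayer's currentTime.
_animate() {
  const {loopLength = 1000, animationSpeed = 20} = this.props;
  const timestamp = Date.now() / 1000;
  const loopTime = loopLength / animationSpeed;
  const time = ((timestamp % loopTime) / loopTime) * loopLength;

  const {timeRange} = this.state;
  if (timeRange) {
    const currentDay = timeRange[0] + (time / loopLength) * (timeRange[1] - timeRange[0]);
    const windowSize = 1; // days of data visible around the current time
    this.setState({
      time,
      filterValue: [currentDay - windowSize / 2, currentDay + windowSize / 2]
    });
  } else {
    this.setState({time});
  }

  this._animationFrame = window.requestAnimationFrame(this._animate.bind(this));
}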
