I want to restart a circular progress bar after 30 seconds - javascript

const useProgress = (maxTimeInSeconds = 30) => {
const [elapsedTime, setElapsedTime] = useState(0);
const [progress, setProgress] = useState(0);
useEffect(() => {
const intervalId = setInterval((callback) => {
if (progress < 1) {
setElapsedTime((t) => t + 1);
}
}, 1000);
return () => clearInterval(intervalId);
}, []);
useEffect(() => {
setProgress(elapsedTime / maxTimeInSeconds);
console.log(elapsedTime);
}, [elapsedTime]);
return progress;
};
const progress = useProgress();
Here I have implemented the logic to animate the circular progress bar for 30 seconds. What I want is to restart the progress after 30 seconds so it keeps looping every 30 seconds.
Here is the whole code:
import { Observer } from "mobx-react";
import React, { Component, useEffect, useState } from "react";
import {
StyleSheet,
Text,
View,
ImageBackground,
TouchableOpacity,
Image,
Platform,
Dimensions,
Clipboard,
} from "react-native";
import * as Progress from "react-native-progress";
import AppStore from "../../stores/AppStore";
import Iconpack from "../../utils/Iconpack";
import Theme from "../../utils/Theme";
import DeviceInfo from "react-native-device-info";
import CircularProgressBar from "./CircularProgressBar";
import { Shadow } from "react-native-shadow-2";
import Toast from "react-native-simple-toast";
import { BoxShadow } from "react-native-shadow";
import FastImage from "react-native-fast-image";
// import Clipboard from "@react-native-community/clipboard";
const hRem = AppStore.screenHeight / 812;
const wRem = AppStore.screenWidth / 375;
const OtpDetails = ({ route, params, navigation }) => {
const { id, title } = route.params;
const useProgress = (maxTimeInSeconds = 30) => {
const [elapsedTime, setElapsedTime] = useState(0);
const [progress, setProgress] = useState(0);
useEffect(() => {
const intervalId = setInterval((callback) => {
if (progress < 1) {
setElapsedTime((t) => t + 1);
}
}, 1000);
return () => clearInterval(intervalId);
}, []);
useEffect(() => {
console.log(elapsedTime);
setProgress(elapsedTime / maxTimeInSeconds);
if (elapsedTime == 30) {
setProgress(elapsedTime / maxTimeInSeconds);
}
}, [elapsedTime / maxTimeInSeconds]);
return progress;
};
const progress = useProgress();
const setTextIntoClipboard = async () => {
await Clipboard.setString(title);
Toast.show("OTP copied successfully!", 200);
};
const deviceName = DeviceInfo.getModel();
return (
<Observer>
{() => (
<View style={{ flex: 1 }}>
<FastImage
style={styles.container}
source={Iconpack.GLOBAL_BG}
resizeMode={FastImage.resizeMode.cover}
/>
<View style={styles.headerStyle}>
<TouchableOpacity
onPress={() => {
navigation.navigate("OtpScreen");
}}
>
<Image
style={styles.backButton}
source={Iconpack.GLOBAL_BACK_BUTTON}
/>
</TouchableOpacity>
<Text style={styles.headerTitle}>{title}</Text>
<View style={{ flexDirection: "row" }}>
<TouchableOpacity>
<Image
style={[styles.editTrashButton, { marginRight: 19 }]}
source={Iconpack.EDIT_ICON}
/>
</TouchableOpacity>
<TouchableOpacity>
<Image
style={styles.editTrashButton}
source={Iconpack.DELETE_ICON}
/>
</TouchableOpacity>
</View>
</View>
<View style={styles.circleOuter}>
{Platform.OS === "ios" ? (
<View style={styles.circleContainer}>
<Progress.Circle
style={{ alignItems: "center" }}
progress={progress}
// indeterminate={this.state.indeterminate}
size={Dimensions.get("window").width * 0.561}
thickness={10}
borderWidth={0}
endAngle={10}
strokeCap="round"
color={"#354659"}
// indeterminateAnimationDuration={1}
unfilledColor={"#031830"}
/>
</View>
) : (
<Shadow
distance={20}
startColor={"rgba(27, 100, 206, 0.5)"}
radius={210}
safeRender={true}
>
<View style={styles.circleContainer}>
<Progress.Circle
style={{ alignItems: "center", overflow: "hidden" }}
progress={progress}
// indeterminate={this.state.indeterminate}
size={Dimensions.get("window").width * 0.561}
thickness={10}
borderWidth={0}
endAngle={10}
strokeCap="round"
color={"#354659"}
indeterminateAnimationDuration={2000}
unfilledColor={"#031830"}
/>
</View>
</Shadow>
)}
<Text style={[styles.circleItem]}>{title}</Text>
<TouchableOpacity
onPress={() => {
setTextIntoClipboard();
}}
style={styles.copyItem}
>
<Text style={styles.copyText}>TAP TO COPY</Text>
</TouchableOpacity>
</View>
{/* <View style={styles.circleOuter}>
<View style={styles.circleContainer}>
<Text style={styles.circleItem}>title</Text>
</View>
<TouchableOpacity
onPress={() => {
copyToClipboard();
}}
style={styles.copyItem}
>
<Text style={styles.copyText}>TAP TO COPY</Text>
</TouchableOpacity> */}
{/* </View> */}
</View>
)}
</Observer>
);
};
export default OtpDetails;
const styles = StyleSheet.create({
container: {
flex: 1,
position: "absolute",
top: 0,
right: 0,
left: 0,
bottom: 0,
backgroundColor: "#021831",
},
headerStyle: {
flexDirection: "row",
marginTop: Platform.OS === "ios" ? 56 : 40,
marginHorizontal: wRem * 26.66,
justifyContent: "space-between",
alignItems: "center",
},
backButton: {
width: wRem * 12,
height: hRem * 21,
},
editTrashButton: {
width: wRem * 23,
height: hRem * 23,
},
headerTitle: {
...Theme.encodeSansMed2,
color: "#FFFFFF",
fontWeight: "600",
marginLeft: wRem * 50,
},
circleOuter: {
flex: 1,
justifyContent: "center",
alignItems: "center",
},
circleContainer: {
borderRadius:
Math.round(
Dimensions.get("window").width + Dimensions.get("window").height
) / 2,
width: Dimensions.get("window").width * 0.56,
height: Dimensions.get("window").width * 0.56,
justifyContent: "center",
backgroundColor: "black",
shadowColor: "rgba(27, 100, 206, 0.5)",
shadowOffset: {
width: 0,
height: 0,
},
shadowOpacity: 30,
shadowRadius: 30,
},
circleItem: {
color: "white",
...Theme.encodeSansMed7,
textAlign: "center",
position: "absolute",
width: "100%",
paddingBottom: 100,
},
copyItem: {
justifyContent: "center",
alignItems: "center",
marginTop: hRem * 64,
},
copyText: {
color: "#3D4756",
...Theme.encodeSansMed4,
},
});
That covers all the styling. The most important thing is that I am using the third-party library react-native-progress. Thanks.

progress does not need to be a state atom since it's always unambiguously calculable from the elapsed time and max time.
Using the modulo (remainder) operator you can have the observed progress always loop back to 0 after the maximum time.
const useProgress = (maxTimeInSeconds = 30) => {
const [elapsedTime, setElapsedTime] = useState(0);
useEffect(() => {
const intervalId = setInterval(() => {
setElapsedTime((t) => t + 1);
}, 1000);
return () => clearInterval(intervalId);
}, []);
return (elapsedTime % maxTimeInSeconds) / maxTimeInSeconds;
};
const progress = useProgress();
Alternatively, if you want slightly more accurate, wall-clock-based timing:
const useProgress = (maxTimeInSeconds = 30) => {
const [initialTime] = useState(() => +new Date());
const [counter, setCounter] = useState(0);
useEffect(() => {
const intervalId = setInterval(() => {
setCounter((t) => t + 1);
}, 1000);
return () => clearInterval(intervalId);
}, []);
const elapsedTime = Math.floor(((+new Date()) - initialTime) / 1000);
// show a full circle at exact multiples of maxTimeInSeconds instead of snapping straight back to 0
if (elapsedTime > 0 && elapsedTime % maxTimeInSeconds === 0) {
return 1;
}
return (elapsedTime % maxTimeInSeconds) / maxTimeInSeconds;
};
This pins the progress start to the component's mount time, and should work better even when setInterval isn't accurate.
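For completeness, here is a minimal usage sketch (assuming the useProgress hook above and the Progress.Circle component from react-native-progress that the question already uses; the size and colors are placeholders):

// Minimal sketch: feed the looping value from useProgress into Progress.Circle.
import * as Progress from "react-native-progress";

const OtpCircle = () => {
  const progress = useProgress(30); // loops from 0 to 1 every 30 seconds

  return (
    <Progress.Circle
      progress={progress}
      size={200}
      thickness={10}
      borderWidth={0}
      color={"#354659"}
      unfilledColor={"#031830"}
    />
  );
};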

Related

SOLVED Expo-Camera, takePictureAsync undefined (Unhandled promise rejection: TypeError: ref.current.takePictureAsync is not a function)

I'm trying to create a React app with Expo, using expo-camera for taking pictures. I have separate components, MeasurementCameraScreen and MeasurementCamera. I'm using the useRef() hook to be able to call takePictureAsync() from MeasurementCameraScreen.
When pressing the take image button, takePicture() console.logs the ref, so I assume the onPress gets there, but then I get the following error message:
[Unhandled promise rejection: TypeError: ref.current.takePictureAsync is not a function. (In 'ref.current.takePictureAsync(options)', 'ref.current.takePictureAsync' is undefined)]
I saw that people have had the same issues with takePictureAsync(), but I haven't found a solution to my problem. I also tried combining the MeasurementCameraScreen and MeasurementCamera components into one, and with that I got the camera working, but I'm curious why it doesn't work now. Refs are a new thing for me, so I think there is something wrong with them.
Here are the components:
MeasurementCameraScreen
import { useRef } from 'react'
import { StyleSheet, TouchableOpacity, View } from 'react-native'
import MeasurementCamera from '../components/MeasurementCamera'
import Text from '../components/Text'
const MeasurementCameraScreen = () => {
const cameraRef = useRef(null)
return (
<View style={styles.container}>
<View style={styles.cameraContainer}>
<MeasurementCamera ref={cameraRef}/>
</View>
<View>
</View>
<TouchableOpacity
onPress={() => cameraRef.current.takePicture()}
style={styles.buttonContainer}
>
<Text>
Take image
</Text>
</TouchableOpacity>
</View>
)
}
const styles = StyleSheet.create({
container: {
flex: 1,
},
cameraContainer: {
flex: 1,
},
buttonContainer: {
width: '100%',
height: 70,
backgroundColor: 'white',
justifyContent: 'center',
alignItems: 'center',
alignSelf: 'flex-end'
},
})
export default MeasurementCameraScreen
MeasurementCamera
import { useState, useEffect, useImperativeHandle, forwardRef } from 'react'
import { StyleSheet } from "react-native"
import { Camera } from 'expo-camera'
import Text from './Text'
const MeasurementCamera = forwardRef((props, ref) => {
const [hasPermission, setHasPermission] = useState(null)
useEffect(() => {
const getPermission = async () => {
const { status } = await Camera.requestCameraPermissionsAsync()
setHasPermission(status === 'granted')
}
getPermission()
}, [])
const takePicture = async () => {
if (ref) {
console.log(ref.current)
const options = {
quality: 1,
base64: true
}
const picture = await ref.current.takePictureAsync(options)
console.log(picture.uri)
}
}
useImperativeHandle(ref, () => ({
takePicture
}))
if (hasPermission === null) {
return <Text>Requesting for camera permission</Text>
} if (hasPermission === false) {
return <Text>No access to camera</Text>
}
return (
<Camera
ref={ref}
style={StyleSheet.absoluteFillObject}
/>
)
})
MeasurementCamera.displayName = 'MeasurementCamera'
export default MeasurementCamera
EDIT: Solved the problem, check out the comment below for the solution! :)
Okay, I found the solution!
After reading the Medium article @LouaySleman recommended about expo-camera, I understood that to be able to use the Expo Camera component's functions I need to use a ref. So what I needed was two refs: one for the components to communicate and another one to be able to call the Camera's takePictureAsync() function.
Now that we have permission to access the Camera, you should get familiar with the ref prop on line 132, in the Camera component. There we have passed the cameraRef that was previously defined with useRef. In doing so, we will have access to interesting methods that we can call to control the Camera.
What you needed is to add two refs: one for the components to communicate and another one to be able to use the Camera's takePictureAsync() function. Please check this example from Medium:
import React, { useState, useRef, useEffect } from "react";
import {
View,
Text,
TouchableOpacity,
SafeAreaView,
StyleSheet,
Dimensions,
} from "react-native";
import { Camera } from "expo-camera";
import { Video } from "expo-av";
export default function CameraScreen() {
const [hasPermission, setHasPermission] = useState(null);
const [cameraType, setCameraType] = useState(Camera.Constants.Type.back);
const [isPreview, setIsPreview] = useState(false);
const [isCameraReady, setIsCameraReady] = useState(false);
const [isVideoRecording, setIsVideoRecording] = useState(false);
const [videoSource, setVideoSource] = useState(null);
const cameraRef = useRef();
useEffect(() => {
(async () => {
const { status } = await Camera.requestPermissionsAsync();
setHasPermission(status === "granted");
})();
}, []);
const onCameraReady = () => {
setIsCameraReady(true);
};
const takePicture = async () => {
if (cameraRef.current) {
const options = { quality: 0.5, base64: true, skipProcessing: true };
const data = await cameraRef.current.takePictureAsync(options);
const source = data.uri;
if (source) {
await cameraRef.current.pausePreview();
setIsPreview(true);
console.log("picture", source);
}
}
};
const recordVideo = async () => {
if (cameraRef.current) {
try {
const videoRecordPromise = cameraRef.current.recordAsync();
if (videoRecordPromise) {
setIsVideoRecording(true);
const data = await videoRecordPromise;
const source = data.uri;
if (source) {
setIsPreview(true);
console.log("video source", source);
setVideoSource(source);
}
}
} catch (error) {
console.warn(error);
}
}
};
const stopVideoRecording = () => {
if (cameraRef.current) {
setIsPreview(false);
setIsVideoRecording(false);
cameraRef.current.stopRecording();
}
};
const switchCamera = () => {
if (isPreview) {
return;
}
setCameraType((prevCameraType) =>
prevCameraType === Camera.Constants.Type.back
? Camera.Constants.Type.front
: Camera.Constants.Type.back
);
};
const cancelPreview = async () => {
await cameraRef.current.resumePreview();
setIsPreview(false);
setVideoSource(null);
};
const renderCancelPreviewButton = () => (
<TouchableOpacity onPress={cancelPreview} style={styles.closeButton}>
<View style={[styles.closeCross, { transform: [{ rotate: "45deg" }] }]} />
<View
style={[styles.closeCross, { transform: [{ rotate: "-45deg" }] }]}
/>
</TouchableOpacity>
);
const renderVideoPlayer = () => (
<Video
source={{ uri: videoSource }}
shouldPlay={true}
style={styles.media}
/>
);
const renderVideoRecordIndicator = () => (
<View style={styles.recordIndicatorContainer}>
<View style={styles.recordDot} />
<Text style={styles.recordTitle}>{"Recording..."}</Text>
</View>
);
const renderCaptureControl = () => (
<View style={styles.control}>
<TouchableOpacity disabled={!isCameraReady} onPress={switchCamera}>
<Text style={styles.text}>{"Flip"}</Text>
</TouchableOpacity>
<TouchableOpacity
activeOpacity={0.7}
disabled={!isCameraReady}
onLongPress={recordVideo}
onPressOut={stopVideoRecording}
onPress={takePicture}
style={styles.capture}
/>
</View>
);
if (hasPermission === null) {
return <View />;
}
if (hasPermission === false) {
return <Text style={styles.text}>No access to camera</Text>;
}
return (
<SafeAreaView style={styles.container}>
<Camera
ref={cameraRef}
style={styles.container}
type={cameraType}
flashMode={Camera.Constants.FlashMode.on}
onCameraReady={onCameraReady}
onMountError={(error) => {
console.log("camera error", error);
}}
/>
<View style={styles.container}>
{isVideoRecording && renderVideoRecordIndicator()}
{videoSource && renderVideoPlayer()}
{isPreview && renderCancelPreviewButton()}
{!videoSource && !isPreview && renderCaptureControl()}
</View>
</SafeAreaView>
);
}
const WINDOW_HEIGHT = Dimensions.get("window").height;
const closeButtonSize = Math.floor(WINDOW_HEIGHT * 0.032);
const captureSize = Math.floor(WINDOW_HEIGHT * 0.09);
const styles = StyleSheet.create({
container: {
...StyleSheet.absoluteFillObject,
},
closeButton: {
position: "absolute",
top: 35,
left: 15,
height: closeButtonSize,
width: closeButtonSize,
borderRadius: Math.floor(closeButtonSize / 2),
justifyContent: "center",
alignItems: "center",
backgroundColor: "#c4c5c4",
opacity: 0.7,
zIndex: 2,
},
media: {
...StyleSheet.absoluteFillObject,
},
closeCross: {
width: "68%",
height: 1,
backgroundColor: "black",
},
control: {
position: "absolute",
flexDirection: "row",
bottom: 38,
width: "100%",
alignItems: "center",
justifyContent: "center",
},
capture: {
backgroundColor: "#f5f6f5",
borderRadius: 5,
height: captureSize,
width: captureSize,
borderRadius: Math.floor(captureSize / 2),
marginHorizontal: 31,
},
recordIndicatorContainer: {
flexDirection: "row",
position: "absolute",
top: 25,
alignSelf: "center",
justifyContent: "center",
alignItems: "center",
backgroundColor: "transparent",
opacity: 0.7,
},
recordTitle: {
fontSize: 14,
color: "#ffffff",
textAlign: "center",
},
recordDot: {
borderRadius: 3,
height: 6,
width: 6,
backgroundColor: "#ff0000",
marginHorizontal: 5,
},
text: {
color: "#fff",
},
});
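Applied back to the original components, a minimal sketch of the two-ref idea could look like this (the internal cameraRef name is my own; the options come from the question's code):

// Sketch only: the forwarded ref exposes takePicture() to the parent screen,
// while a separate internal ref points at the actual expo-camera instance.
import { useRef, useImperativeHandle, forwardRef } from 'react'
import { Camera } from 'expo-camera'

const MeasurementCamera = forwardRef((props, ref) => {
  const cameraRef = useRef(null) // internal ref to the <Camera> component

  useImperativeHandle(ref, () => ({
    takePicture: async () => {
      if (cameraRef.current) {
        const options = { quality: 1, base64: true }
        const picture = await cameraRef.current.takePictureAsync(options)
        console.log(picture.uri)
      }
    },
  }))

  return <Camera ref={cameraRef} style={{ flex: 1 }} />
})

export default MeasurementCamera

The parent screen can keep calling cameraRef.current.takePicture() exactly as in the original MeasurementCameraScreen.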

How to make an image draggable within the parent view in react-native?

I have a FlatList carousel and I'm using react-native-gesture-handler to create a draggable zoom-in effect for the image. The zooming and dragging work, however I'm having problems setting a boundary for the image: once I zoom in it overlaps the next image, and the dragging goes all the way to the end of the slider. I found a couple of examples of how to achieve this, but all of them used react-native-reanimated. This is my code so far:
const { height, width } = Dimensions.get('window');
console.log("height", height, width)
const ITEM_HEIGHT = height * 0.7
const ITEM_WIDTH = height * 0.46
const Preview = ({ navigation, route }) => {
const { item, goBack } = route.params;
const buttonRef = React.useRef();
const ref = useRef()
const selectedPhotoIndex = data.findIndex((i) => i.id === item.id)
const scrollX = useRef(new Animated.Value(selectedPhotoIndex)).current
const scale = useRef(new Animated.Value(1)).current
const panRef = useRef()
const pinchRef = useRef()
const translationX = useRef(new Animated.Value(0)).current
const translationY = useRef(new Animated.Value(0)).current
const [scrollEnabled, setSCrollEnabled] = useState(true)
const goToIndex = useCallback((info) => {
const wait = new Promise(resolve => setTimeout(resolve, 200));
wait.then(() => {
ref.current?.scrollToIndex({ index: info.index, animated: true });
})
})
const pinchEvent = Animated.event([{ nativeEvent: { scale: scale } }], { useNativeDriver: true })
const onStateChange = (event) => {
if (event.nativeEvent.state === State.ACTIVE) {
setSCrollEnabled(false)
}
const nScale = event.nativeEvent.scale
if (event.nativeEvent.state === State.END || event.nativeEvent.oldState === State.ACTIVE) {
if (nScale < 1) {
Animated.spring(scale, {
toValue: 1,
useNativeDriver: true
}).start()
translationX.setOffset(0)
translationY.setOffset(0)
setSCrollEnabled(true)
}
} else if (event.nativeEvent.state === State.ACTIVE) {
setSCrollEnabled(false)
}
}
const panEvent = Animated.event([{
nativeEvent: {
translationX,
translationY
}
}], { useNativeDriver: true })
const onChange = (event) => {
translationX.extractOffset()
translationY.extractOffset()
if (event.nativeEvent.oldState === State.ACTIVE) {
}
}
const RenderItem = ({ item, translateX, viewLimit }) => {
return (
<View style={{ height, justifyContent: 'center', alignItems: 'center', alignContent: 'center' }}>
<PanGestureHandler failOffsetY={[-500, 500]}
failOffsetX={[-500, 500]}
enabled={!scrollEnabled}
ref={panRef}
simultaneousHandlers={[pinchRef]}
onHandlerStateChange={onChange}
onGestureEvent={panEvent}>
<Animated.View style={{
height, transform: [
{ translateX },
{ scale }
],
}}>
<SharedElement id={`item.${item.id}.image_url`}>
<PinchGestureHandler ref={pinchRef}
simultaneousHandlers={[panRef]}
onGestureEvent={pinchEvent}
onHandlerStateChange={onStateChange}>
<Animated.Image
source={{ uri: item.image_url }}
style={{
width: ITEM_WIDTH,
height: ITEM_HEIGHT,
transform: [
{ translateX: translationX },
{ translateY: translationY }
]
}}
resizeMode='contain'
/>
</PinchGestureHandler>
</SharedElement>
</Animated.View>
</PanGestureHandler>
<View
style={{ flexDirection: 'row', marginTop: 10, paddingHorizontal: 20 }}
>
<View style={{ flexDirection: 'column', paddingLeft: 6 }}>
<SharedElement id={`item.${item.id}.title`}>
<Text
style={{
color: 'white',
fontSize: 24,
fontWeight: 'bold',
lineHeight: 28
}}
>
{item.title}
</Text>
</SharedElement>
<SharedElement id={`item.${item.id}.description`}>
<Text
style={{
color: 'white',
fontSize: 16,
fontWeight: 'bold',
lineHeight: 18
}}
>
{item.description}
</Text>
</SharedElement>
</View>
</View>
</View>
)
}
return (
<View style={{ flex: 1, backgroundColor: '#0f0f0f', overflow: 'hidden' }}>
<BackButton navigation={navigation} />
<Animated.FlatList
onScroll={Animated.event([{ nativeEvent: { contentOffset: { x: scrollX } } }],
{ useNativeDriver: true })}
pagingEnabled
nestedScrollEnabled={true}
snapToInterval={ITEM_WIDTH}
decelerationRate="normal"
scrollEnabled={scrollEnabled}
horizontal
ref={ref}
data={data}
keyExtractor={(item) => item.id}
initialScrollIndex={selectedPhotoIndex}
onScrollToIndexFailed={info => goToIndex(info)}
renderItem={({ item: details, index }) => {
const inputRange = [
(index - 1) * ITEM_WIDTH,
index * ITEM_WIDTH,
(index + 1) * ITEM_WIDTH
]
const swipeX = scrollX.interpolate({
inputRange,
outputRange: [ITEM_WIDTH, 0, -ITEM_WIDTH]
})
const viewLimit = translationX.interpolate({
inputRange,
outputRange: [ITEM_WIDTH, 0, -ITEM_WIDTH / 2]
})
return (
<RenderItem viewLimit={viewLimit} translateX={swipeX} item={details} />
)
}} />
</View>
);
};
How can I zoom in and keep the dragged image contained within its view, without it overlapping the other images? I've tried changing the gesture handler based on this topic https://github.com/software-mansion/react-native-gesture-handler/issues/110 but it didn't work for me. Any help would be appreciated!
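One common approach (not from the original post, just a sketch that reuses the Animated values and the ITEM_WIDTH / ITEM_HEIGHT constants defined above) is to clamp the pan translation before applying it, so the image cannot be dragged outside its slide:

// Hypothetical sketch: clamp the pan translation with interpolate + extrapolate: 'clamp'.
// The clamp range here is a guess; the real bounds depend on the current zoom scale.
const clampedX = translationX.interpolate({
  inputRange: [-ITEM_WIDTH / 2, ITEM_WIDTH / 2],
  outputRange: [-ITEM_WIDTH / 2, ITEM_WIDTH / 2],
  extrapolate: 'clamp',
});
const clampedY = translationY.interpolate({
  inputRange: [-ITEM_HEIGHT / 2, ITEM_HEIGHT / 2],
  outputRange: [-ITEM_HEIGHT / 2, ITEM_HEIGHT / 2],
  extrapolate: 'clamp',
});

// ...and use the clamped values in the Animated.Image transform instead of the raw ones:
// transform: [{ translateX: clampedX }, { translateY: clampedY }]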

React Native Expo-Camera recording video problem: "User rejected audio permission needed"

I'm having trouble recording video with expo-camera: it fails with "User rejected audio permission needed". Has anyone had this problem? If anyone can tell me how to solve it, please do. I also ran expo install expo-permissions, but it still doesn't work.
Taking a snapshot works fine; the problem only happens when recording video. Here is my code.
import React, { useState, useRef, useEffect } from "react";
import {
StyleSheet,
Dimensions,
View,
Text,
TouchableOpacity,
SafeAreaView,
} from "react-native";
import { Camera } from "expo-camera";
import { Video } from "expo-av";
const WINDOW_HEIGHT = Dimensions.get("window").height;
const closeButtonSize = Math.floor(WINDOW_HEIGHT * 0.032);
const captureSize = Math.floor(WINDOW_HEIGHT * 0.09);
export default function App() {
const [hasPermission, setHasPermission] = useState(null);
const [cameraType, setCameraType] = useState(Camera.Constants.Type.back);
const [isPreview, setIsPreview] = useState(false);
const [isCameraReady, setIsCameraReady] = useState(false);
const [isVideoRecording, setIsVideoRecording] = useState(false);
const [videoSource, setVideoSource] = useState(null);
const cameraRef = useRef();
useEffect(() => {
(async () => {
const { status } = await Camera.requestPermissionsAsync()
setHasPermission(status === "granted");
})();
}, []);
const onCameraReady = () => {
setIsCameraReady(true);
};
const takePicture = async () => {
if (cameraRef.current) {
const options = { quality: 0.5, base64: true, skipProcessing: true };
const data = await cameraRef.current.takePictureAsync(options);
const source = data.uri;
if (source) {
await cameraRef.current.pausePreview();
setIsPreview(true);
console.log("picture source", source);
}
}
};
const recordVideo = async () => {
if (cameraRef.current) {
try {
const videoRecordPromise = cameraRef.current.recordAsync();
if (videoRecordPromise) {
setIsVideoRecording(true);
const data = await videoRecordPromise;
const source = data.uri;
if (source) {
setIsPreview(true);
console.log("video source", source);
setVideoSource(source);
}
}
} catch (error) {
console.warn(error);
}
}
};
const stopVideoRecording = () => {
if (cameraRef.current) {
setIsPreview(false);
setIsVideoRecording(false);
cameraRef.current.stopRecording();
}
};
const switchCamera = () => {
if (isPreview) {
return;
}
setCameraType((prevCameraType) =>
prevCameraType === Camera.Constants.Type.back
? Camera.Constants.Type.front
: Camera.Constants.Type.back
);
};
const cancelPreview = async () => {
await cameraRef.current.resumePreview();
setIsPreview(false);
setVideoSource(null);
};
const renderCancelPreviewButton = () => (
<TouchableOpacity onPress={cancelPreview} style={styles.closeButton}>
<View style={[styles.closeCross, { transform: [{ rotate: "45deg" }] }]} />
<View
style={[styles.closeCross, { transform: [{ rotate: "-45deg" }] }]}
/>
</TouchableOpacity>
);
const renderVideoPlayer = () => (
<Video
source={{ uri: videoSource }}
shouldPlay={true}
style={styles.media}
/>
);
const renderVideoRecordIndicator = () => (
<View style={styles.recordIndicatorContainer}>
<View style={styles.recordDot} />
<Text style={styles.recordTitle}>{"Recording..."}</Text>
</View>
);
const renderCaptureControl = () => (
<View style={styles.control}>
<TouchableOpacity disabled={!isCameraReady} onPress={switchCamera}>
<Text style={styles.text}>{"Flip"}</Text>
</TouchableOpacity>
<TouchableOpacity
activeOpacity={0.7}
disabled={!isCameraReady}
onLongPress={recordVideo}
onPressOut={stopVideoRecording}
onPress={takePicture}
style={styles.capture}
/>
</View>
);
if (hasPermission === null) {
return <View />;
}
if (hasPermission === false) {
return <Text style={styles.text}>No access to camera</Text>;
}
return (
<SafeAreaView style={styles.container}>
<Camera
ref={cameraRef}
style={styles.container}
type={cameraType}
flashMode={Camera.Constants.FlashMode.on}
onCameraReady={onCameraReady}
onMountError={(error) => {
console.log("cammera error", error);
}}
/>
<View style={styles.container}>
{isVideoRecording && renderVideoRecordIndicator()}
{videoSource && renderVideoPlayer()}
{isPreview && renderCancelPreviewButton()}
{!videoSource && !isPreview && renderCaptureControl()}
</View>
</SafeAreaView>
);
}
const styles = StyleSheet.create({
container: {
...StyleSheet.absoluteFillObject,
},
closeButton: {
position: "absolute",
top: 35,
left: 15,
height: closeButtonSize,
width: closeButtonSize,
borderRadius: Math.floor(closeButtonSize / 2),
justifyContent: "center",
alignItems: "center",
backgroundColor: "#c4c5c4",
opacity: 0.7,
zIndex: 2,
},
media: {
...StyleSheet.absoluteFillObject,
},
closeCross: {
width: "68%",
height: 1,
backgroundColor: "black",
},
control: {
position: "absolute",
flexDirection: "row",
bottom: 38,
width: "100%",
alignItems: "center",
justifyContent: "center",
},
capture: {
backgroundColor: "#f5f6f5",
borderRadius: 5,
height: captureSize,
width: captureSize,
borderRadius: Math.floor(captureSize / 2),
marginHorizontal: 31,
},
recordIndicatorContainer: {
flexDirection: "row",
position: "absolute",
top: 25,
alignSelf: "center",
justifyContent: "center",
alignItems: "center",
backgroundColor: "transparent",
opacity: 0.7,
},
recordTitle: {
fontSize: 14,
color: "#ffffff",
textAlign: "center",
},
recordDot: {
borderRadius: 3,
height: 6,
width: 6,
backgroundColor: "#ff0000",
marginHorizontal: 5,
},
text: {
color: "#fff",
},
});
Using expo-camera together with expo-av you can do something like this:
// Audio comes from expo-av: import { Audio } from 'expo-av';
const getPermissions = useCallback(async () => {
  const { status } = await Camera.requestPermissionsAsync();
  await Audio.requestPermissionsAsync();
  await Audio.setAudioModeAsync({
    allowsRecordingIOS: true,
    playsInSilentModeIOS: true,
  });
  setHasPermission(status === 'granted');
}, []);
Now with expo-camera we have to ask the user for microphone permission separately from the camera permission. You can update your code like this:
const [hasCameraPermission, setHasCameraPermission] = useState(false);
const [hasMicPermission, setHasMicPermission] = useState(false);

useEffect(() => {
  (async () => {
    const camStatus = await Camera.requestCameraPermissionsAsync();
    if (camStatus.granted) {
      setHasCameraPermission(true);
    }
    const micStatus = await Camera.requestMicrophonePermissionsAsync();
    if (micStatus.granted) {
      setHasMicPermission(true);
    }
  })();
}, []);
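A short follow-up sketch (the variable and ref names are assumed from the code above): only start recording once both permissions have been granted, so recordAsync() never runs without microphone access:

// Hypothetical usage: gate recording on both permissions from the effect above.
const recordVideo = async () => {
  if (!hasCameraPermission || !hasMicPermission || !cameraRef.current) {
    return;
  }
  try {
    const data = await cameraRef.current.recordAsync();
    console.log("video source", data.uri);
  } catch (error) {
    console.warn(error);
  }
};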

How To Implement Auto Scrolling Video in React Native using JSX

import React, {useState, useEffect, useRef} from 'react';
import {
View,
Text,
StyleSheet,
FlatList,
ScrollView,
Image,
TouchableHighlight,
TextInput,
TouchableOpacity,
Modal,
Dimensions,
SafeAreaView,
} from 'react-native';
import Voice from 'react-native-voice';
import Video from 'react-native-video';
import SimpleLineIcons from 'react-native-vector-icons/SimpleLineIcons';
import MaterialCommunityIcons from 'react-native-vector-icons/MaterialCommunityIcons';
import {videos} from './data';
// 1. Start by creating the state for every event and value that will be used
export default function App() {
const [pitch, setPitch] = useState('');
const [error, setError] = useState('');
const [end, setEnd] = useState('');
const [started, setStarted] = useState('');
const [results, setResults] = useState([]);
const [videoData, setVideos] = useState([...videos]);
const [showModal, setShowModal] = useState(false);
const [paused, setPaused] = useState(true);
const [position, setPosition] = useState({start: null, end: null});
const [muted, setMuted] = useState(true);
//Setting callbacks for the process status
useEffect(() => {
Voice.onSpeechStart = onSpeechStart;
Voice.onSpeechEnd = onSpeechEnd;
Voice.onSpeechError = onSpeechError;
Voice.onSpeechResults = onSpeechResults;
return () => {
//destroy the process after switching the screen
Voice.destroy().then(Voice.removeAllListeners);
};
}, []);
const onSpeechStart = (e) => {
//Invoked when .start() is called without error
console.log('onSpeechStart: ', e);
setStarted('√');
};
const onSpeechEnd = (e) => {
//Invoked when SpeechRecognizer stops recognition
console.log('onSpeechEnd: ', e);
setEnd('√');
};
const onSpeechError = (e) => {
//Invoked when an error occurs.
console.log('onSpeechError: ', e);
setError(JSON.stringify(e.error));
};
2. In this part we match the word spoken by the user against our data; if a video with the same name exists, it is moved to the top of the list.
const onSpeechResults = (e) => {
//Invoked when SpeechRecognizer is finished recognizing
console.log('onSpeechResults: ', e.value[0]);
let vResult = e.value[0];
let small = vResult.toLowerCase();
setResults([small]);
setShowModal(false);
for (let i = 0; i < videoData.length - 1; i++) {
let newArray = [...videos];
if (small == newArray[i].name) {
newArray.splice(0, 0, newArray[i]);
const mainArray = newArray.filter((item, index) => {
if (item.name == small && index != 0) {
} else {
return item;
}
});
setVideos(mainArray);
}
}
};
const startRecognizing = async () => {
console.log('start listening');
//Starts listening for speech for a specific locale
try {
await Voice.start('en-US');
setPitch('');
setError('');
setStarted('');
setResults([]);
setEnd('');
} catch (e) {
//eslint-disable-next-line
console.error(e);
}
};
const stopRecognizing = async () => {
//Stops listening for speech
try {
await Voice.stop();
} catch (e) {
//eslint-disable-next-line
console.error(e);
}
};
3. Playing and pausing the video depending on whether it is within the visible scroll range.
onScroll = (event) => {
const scrollPosition = event.nativeEvent.contentOffset.y;
const paused = paused;
const {start, end} = position;
if (scrollPosition < start && scrollPosition > end && paused) {
setPaused(false);
} else if (scrollPosition > start && scrollPosition < end && !paused) {
setPaused(true);
} else if (scrollPosition < end && !paused) {
setPaused(true);
}
};
4. Getting the video's position on the scroll y-axis.
const threshold = 180;
onVideoLayOut = (event) => {
const {height} = Dimensions.get('window');
const start = -(event.nativeEvent.layout.y - height + threshold);
const end =
event.nativeEvent.layout.y + event.nativeEvent.layout.height - threshold;
// console.log(start, end, 'position');
setPosition((state) => {
return {
...state,
start: start,
end: end,
};
});
console.log(position, 'position1');
};
5. The render part, using a FlatList to render the data.
return (
<View
style={{
flex: 1,
backgroundColor: 'black',
}}>
<View
style={{
height: 70,
alignItems: 'center',
justifyContent: 'space-between',
marginHorizontal: 15,
flexDirection: 'row',
marginVertical: 15,
marginTop: 25,
}}>
<View
style={{
height: 40,
borderRadius: 20,
width: 300,
borderWidth: 1,
borderColor: '#F23C29',
}}>
<TextInput
placeholder="Search ..."
style={{flex: 1, paddingHorizontal: 30, color: 'white'}}
placeholderTextColor="grey"
/>
</View>
<View>
<TouchableOpacity
onPress={() => {
startRecognizing();
setShowModal(true);
}}>
<SimpleLineIcons color={'white'} name="microphone" size={30} />
</TouchableOpacity>
</View>
</View>
<SafeAreaView
style={{
flex: 1,
justifyContent: 'center',
alignItems: 'center',
borderColor: 'orange',
borderWidth: 1,
}}>
<FlatList
data={videoData}
// scrollEventThrottle={16}
onScroll={onScroll}
showsVerticalScrollIndicator={true}
renderItem={({item, index}) => {
return (
<View
key={index}
style={{
height: 250,
marginVertical: 20,
}}>
<Video
source={{uri: item.source}}
onLayout={onVideoLayOut}
muted={true}
resizeMode="cover"
paused={!paused}
style={{
width: '100%',
height: undefined,
aspectRatio: 2,
marginBottom: 200,
marginTop: 300,
}}
/>
<Text
style={{
fontSize: 20,
alignItems: 'center',
marginLeft: 20,
marginTop: 15,
color: 'white',
}}>
{item.title}
</Text>
</View>
);
}}
/>
</SafeAreaView>
{/* <ScrollView style={{flex: 2, width: '100%'}}>
{videoData.map((item, index) => {
return (
<View
key={index}
style={{
height: 250,
marginVertical: 20,
}}>
<Video
source={{uri: item.source}}
paused={false}
rate={1.0}
volume={1.0}
isMuted={false}
resizeMode="contain"
shouldPlay
isLooping
style={{width: '100%', height: undefined, aspectRatio: 2}}
onError={(e) => {
console.log(e, 'video data');
}}
/>
<Text
style={{
fontSize: 20,
alignItems: 'center',
marginLeft: 20,
marginTop: 15,
color: 'white',
}}>
{item.title}
</Text>
</View>
);
})}
</ScrollView> */}
<Modal
visible={showModal}
transparent={true}
onRequestClose={() => {
setShowModal(false);
}}>
<View
style={{
flex: 1,
justifyContent: 'center',
alignItems: 'center',
backgroundColor: 'rgba(0,0,0,0.9)',
}}>
<MaterialCommunityIcons
name="microphone-settings"
size={45}
color="white"
/>
<Text
style={{
color: '#fff',
textAlign: 'center',
marginTop: 20,
fontSize: 17,
}}>
We are listening
</Text>
</View>
</Modal>
{/* <View style={{justifyContent: 'center', alignItems: 'center'}}>
<Text>Video</Text>
<View>
<TouchableHighlight
onPress={stopRecognizing}
style={styles.buttonStyle}>
<Text style={styles.buttonTextStyle}>Stop</Text>
</TouchableHighlight>
</View>
<View>
<TouchableHighlight onPress={startRecognizing}>
<SimpleLineIcons name="microphone" size={25} />
</TouchableHighlight>
</View>
{results.map((item, index) => {
return (
<Text key={`result-${index}`} style={styles.textStyle}>
{item}
</Text>
);
})}
<Text>Results</Text>
</View> */}
</View>
);
}
const styles = StyleSheet.create({
backgroundVideo: {
position: 'absolute',
top: 0,
left: 0,
bottom: 0,
right: 0,
},
});
The question: in my FlatList there are three videos rendered at a time. I was trying to make the first video play and the rest stay paused, and after a 5-second interval the next video should play.
I tried to build a demo that looks like Instagram, where the video in focus plays automatically and all the others are paused.
I tried using the scroll props and also a video-autoscroll library but could not resolve it.
The videos are getting rendered, but...
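For reference, a common pattern for this Instagram-style autoplay (a sketch only, not from the post; it assumes the videoData items and the react-native-video paused prop used above) is FlatList's viewability callbacks:

// Sketch: play only the item that is currently viewable.
const [visibleIndex, setVisibleIndex] = useState(0);
const viewabilityConfig = useRef({itemVisiblePercentThreshold: 80}).current;
const onViewableItemsChanged = useRef(({viewableItems}) => {
  if (viewableItems.length > 0) {
    setVisibleIndex(viewableItems[0].index);
  }
}).current;

// ...
<FlatList
  data={videoData}
  viewabilityConfig={viewabilityConfig}
  onViewableItemsChanged={onViewableItemsChanged}
  renderItem={({item, index}) => (
    <Video
      source={{uri: item.source}}
      paused={index !== visibleIndex} // only the visible video plays
      resizeMode="cover"
      style={{width: '100%', aspectRatio: 2}}
    />
  )}
/>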

How can I get my data list back after searching by words?

I use a FlatList and search by words in the renderHeader() function. I can filter, but when I delete letters I can't get the main list back. I think there is a logic problem, but I couldn't find it after trying a few things...
I ran into a new problem after fixing the first one. I tried to fix it but couldn't, so I'm putting the problem in front of the experts :)
import React, {Component, useState} from 'react'
import {
Text,
StyleSheet,
View,
FlatList,
SafeAreaView,
ScrollView,
Image,
TextInput,
} from 'react-native'
import data from '../../data'
const Flatlistexample = () => {
//main list state
let [list, setList] = useState(data);
//search state
const [search, setSearch] = useState('');
//search filter
searchFilter = text => {
// onChangeText
const newData = list.filter(item => {
const listItem = `${item.name.toLowerCase()} ${item.company.toLowerCase()}`
return listItem.indexOf(text.toLowerCase()) > -1;
})
setList(newData)
}
//search function
renderHeader = () =>{
return (
<View style={styles.searchContainer}>
<TextInput
style={styles.textInput}
placeholder={'Search...'}
value={search}
onChangeText={text => {
//setStates
searchFilter(text)
setSearch(text)
}}></TextInput>
<Text
style={{
alignItems: 'flex-start',
color: 'black',
fontSize: 22,
}}>
{search}
</Text>
</View>
)
}
return (
<SafeAreaView
style={{
flex: 1,
}}>
<FlatList
data={list}
renderItem={({item, index}) => {
return (
<ScrollView>
<SafeAreaView
style={[
styles.container,
{backgroundColor: index % 2 === 0 ? '#fafafa' : '#bbb'},
]}>
<Image style={styles.profile} source={{uri: item.picture}} />
<View style={styles.rightside}>
<Text style={styles.name}>{item.name}</Text>
<Text style={styles.company}>{item.company}</Text>
</View>
</SafeAreaView>
</ScrollView>
)
}}
keyExtractor={item => item._id}
//called search function
ListHeaderComponent={renderHeader()}
/>
</SafeAreaView>
)
}
export default Flatlistexample
const styles = StyleSheet.create({
container: {
flexDirection: 'row',
alignItems: 'center',
borderBottomWidth: 1,
borderColor: 'gray',
},
profile: {
width: 50,
height: 50,
borderRadius: 25,
marginLeft: 10,
},
rightside: {
marginLeft: 20,
justifyContent: 'space-between',
marginVertical: 5,
},
name: {
fontSize: 22,
marginBottom: 10,
},
searchContainer: {
padding: 10,
borderWidth: 2,
borderColor: 'gray',
},
textInput: {
fontSize: 16,
backgroundColor: '#f9f9f9',
padding: 10,
},
})
Thanks for your help
Filter Data
onSearchText = (value) => {
  this.setState({ searchText: value });
  if (value == null || value.trim() === "") {
    // empty search: reset so the render falls back to the full list
    this.setState({ filterData: this.state.list });
  } else {
    let filter = this.state.list.filter((data) => {
      // data filter logic //
      return data;
    });
    this.setState({ filterData: filter });
  }
};
Render FlatList
<FlatList
extraData={this.state}
data={searchText ? filterData : list}
/>
I fixed it.
How?
My main data state stays constant; I filter into a separate filter state, so the main data list never changes.
import React, {Component, useState} from 'react'
import {
Text,
StyleSheet,
View,
FlatList,
SafeAreaView,
ScrollView,
Image,
TextInput,
} from 'react-native'
import data from '../../data'
const Flatlistexample = () => {
//main list state
let [list, setList] = useState(data)
//search state
const [search, setSearch] = useState('')
//filter state
const [updated, setUpdated] = useState(data)
//search filter
searchFilter = text => {
// onChangeText
if (text) {
const newData = list.filter(item => {
const listItem = `${item.name.toLowerCase()} ${item.company.toLowerCase()}`
return listItem.indexOf(text.toLowerCase()) > -1
})
setUpdated(newData)
} else {
// empty search text: show the full list again
setUpdated(data)
}
}
//search function
renderHeader = () => {
return (
<View style={styles.searchContainer}>
<TextInput
style={styles.textInput}
placeholder={'Search...'}
value={search}
onChangeText={text => {
searchFilter(text)
setSearch(text)
}}></TextInput>
<Text
style={{
alignItems: 'flex-start',
color: 'black',
fontSize: 22,
}}>
{search}
</Text>
</View>
)
}
return (
<SafeAreaView
style={{
flex: 1,
}}>
<FlatList
//i'm showing filter state
data={updated}
renderItem={({item, index}) => {
return (
<ScrollView>
<SafeAreaView
style={[
styles.container,
{backgroundColor: index % 2 === 0 ? '#fafafa' : '#bbb'},
]}>
<Image style={styles.profile} source={{uri: item.picture}} />
<View style={styles.rightside}>
<Text style={styles.name}>{item.name}</Text>
<Text style={styles.company}>{item.company}</Text>
</View>
</SafeAreaView>
</ScrollView>
)
}}
keyExtractor={item => item._id}
//called search function
ListHeaderComponent={renderHeader()}
/>
</SafeAreaView>
)
}
export default Flatlistexample
const styles = StyleSheet.create({
container: {
flexDirection: 'row',
alignItems: 'center',
borderBottomWidth: 1,
borderColor: 'gray',
},
profile: {
width: 50,
height: 50,
borderRadius: 25,
marginLeft: 10,
},
rightside: {
marginLeft: 20,
justifyContent: 'space-between',
marginVertical: 5,
},
name: {
fontSize: 22,
marginBottom: 10,
},
searchContainer: {
padding: 10,
borderWidth: 2,
borderColor: 'gray',
},
textInput: {
fontSize: 16,
backgroundColor: '#f9f9f9',
padding: 10,
},
})
/*
else if(text.length > uzunluk){
setList(data)
const newData = list.filter(item => {
const listItem = `${item.name.toLowerCase()} ${item.company.toLowerCase()}`
return listItem.indexOf(text.toLowerCase()) > -1;
})
setList(newData)
}else if(text.length<uzunluk){
setList(data)
const newData = list.filter(item => {
const listItem = `${item.name.toLowerCase()} ${item.company.toLowerCase()}`
return listItem.indexOf(text.toLowerCase()) > -1;
})
setList(newData)
}
*/
