Js Convert Photo Cam in Input File - javascript

I have a webcam taking a photo via mediaDevices.getUserMedia into a local srcObject, but I want the photo to end up in an input type=file. How do I convert it?

You can't just assign a File object to an <input type="file"> by setting its value (although modern browsers do allow populating input.files programmatically via the DataTransfer API).
What you can do is create a Blob object out of the captured screenshot and later send it with the rest of the FormData.
/**
 * Captures a single frame from the user's camera and resolves with a JPEG Blob.
 * Rejects if camera access is denied or unavailable.
 */
const getScreenshotFromCameraBlob = () => {
  const canvas = document.createElement('canvas');
  const video = document.createElement('video');
  video.setAttribute("autoplay", true);
  return new Promise((resolve, reject) => navigator.mediaDevices
    .getUserMedia({
      video: true
    })
    .then((stream) => {
      video.srcObject = stream;
      video.addEventListener("loadeddata", () => {
        canvas.width = video.videoWidth;
        canvas.height = video.videoHeight;
        // BUG FIX: the current frame must be drawn onto the canvas before
        // exporting — otherwise toBlob() yields a blank (transparent) image.
        canvas.getContext('2d').drawImage(video, 0, 0);
        canvas.toBlob(resolve, 'image/jpeg');
        // Release the camera once the frame has been captured so the
        // recording indicator turns off.
        stream.getTracks().forEach((track) => track.stop());
      });
    })
    .catch(reject)
  );
};
// Capture one frame from the camera and hand it to the upload helper.
getScreenshotFromCameraBlob()
  .then((screenshotBlob) => {
    console.log(screenshotBlob);
    sendScreenshot(screenshotBlob);
  })
  .catch((error) => {
    console.log(error);
  });
/**
 * POSTs the captured screenshot as multipart/form-data.
 * Returns the fetch Promise so callers can await it or handle failures
 * (the original discarded it, leaving a floating promise).
 */
const sendScreenshot = (blob) => {
  const formData = new FormData();
  formData.append("screenshot", blob);
  // NOTE: '' posts to the current page URL — replace with your real endpoint.
  return fetch('', {
    method: 'POST',
    body: formData
  });
};

Related

How can I get video bitrate with javascript

I want to get the bitrate of video that uploader, because the backend need it.
var video = document.createElement('video');
video.preload = 'metadata';
video.src = URL.createObjectURL(document.getElementById('fileUp').files[0]);
// NOTE(review): revoking the object URL immediately prevents the video from
// ever loading its metadata — revoke only after 'loadedmetadata' has fired.
window.URL.revokeObjectURL(video.src);
// NOTE(review): HTMLVideoElement has no `bitrate` property; this logs
// `undefined`. Bitrate must be approximated (e.g. file size / duration).
console.log(video.bitrate);
You can get the video duration, then simply divide the file size by it to get an approximation (subtitles, audio, and container metadata are also included in this value). As far as I know, there is no standard API for getting the bitrate directly.
Example (credits https://stackoverflow.com/a/67899188/6072029 ) :
<div>
<script>
/**
 * Resolves with { duration, file_size, bitrate } for a media File.
 * Bitrate is approximated as file size / duration, so it includes audio,
 * subtitle and container overhead.
 * Rejects if the file's metadata cannot be decoded.
 */
const getVideoInfos = (file) =>
  new Promise((resolve, reject) => {
    // An object URL avoids reading the whole file into memory as a
    // base64 data URL (which roughly doubles its size).
    const url = URL.createObjectURL(file);
    const media = new Audio(url);
    media.onloadedmetadata = () => {
      URL.revokeObjectURL(url);
      resolve({
        duration: media.duration,
        file_size: file.size,
        bitrate: file.size / media.duration,
      });
    };
    // BUG FIX: the original promise never settled on decode failure.
    media.onerror = () => {
      URL.revokeObjectURL(url);
      reject(new Error('Could not load media metadata'));
    };
  });
// Reads the selected file, computes its infos, and renders them on the page.
const handleChange = async (e) => {
  const [selectedFile] = e.target.files;
  const infos = await getVideoInfos(selectedFile);
  const output = `Infos : ${JSON.stringify(infos, null, 4)}`;
  document.querySelector("#infos").innerText = output;
};
</script>
<input type="file" onchange="handleChange(event)" />
<p id="infos">infos: </p>
</div>

How to convert blob to png or jpg?

I have used react-images-zoom-print https://www.npmjs.com/package/react-images-zoom-print I am taking a photo of the API with a blob like this:
// Fetches the deposit image bytes, wraps them in an object URL,
// and opens the image dialog.
const downloadDepositMoneyInfosClickHandler = (row) => {
  props.service(row.id, (status, data) => {
    const imageBlob = new Blob([data], { type: "image/png" });
    imageRef.current = URL.createObjectURL(imageBlob);
    setImgDialogOpen(true);
  });
};
> console.log(imageRef.current)==>
blob:http://localhost:3000/c85aiua36d-2fua-43a8-a881-311d287ed37e
and in Lightbox:
<Lightbox
images={[{src:"'"+imageRef.current+"'"}]}
isOpen={imgDialogOpen}
onClose={closeImgDialog}
rotatable={true}
zoomable={true}
onPrint={() => window.print()}
/>
But it does not show it, I think it should become a png or jpg.
I want to turn it into a photo with the type png or jpg.
How should I convert it?
Try this one:
/**
 * Decodes a Blob into an HTMLImageElement.
 * Resolves once the image has loaded; rejects if the blob is not a
 * decodable image (the original promise hung forever in that case).
 */
const blobToImage = (blob) => {
  return new Promise((resolve, reject) => {
    const url = URL.createObjectURL(blob)
    let img = new Image()
    img.onload = () => {
      URL.revokeObjectURL(url)
      resolve(img)
    }
    // BUG FIX: without an error handler the promise never settles on bad data.
    img.onerror = (err) => {
      URL.revokeObjectURL(url)
      reject(err)
    }
    img.src = url
  })
}

How I can know audio/video duration before uploading?

I need to upload a file (audio/video) using the default input type='file', and then I should pass the duration of the video in the API request. How can I do this?
const uploadFile = async (event) => {
let file = await event.target.files[0];
//here api POST request where i should pass duration
}:
You can get the audio duration with HTMLMediaElement.duration:
/**
 * Resolves with the duration (in seconds) of an audio/video File.
 * Rejects if the file's metadata cannot be decoded.
 */
async function getDuration(file) {
  const url = URL.createObjectURL(file);
  return new Promise((resolve, reject) => {
    const audio = document.createElement("audio");
    audio.muted = true;
    const source = document.createElement("source");
    source.src = url; //--> blob URL
    audio.preload = "metadata";
    audio.appendChild(source);
    audio.onloadedmetadata = function () {
      // BUG FIX: free the object URL once metadata is available (it leaked).
      URL.revokeObjectURL(url);
      resolve(audio.duration);
    };
    // BUG FIX: reject on failure so callers are not left awaiting forever.
    audio.onerror = () => {
      URL.revokeObjectURL(url);
      reject(new Error("Could not read media metadata"));
    };
  });
}
Then in your function:
// Handles the <input type="file"> change event: reads the file, measures its
// duration, then performs the upload request.
const uploadFile = async (event) => {
  const file = event.target.files[0];
  const duration = await getDuration(file);
  //here api POST request where i should pass duration
};
// BUG FIX: the original ended with `}:` — a stray colon that is a syntax error.
You just need to create an element based on user input(video/audio) and get the duration property -
const VIDEO = "video",
  AUDIO = "audio";

// Placeholder for the real upload call; renders the detected metadata.
const uploadApiCall = (file, data = {}) => {
  // ----- YOUR API CALL CODE HERE -----
  document.querySelector("#duration").innerHTML = `${data.duration}s`;
  document.querySelector("#type").innerHTML = data.type;
};

let inputEl = document.querySelector("#fileinput");
inputEl.addEventListener("change", (e) => {
  let fileType = "";
  let file = inputEl.files[0];
  if (file.type.startsWith("audio/")) {
    fileType = AUDIO;
  } else if (file.type.startsWith("video/")) {
    fileType = VIDEO;
  } else {
    alert("Unsupported file");
    return;
  }
  let dataURL = URL.createObjectURL(file);
  // Create a matching <audio>/<video> element just to read the duration.
  let el = document.createElement(fileType);
  el.src = dataURL;
  el.onloadedmetadata = () => {
    // BUG FIX: release the object URL once metadata has been read (it leaked).
    URL.revokeObjectURL(dataURL);
    uploadApiCall(file, {
      duration: el.duration,
      type: fileType
    });
  };
});
<form>
<input type="file" accept="video/*,audio/*" id="fileinput" />
<hr />
Type:<span id="type"></span>
<br />
Duration:<span id="duration"></span>
</form>
In Vue 3 JS, I had to create a function first:
/**
 * Resolves with the duration (in seconds) of an audio/video File.
 * Rejects if the file's metadata cannot be decoded.
 */
const getDuration = async (file) => {
  const url = URL.createObjectURL(file);
  return new Promise((resolve, reject) => {
    const audio = document.createElement("audio");
    audio.muted = true;
    const source = document.createElement("source");
    source.src = url; //--> blob URL
    audio.preload = "metadata";
    audio.appendChild(source);
    audio.onloadedmetadata = function () {
      // BUG FIX: free the object URL once metadata is available (it leaked).
      URL.revokeObjectURL(url);
      resolve(audio.duration);
    };
    // BUG FIX: reject on failure so awaiting callers are not stuck forever.
    audio.onerror = () => {
      URL.revokeObjectURL(url);
      reject(new Error("Could not read media metadata"));
    };
  });
}
The user would select an MP3 file. Then when it was submitted I could call that function in the Submit function:
// Reads the selected MP3, stores it in Firebase Storage via uploadAudio(),
// then records the download URL and rounded duration on the album document.
const handleAudioSubmit = async () => {
  console.log('Your Epsiode Audio is being stored... please stand by!')
  if (file.value) {
    // returns a number that represents audio seconds
    duration.value = await getDuration(file.value)
    // remove the decimals by rounding up
    duration.value = Math.round(duration.value)
    console.log("duration: ", duration.value)
    // load the audio file to Firebase Storage using a composable function
    await uploadAudio(file.value)
      .then((downloadURL) => {
        // composable function returns Firebase Storage location URL
        epAudioUrl.value = downloadURL
      })
      .then(() => {
        console.log("uploadAudio function finished")
      })
      .then(() => {
        // Set the Album Fields based on the album id to Firestore DB
        // BUG FIX: the "albums" path segment was missing its closing quote,
        // which made the whole function a syntax error.
        const updateAudio = doc(db, "artist", artistId.value, "albums", albumID.value)
        // NOTE(review): this writes audioUrl.value although the upload stored
        // the URL in epAudioUrl.value above — confirm which ref is intended.
        // Return the promise so the write completes before we navigate away.
        return updateDoc(updateAudio, {
          audioUrl: audioUrl.value,
          audioDuration: duration.value
        }).then(() => {
          console.log("Audio URL and Duration added to Firestore!")
        })
      })
      .then(() => {
        console.log('Episode Audio has been added!')
        router.push({ name: 'Next' })
      })
      .catch((err) => {
        // BUG FIX: the chain had no rejection handler; surface failures.
        console.error(err)
      })
  } else {
    file.value = null
    fileError.value = 'Please select an audio file (MP3)'
  }
}
This takes some time to run and needs refactoring, but works provided you allow the async functions the time to finish. Hope that helps!

How to transform an audioblob in wav file using Reactjs or Javascript?

I am working on a VUI interface with a React.js frontend. I have a Blob that I can play, but I want to convert it to a .WAV file using React or JavaScript to send it to my server.
I have tried a lot of things, but found no solution.
// Toggles microphone recording: on start, opens the mic, records until the
// recorder is stopped elsewhere, then POSTs the captured audio and plays it.
toggleRecording() {
if (this.state.start === 1) {
console.log("we start recording", this.state.start)
this.setState({ start: 0, recognition: "" })
// NOTE(review): sampleRate/channelCount are constraint *hints* — the browser
// may record at a different rate; verify on the receiving end.
const constraints = {
audio: {
sampleRate: 16000,
channelCount: 1,
}
}
navigator.mediaDevices.getUserMedia(constraints)
.then(stream => {
console.log(this);
this.recorder = new MediaRecorder(stream);
this.recorder.start();
const audioChunks = [];
// Collect raw chunks as the recorder emits them.
this.recorder.addEventListener("dataavailable", event => {
audioChunks.push(event.data);
});
// NOTE(review): this.recorder.stop() is never called in this method —
// presumably triggered elsewhere; confirm, or the "stop" handler never runs.
this.recorder.addEventListener("stop", () => {
// NOTE(review): setting { type: 'audio/wav' } only relabels the blob; it
// does NOT transcode. MediaRecorder output is typically webm/ogg opus, so
// the server receives non-WAV bytes with a WAV content type. Actual WAV
// conversion needs re-encoding (e.g. via Web Audio / audiobuffer-to-wav).
const audioBlob = new Blob(audioChunks, { 'type': 'audio/wav' });
const audioUrl = URL.createObjectURL(audioBlob);
console.log("test: ", audioUrl)
console.log(audioBlob.type)
fetch('http://127.0.0.1:6060/api/sendaudio', {
method: "post",
headers: { 'Content-Type': 'audio/wav' },
body: audioBlob
})
.then(response => {
return response.text()
}).then(text => {
console.log(text);
this.setState({ recognition: text })
});
//to play the audio file:
const audio = new Audio(audioUrl);
audio.play();
});
});
}
I expect to get a Wav file to post to my server but don't know how to do that ....
You can try this package if you don't have a problem to add new dependency: https://www.npmjs.com/package/audiobuffer-to-wav
Hope it will work for you

How to upload multiple image to firebase in react native fetch blob

I have multiple images that are stored in an array (image paths stored in array).
then I use a for loop to upload every image, but only the last image is uploaded.
I use react native fetch blob, and firebase
// NOTE(review): the loop condition compares i against the ARRAY itself —
// it should be `i < this.state.imagesUri.length`. As written, the loop body
// behavior is broken, which is why only one image appears to upload.
// NOTE(review): `var i` is function-scoped, so the async .then callbacks all
// observe the final value of i — use `let i` (or cache i per iteration) so
// each upload targets its own "image"+i path.
for(var i = 0; i < this.state.imagesUri;i++){
Blob.build(RNFetchBlob.wrap(this.state.imagesUri[i].path),{ type : 'image/jpeg' })
.then((blob) => firebase.storage()
.ref("userPhoto").child("image"+i)
.put(blob, { contentType : 'image/png' }).then(()=>{
var storage = firebase.storage().ref("userPhoto/").child("image"+i);
storage.getDownloadURL().then((url)=>{
var url = url;
});
})
);
}
I hope this will help:
onSend(images) {
let photo = images.map( img=> img.image);
photo.forEach((image, i) => {
const sessionId = new Date().getTime();
const Blob = RNFetchBlob.polyfill.Blob;
const fs = RNFetchBlob.fs;
window.XMLHttpRequest =
RNFetchBlob.polyfill.XMLHttpRequest;
window.Blob = Blob;
let uploadBlob = null;
let mime = 'image/jpg';
const imageRef = this.image.child(`${sessionId}${i}`);
fs.readFile(image, 'base64')
.then((data) => {
return Blob.build(data, { type: `${mime};BASE64` })
})
.then((blob) => {
uploadBlob = blob;
return imageRef.put(blob, { contentType: mime })
})
.then(() => {
uploadBlob.close();
return imageRef.getDownloadURL()
})
.then((url) => {
console.log(url)
})
.catch((error) => {
});
})
}
OK, first of all, you need to cache the length of the array this.state.imagesUri.
This makes your for loop look like: for (var i = 0, length = this.state.imagesUri.length; i < length; i++) {. Note that the original condition i < this.state.imagesUri was incorrect — imagesUri is an array, so it must be compared against its length, not against the array itself.
I have this code. To upload multi image with firebase and 'rn-fetch-blob' on react native
// Uploads a list of meal images to Firebase Storage and resolves with their
// download URLs (in input order; failed uploads yield null).
export const uploadListImageMeal = (param) => {
  const { imagesUrls, mime = 'application/octet-stream', userID, time } = param
  const urls = imagesUrls.map((uri) => {
    // iOS returns file:// URIs that rn-fetch-blob cannot read directly.
    const uploadUri = Platform.OS === 'ios' ? uri.replace('file://', '') : uri
    // Timestamp keeps child file names unique within the meal folder.
    const currentTime = Date.now()
    const imageRef = firebase.storage().ref(`images/${userID}/meal/${time}`).child(`${currentTime}.png`)
    return fs.readFile(uploadUri, 'base64')
      .then((data) => {
        return Blob.build(data, { type: `${mime};BASE64` })
      })
      .then((blob) => {
        return imageRef.put(blob._ref, blob, { contentType: mime })
      })
      .then(() => {
        return imageRef.getDownloadURL()
      })
      .catch((error) => {
        // BUG FIX: the original did `return host` here, but `host` is not
        // defined anywhere — the catch itself threw a ReferenceError.
        // Return null so Promise.all still resolves for successful uploads.
        console.error(error)
        return null
      })
  })
  // The redundant trailing .then((data) => data) was removed.
  return Promise.all(urls)
}

Categories

Resources