How to play a base64 encoded mp3 in PhoneGap? - javascript

After much searching I was able to play an mp3 with this code in PhoneGap:
if (device.platform == 'Android') {
    src = '/android_asset/www/' + src;
}
var mediaRes = new Media(src,
    function onSuccess() {
        // release the media resource once finished playing
        mediaRes.release();
    },
    function onError(e) {
        console.log("error playing sound: " + JSON.stringify(e));
    });
mediaRes.play();
But I need to play a base64 encoded mp3, not an mp3 file. Can anyone help me?
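One possible approach, not from the thread itself but a minimal sketch: if the WebView supports data: URIs in HTML5 audio, you can skip the Media plugin entirely (base64Mp3 is an assumed variable holding the encoded string):

function playBase64Mp3(base64Mp3) {
    // build a data: URI from the base64 string and play it with HTML5 audio,
    // bypassing the Media plugin entirely (assumes the WebView supports this)
    var audio = new Audio('data:audio/mpeg;base64,' + base64Mp3);
    audio.play();
}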

Related

Webcam recordings in .mp4 or .webm do not play in the Safari browser on macOS or on iOS devices

I record a video through VideoJS. The code looks like this:
// Video recording via webcam
var videoMaxLengthInSeconds = 180;

// Initialize the video player
let videoBlob;
var player = videojs("myVideo", {
    controls: true,
    width: 720,
    height: 480,
    fluid: false,
    plugins: {
        record: {
            audio: true,
            video: true,
            maxLength: videoMaxLengthInSeconds,
            debug: true,
            videoMimeType: "video/mp4"
        }
    }
}, function() {
    // print version information at startup
    videojs.log(
        'Using video.js', videojs.VERSION,
        'with videojs-record', videojs.getPluginVersion('record'),
        'and recordrtc', RecordRTC.version
    );
});

// error handling for getUserMedia
player.on('deviceError', function() {
    console.log('device error:', player.deviceErrorCode);
});

// Handle error events of the video player
player.on('error', function(error) {
    console.log('error:', error);
});

// user clicked the record button and started recording!
player.on('startRecord', function() {
    console.log('started recording! Do whatever you need to');
});

// user completed recording and stream is available.
// Upload the Blob to your server or download it locally!
let recording;
let recordingData;
player.on('finishRecord', function() {
    // the blob object contains the recorded data that
    // can be downloaded by the user, stored on server etc.
    recordingData = player.recordedData;
    videoBlob = player.recordedData.video;
    //let myBlob = new Blob(player.recordedData, { type: "video/webm" });
    let objectURL = window.URL.createObjectURL(player.recordedData);
    let downloadButton = document.getElementById('downloadButton');
    downloadButton.href = objectURL;
    downloadButton.download = "Vlog.webm";
    //recording = new File(myBlob, 'vlog.webm');
    console.log(recording);
    console.log('finished recording: ', videoBlob);
});

// Sending recorded video to server
$('#postButton').click(function() {
    // Get form data
    let form = document.querySelectorAll('#form');
    let formData = new FormData(form[0]);
    let disabled = document.getElementById("commentsDisable").checked;
    console.log("Comments Enabled: " + disabled);
    formData.append('commentsDisabled', disabled);
    let selection = document.getElementById('categorySelect');
    let selected = selection.options[selection.selectedIndex].value;
    // Append selected category
    formData.append('category', selected);
    // Append YouTube embed link
    if (ytUrl) {
        formData.append('ytlink', ytUrl);
    }
    // Append recorded blob to form data as file
    if (recordingData) {
        console.log('Recording detected: ' + recordingData);
        formData.append('videoFile', recordingData, recordingData.name);
    }
    // Append video from local upload
    if (tempFile) {
        formData.append('videoFile', tempFile);
    }
    // Send POST request via AJAX to server
    $.ajax({
        type: "POST",
        url: "/make_vlog/",
        processData: false,
        contentType: false,
        data: formData,
        success: function(response) {
            alert(response);
            //location.href = "/vlogs";
        }
    });
});
On the server side I have a Django app which stores the file as .mp4 and creates a new Vlog model.
When I open the page, the video is loaded and can be played by all browsers, except that Safari and iOS devices don't play the video ("Format not supported").
When I instead upload a video from a file rather than a webcam recording, and the file is a valid mp4 video (for example from here: example_video), the file plays on every device and browser.
I think the problem is the video encoding in my JS code. The same problem occurs with a .webm file as well.
When I download the webm, convert it to mp4 in VLC, and upload that to the server, the video plays correctly.
Does anyone have experience with this problem?
Thanks
You need to convert the webm videos to mp4 server side for playback in Safari.
With web-based webcam recording, each browser saves in its own native format (MIME type): Safari saves mp4/mp3, while other browsers usually save webm.
Changing the file extension does not help; you need to convert the video.
You can convert the webm to mp4 with ffmpeg, server side, as in the sketch below.
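A minimal sketch of that conversion, here via fluent-ffmpeg from Node since that library appears elsewhere on this page (the file paths are placeholder assumptions; the equivalent CLI call would be ffmpeg -i vlog.webm -c:v libx264 -c:a aac vlog.mp4):

var ffmpeg = require('fluent-ffmpeg');

ffmpeg('uploads/vlog.webm')            // placeholder input path
    .videoCodec('libx264')             // H.264 video, which Safari/iOS can play
    .audioCodec('aac')                 // AAC audio, which Safari/iOS can play
    .format('mp4')
    .on('error', function (err) {
        console.log('conversion failed:', err.message);
    })
    .on('end', function () {
        console.log('conversion finished');
    })
    .save('uploads/vlog.mp4');         // placeholder output path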

Fluent-ffmpeg: merging video and audio = wrong frames

I'm trying to merge a video (mp4) without an audio stream with an audio file (mp3). I'm developing a video application under node-webkit, which means I have to use ogg files, so when the user uploads a video or an audio file it is converted to ogg whatever its format. Then, when the user wants to export the video, I export frames from a canvas to PNG images. Once this is done, I create a video from the frames at 30 fps with the following code:
var videoMaker = function () {
    console.log('videoMaker');
    var deferred = Q.defer();
    if (!FS.existsSync($rootScope.project.path + '/video')) {
        filestorageService.createFolder($rootScope.project.path + '/video');
    }
    audioMaker().then(function () {
        var commandVideo = new Ffmpeg({
            source: $rootScope.project.path + '/frames/%d.png'
        });
        commandVideo.setFfmpegPath(ffmpegPath);
        commandVideo.addOptions(['-c:v libx264', '-r 30']).withFpsInput(30).format('mp4').on('error', function (err) {
            console.log('video', err);
        }).on('end', function () {
            console.log('video win');
            deferred.resolve();
        }).save($rootScope.project.path + '/video/rendu.mp4');
    });
    return deferred.promise;
};
Then I'm converting the audio which was uploaded by the user to mp3:
var audioMaker = function () {
    console.log('audioMaker');
    var deferred = Q.defer();
    if ($rootScope.project.settings.music.path !== '') {
        FS.writeFileSync($rootScope.project.path + '/music/finalMusic.mp3', null);
        var commandAudio = new Ffmpeg({
            source: $rootScope.project.settings.music.path
        });
        commandAudio.setFfmpegPath(ffmpegPath);
        if ($rootScope.project.settings.music.fadeIn) {
            commandAudio.audioFilters('afade=t=in:ss=0:d=0.5');
        }
        console.log($rootScope.project.settings.music.fadeOut, $rootScope.project.settings.music.fadeIn);
        if ($rootScope.project.settings.music.fadeOut) {
            var time = sceneService.getTotalDuration() - 0.5;
            commandAudio.audioFilters('afade=t=out:st=' + time + ':d=0.5');
        }
        commandAudio.toFormat('mp3').on('end', function () {
            console.log('audio win');
            deferred.resolve();
        }).on('error', function (err) {
            console.log('audio', err);
        }).save($rootScope.project.path + '/music/finalMusic.mp3');
    } else {
        deferred.resolve();
    }
    return deferred.promise;
};
Up to this point everything is fine and those files work well, but then I do this:
var command = new Ffmpeg({
    source: $rootScope.project.path + '/video/rendu.mp4'
});
command.setFfmpegPath(ffmpegPath);
console.log($rootScope.project.settings.music.path !== '');
if ($rootScope.project.settings.music.path !== '') {
    command.addInput($rootScope.project.path + '/music/finalMusic.mp3');
    command.addOptions(['-c:v copy', '-c:a copy']);
    if ($rootScope.project.settings.music.duration > sceneService.getTotalDuration()) {
        command.addOptions(['-shortest']);
    }
    command.on('error', function (err) {
        console.log(err);
    }).on('end', function () {
        console.log("win");
        //filestorageService.rmFolder($rootScope.project.path + '/frames');
    }).save($rootScope.project.path + '/video/rendu.mp4');
} else {
    filestorageService.rmFolder($rootScope.project.path + '/frames');
}
My final file has the music and the right duration, but the frames aren't right. Any ideas?
I finally found out how to fix this. The final video was wrong because I was merging the video and the audio into the same video file, which means ffmpeg was reading the video's data through its output stream while I was writing to that same file. So in my program I created a temp folder containing the video and the audio, then merged them into a new video file in a separate final folder, and finally deleted the temp folder, roughly as sketched below.
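A minimal sketch of that fix, reusing the question's own identifiers ($rootScope, ffmpegPath and filestorageService are assumed from the code above); the only essential change is that the output path differs from both inputs:

var command = new Ffmpeg({
    source: $rootScope.project.path + '/temp/rendu.mp4'   // input lives in the temp folder
});
command.setFfmpegPath(ffmpegPath);
command.addInput($rootScope.project.path + '/temp/finalMusic.mp3');
command.addOptions(['-c:v copy', '-c:a copy']);
command.on('error', function (err) {
    console.log(err);
}).on('end', function () {
    // delete the temp folder only after ffmpeg has finished writing
    filestorageService.rmFolder($rootScope.project.path + '/temp');
}).save($rootScope.project.path + '/final/rendu.mp4');    // output != either input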

Click on image to play sound in PhoneGap

I am clicking on my image to call playAudio(). I am using PhoneGap to do this.
Here is my function:
<script type="text/javascript" charset="utf-8">
    // Audio player
    var my_media = null;

    // Play audio
    function playAudio(src) {
        if (my_media == null) {
            // Create Media object from src
            my_media = new Media(src, onSuccess, onError);
        } // else play current audio
        // Play audio
        my_media.play();
    }

    // onSuccess Callback
    function onSuccess() {
        console.log("playAudio(): Audio Success");
    }

    // onError Callback
    function onError(error) {
        alert('code: ' + error.code + '\n' +
              'message: ' + error.message + '\n');
    }
</script>
Here is my image:
<input type="image" id="myimage" src="numb/equal.png" style="height:100px;width:100px;margin-top:0px" onclick="playAudio('/android_asset/www/Mobile/sound/'+ abc +'.mp3');"/>
On clicking this image it should play the corresponding mp3, but in my console I am getting "Media is not defined".
How can I fix this?
If you are using PhoneGap/Cordova, you shouldn't include /android_asset/www/ in the path to the mp3. You can think of that /android_asset/www/ folder as the 'context root': all relative URLs automatically have it prepended.
So just change onclick="playAudio('/android_asset/www/Mobile/sound/'+ abc +'.mp3');" to
onclick="playAudio('./Mobile/sound/'+ abc +'.mp3');" and it should find the correct path to your audio.
I wouldn't do it this way. With HTML5 you can play audio files very easily with the <audio> tag: make your JavaScript create that element and set its autoplay property, along the lines of the sketch below.
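A minimal sketch of that HTML5 approach (the element creation is an assumption about what the answer intends; src is whatever relative path your app uses):

function playAudio(src) {
    // create an <audio> element on the fly and let it start playing itself
    var audio = document.createElement('audio');
    audio.src = src;
    audio.autoplay = true;
    document.body.appendChild(audio);
}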

Showing an image from LocalStorage in HTML on PhoneGap/Cordova on Windows Phone 7

What is required to show an image from LocalStorage in HTML markup on PhoneGap running on Windows Phone 7?
An image is downloaded from the Internet and stored on the phone (on Windows Phone 7 it can be stored only in the LocalStorage of the application's domain);
this image is to be shown using PhoneGap/Cordova HTML markup with an <img> element;
using <img src="xyz"/> doesn't work.
The solution was quite different from the Android version.
The following steps are necessary:
load the image from local storage as binary data;
place the base64-encoded result in the "src" attribute of the img element.
Code:
var fileName = 'myappname/test.png';
window.requestFileSystem(LocalFileSystem.PERSISTENT, 0, onFileSystemSuccess, onFail);

function onFileSystemSuccess(fileSystem) {
    fileSystem.root.getFile(fileName, null, gotFileEntry, onFail);
}

function gotFileEntry(fileEntry) {
    fileEntry.file(gotFile, onFail);
}

function gotFile(file) {
    var reader = new FileReader();
    reader.onloadend = function (evt) {
        // evt.target.result is a data: URL usable directly as the img src
        $('#outerDiv').html('<img src="' + evt.target.result + '" />');
    };
    reader.readAsDataURL(file);
}

function onFail(evt) {
    console.log('error: ' + evt.target.error.code);
}

Process and upload video using getUserMedia()

I am trying to upload video from users using the media capture interface recently introduced into JavaScript. Setting aside all the difficulties with browser compatibility, I can't even begin to understand the process of saving the video captured by the user.
I was thinking that I could somehow use AJAX to push the streamed video up to the server, but be that as it may, I'm not even sure I am approaching the problem appropriately.
I included my code, which currently only streams under Chrome and Opera.
function hasUserMedia() {
    return !!(navigator.getUserMedia ||
              navigator.webkitGetUserMedia ||
              navigator.mozGetUserMedia ||
              navigator.msGetUserMedia);
}

if (hasUserMedia()) {
    var onFail = function(error) {
        alert("ERROR >> " + error);
    };
    var onPass = function(stream) {
        var video = document.querySelector('video');
        video.src = window.URL.createObjectURL(stream);
        video.onloadedmetadata = function(e) {
            //..what do I put here..?
        };
    };
    navigator.webkitGetUserMedia({video: true, audio: true}, onPass, onFail);
} else {
    alert("ERROR >> USERMEDIA NOT SUPPORTED");
}

function saveVideo() {
    var connection = new XMLHttpRequest();
    connection.onreadystatechange = function() {
        if (connection.readyState == 4 && connection.status == 200) {
            alert("Your streamed video has been saved..!");
        }
    };
    //..what do I type here..?
    connection.open("POST", "savevideo.php", true);
    connection.send();
}
Currently you can't open a WebRTC connection to a server (though someone may be working on it...). AFAIK the only method is to take a screenshot of each frame by capturing the video to a canvas, then sending each frame to the server and compiling the video there.
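A rough sketch of that canvas-capture idea (the savevideo.php endpoint is taken from the question; the frame rate and JPEG quality are arbitrary assumptions, and compiling the frames back into a video is left to the server):

var video = document.querySelector('video');
var canvas = document.createElement('canvas');
var ctx = canvas.getContext('2d');

setInterval(function () {
    // copy the current video frame onto the canvas
    canvas.width = video.videoWidth;
    canvas.height = video.videoHeight;
    ctx.drawImage(video, 0, 0);
    var frame = canvas.toDataURL('image/jpeg', 0.7); // one frame as a base64 JPEG

    // ship the frame to the server
    var xhr = new XMLHttpRequest();
    xhr.open('POST', 'savevideo.php', true);
    xhr.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded');
    xhr.send('frame=' + encodeURIComponent(frame));
}, 100); // ~10 fps; a real pipeline needs smarter timing and buffering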
Check out:
https://webrtc-experiment.appspot.com/RecordRTC/
Also may be helpful:
http://www.html5rocks.com/en/tutorials/getusermedia/intro/
