Input file multiple use with Firebase - javascript

Hello, I'm working on a project that stores images in Firebase Storage and then saves the URL of each image in the Firebase database. The problem is that I cannot store those URLs in a single record. I would like to know if I can build an array with the URLs of the images uploaded from a multiple file input and save them in a single record, like this:
parques: {
    nombre: "xxxxx",
    imgs: [
        "urls"
    ]
}
Here is the code:
var files = document.getElementById('file');
var fullDirectory = 'imgs';
var btnDatos = $('#btn-datos');
var btnImgs = $('#btn-imgs');

// process images
files.addEventListener('change', function(e) {
    for (var i = 0; i < e.target.files.length; i++) {
        var imageFile = e.target.files[i];
        Imagencollage(imageFile);
    }
});

// upload images
function Imagencollage(imageFile) {
    var links = [];
    btnImgs.click(function() {
        var storageRef = firebase.storage().ref(fullDirectory + "/" + imageFile.name);
        var task = storageRef.put(imageFile);
        var name = imageFile.name;
        task.on('state_changed', function(snapshot) {
            //??
        }, function(error) {
            alert('the file was not uploaded');
        }, function() {
            task.snapshot.ref.getDownloadURL().then(function(downloadURL) {
                links.push(downloadURL);
                direccionImg(name, links);
            });
        });
    });
}

// upload data
function direccionImg(name, links) {
    btnDatos.click(function() {
        var db = firebase.database().ref('parques/');
        var parque = $('#datos').val();
        var data = {
            nombre: parque,
            imgs: links
        };
        db.push().set(data);
    });
}
Thanks in advance for the help.
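For what it's worth, a minimal sketch of one way to get that structure: upload every selected file, wait for all of the download URLs with Promise.all, and only then push a single record. The uploadAll helper name is made up for illustration, and the sketch assumes the same element ids and Firebase SDK calls as the code above.
// Hypothetical sketch: upload every selected file, wait for all download URLs,
// then push one record containing the whole array of URLs.
function uploadAll(fileList) {
    var uploads = Array.prototype.map.call(fileList, function(imageFile) {
        var storageRef = firebase.storage().ref('imgs/' + imageFile.name);
        // put() returns an upload task; its then() resolves when the upload finishes.
        return storageRef.put(imageFile).then(function(snapshot) {
            return snapshot.ref.getDownloadURL();
        });
    });
    return Promise.all(uploads); // resolves to an array of URLs
}

$('#btn-datos').click(function() {
    var fileList = document.getElementById('file').files;
    uploadAll(fileList).then(function(urls) {
        return firebase.database().ref('parques/').push().set({
            nombre: $('#datos').val(),
            imgs: urls // a single record with every URL
        });
    });
});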

Related

CSV file to JS object for nodejs

I'm looking to retrieve data from a local CSV file, store it in an object, and then send it via an API. But when I try to display data[i].id to see if I get a value, it is undefined.
I use ya-csv to parse the CSV file.
myFile.csv
id;name;email;
1;John;john#doe.com
2;Jane;jane#doe.com
csvParser.js
const csv = require('ya-csv');

const data = [];
const file = 'myFile.csv';

function loopForPrint() {
    for (var i = 0; i < data.length; i++) {
        let id = data[i].id;
        console.log(id);
    }
}

function csvToJson() {
    var reader = csv.createCsvFileReader(file, {columnsFromHeader: true, 'separator': ';'});
    reader.addListener('data', function(data) {
        data.push(data);
        loopForPrint();
    });
    reader.addListener('end', function() {
        console.log('end');
    });
}
First of all, you can remove the trailing ; so that there won't be an empty key with an undefined value.
Secondly, you can replace this code:
reader.addListener('data', function(data) {
    data.push(data);
    loopForPrint();
})
with this one:
reader.addListener('data', function(row) {
    data.push(row);
    loopForPrint();
})
because data is defined both globally and locally (as the listener's parameter), which creates a conflict.
Here is the complete code:
const csv = require('ya-csv');

const data = [];
const file = 'myFile.csv';

function loopForPrint() {
    for (var i = 0; i < data.length; i++) {
        console.log(data[i].id);
    }
}

function csvToJson() {
    var reader = csv.createCsvFileReader(file, {columnsFromHeader: true, 'separator': ';'});
    reader.addListener('data', function(row) {
        data.push(row);
    });
    reader.addListener('end', function() {
        loopForPrint();
        console.log('end');
    });
}

csvToJson();
Output
1
2
end
I have also created a StackBlitz for this if you want to look at it.
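If you want to hand the parsed rows to other code (for example, the API call mentioned in the question) rather than just print them, one option is to wrap the reader in a promise. A minimal sketch, using only the 'data' and 'end' events shown above; the csvToRows helper name is made up for illustration:
const csv = require('ya-csv');

// Hypothetical helper: resolve with every parsed row once the file has been read.
function csvToRows(file) {
    return new Promise(function(resolve) {
        const rows = [];
        const reader = csv.createCsvFileReader(file, {columnsFromHeader: true, 'separator': ';'});
        reader.addListener('data', function(row) {
            rows.push(row);
        });
        reader.addListener('end', function() {
            resolve(rows);
        });
    });
}

csvToRows('myFile.csv').then(function(rows) {
    // rows is the full array, e.g. rows[0].id === '1'
    console.log(rows);
});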

Get data out from callback inside query

I am trying to get data out of a query nested inside another query. I have tried everything without success.
getvideos() {
    var sectionHolder = [];
    var ids = [];
    var titles = [];
    var videos = [];
    var challengeCl = Parse.Object.extend("Challenges");
    var query = new Parse.Query(challengeCl);
    query.find().then((data) => {
        for (var key = 0; key < data.length; key++) {
            var object = data[key];
            var videoid = object.id;
            var title = object.get('title');
            ids.push(videoid);
            titles.push(title);
            var video = Parse.Object.extend('Videos');
            var queryv = new Parse.Query(video);
            queryv.equalTo('challengeid', videoid);
            queryv.find().then((video) => {
                videos.push(video); // I want to get this data out of this scope
            });
            sectionHolder.push({title: title, data: videos}); // <---- I want to put the videos data in data
        }
        console.log(sectionHolder);
        this.setState({listData: sectionHolder});
    });
}
I read the Parse documentation, but there is nothing related to this.
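One common way to handle this is to collect the inner query promises and wait for all of them with Promise.all before building sectionHolder and calling setState. A minimal sketch, assuming the same Challenges/Videos classes and challengeid field as above:
getvideos() {
    var challengeQuery = new Parse.Query(Parse.Object.extend("Challenges"));
    challengeQuery.find().then((challenges) => {
        // One inner query per challenge; each promise resolves to a {title, data} section.
        var sectionPromises = challenges.map((challenge) => {
            var videoQuery = new Parse.Query(Parse.Object.extend("Videos"));
            videoQuery.equalTo('challengeid', challenge.id);
            return videoQuery.find().then((videos) => {
                return {title: challenge.get('title'), data: videos};
            });
        });
        // Wait for every inner query before touching state.
        return Promise.all(sectionPromises);
    }).then((sectionHolder) => {
        console.log(sectionHolder);
        this.setState({listData: sectionHolder});
    });
}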

Multiple Base64 image within 1 array causes overriding

Let's say I want to upload 2 images via AJAX; I will send them using this format:
{ "base64StringName:" : "[ {"1": "base64_1"}, {"2" : "base64_2"} ]"}
So it's an object that contains an array of objects of base64 strings.
To do so, I need to create an array and push JSON objects into it.
Here is my code for this:
<script>
    var test = '';
    var imageArray = [];
    var imageObject = {};

    $('#inputFileToLoad').on('change', function() {
        imageArray.length = 0;
        fileCount = this.files.length;
        for (i = 0; i < fileCount; i++) {
            var file = document.querySelector('#inputFileToLoad').files[i];
            var reader = new FileReader();
            reader.readAsDataURL(file);
            reader.onload = function() {
                test = reader.result.split(',')[1];
                console.log(test);
                imageObject[i] = test;
                imageArray.push(imageObject);
            };
            reader.onerror = function(error) {
                alert('Error: ', error);
            };
        }
    });

    $('#inputFileToLoadButton').on('click', function() {
        console.log(imageArray);
        $.ajax({
            url: "some url",
            method: "POST",
            data: {
                "base64String": imageArray
            },
            success: function() {
                swal("Success!", "Upload Finished!", "success");
                // add redirect!
            },
            error: function(jqXHR) {
                swal("Error", jqXHR.responseText, "error");
            }
        });
    });
</script>
However, I encounter a problem: the first object inside the array somehow gets overwritten.
It becomes
{ "base64StringName:" : "[ {"1": "base64_2"}, {"2" : "base64_2"} ]"}
Also, when I print out the first base64-encoded file at console.log(test); it is undefined, but when I print out the second base64-encoded file, it prints only the second file.
try this:
var test = '';
var imageArray = [];
var imageObject;

$('#inputFileToLoad').on('change', function() {
    imageArray.length = 0;
    fileCount = this.files.length;
    for (i = 0; i < fileCount; i++) {
        var file = document.querySelector('#inputFileToLoad').files[i];
        var reader = new FileReader();
        reader.readAsDataURL(file);
        reader.onload = function(e) {
            // e.target is this FileReader; this.index is the loop index captured by bind()
            test = e.target.result.split(',')[1];
            imageObject = {};
            imageObject[this.index] = test;
            imageArray.push(imageObject);
        }.bind({
            index: i
        });
        reader.onerror = function(error) {
            alert('Error: ', error);
        };
    }
});
});
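This works because bind() gives each onload handler its own copy of the loop index, and a fresh imageObject is created inside the handler, so later iterations no longer overwrite earlier entries. A shorter alternative sketch, assuming the same markup and globals as above, is to declare the loop variables with let/const so each iteration gets its own binding:
$('#inputFileToLoad').on('change', function() {
    imageArray.length = 0;
    for (let i = 0; i < this.files.length; i++) {
        const reader = new FileReader();
        reader.onload = function() {
            // let/const give each iteration its own i and reader
            const entry = {};
            entry[i] = reader.result.split(',')[1];
            imageArray.push(entry);
        };
        reader.readAsDataURL(this.files[i]);
    }
});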

Parse does not show all my file once I add a new file

My web application allows users to create and view files they have created. When the user logs in they can see all the files they created; however, when the user creates a new file and then clicks to view all the files, the newly created file is not there.
Here is my code for saving the file in parse:
router.post('/newfile', function(req, res) {
    var usercode = req.body.code;
    console.log(usercode);
    newname = req.body.name;
    console.log(newname);
    var bytes = [];
    var doc = {};
    for (var i = 0; i < usercode.length; i++) {
        bytes.push(usercode.charCodeAt(i));
    }
    console.log("passed byetes");
    var parseFile = new Parse.File(newname, bytes);
    parseFile.save();
    var FileClass = Parse.Object.extend("File");
    var newFile = new FileClass();
    newFile.save({
        user: Parse.User.current(),
        fileName: newname,
        javaFile: parseFile
    });
    var newFileClass = Parse.Object.extend("File");
    var query = new Parse.Query(newFileClass);
    query.find(function(results) {
        for (var i = 0; i < results.length; i++) {
            var object = results[i];
            var codefile = object.get('javaFile');
            temp = codefile.name();
            name = temp.split("-").pop();
            url = codefile.url();
            doc[name] = url;
        }
    }).then(function() {
        console.log("Inside then(function()");
        console.log(doc);
        res.json({
            FIles: JSON.stringify(doc)
        });
    });
});
So once the new file is saved, I do a Parse query find to get all the files again. When the program reaches the line console.log(doc) it should print all the files, including the new one, but it only prints the old files. However, once I log out and log back in, the new file is there. How do I fix this so that after the user saves the new file it appears along with the other files?
save() runs asynchronously, so all of the code after save() runs before the save completes, i.e. before the query will be able to show you anything new. Restructure like this:
save().then(function () {
    // everything that follows your save() here
    var FileClass = Parse.Object.extend("File");
    // etc.
});
EDIT: the better code formatting in your edit revealed that there are several async operations being attempted; use promises to keep them straight...
router.post('/newfile', function(req, res) {
    var usercode = req.body.code;
    console.log(usercode);
    newname = req.body.name;
    console.log(newname);
    var bytes = [];
    var doc = {};
    for (var i = 0; i < usercode.length; i++) {
        bytes.push(usercode.charCodeAt(i));
    }
    console.log("passed bytes");
    var parseFile = new Parse.File(newname, bytes);
    var FileClass = Parse.Object.extend("File");
    var newFile = new FileClass();
    parseFile.save().then(function() {
        return newFile.save({
            user: Parse.User.current(),
            fileName: newname,
            javaFile: parseFile
        });
    }).then(function() {
        var query = new Parse.Query(FileClass);
        return query.find();
    }).then(function(results) {
        for (var i = 0; i < results.length; i++) {
            var object = results[i];
            var codefile = object.get('javaFile');
            temp = codefile.name();
            name = temp.split("-").pop();
            url = codefile.url();
            doc[name] = url;
        }
        console.log("Inside then(function()");
        console.log(doc);
        res.json({
            FIles: JSON.stringify(doc)
        });
    });
});
I find even this a little tough on the eyes, and I can't be sure I didn't inadvertently code in a syntax error for you. An even better restructure would be to divide this big function into several promise-returning smaller ones, named for what they promise to do.
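A rough sketch of that split, assuming the same request shape and Parse classes as above; the helper names saveJavaFile and listFiles are made up for illustration:
// Hypothetical helpers, each returning a promise so the route can chain them.
function saveJavaFile(name, code) {
    var bytes = [];
    for (var i = 0; i < code.length; i++) {
        bytes.push(code.charCodeAt(i));
    }
    var parseFile = new Parse.File(name, bytes);
    var FileClass = Parse.Object.extend("File");
    var newFile = new FileClass();
    return parseFile.save().then(function() {
        return newFile.save({user: Parse.User.current(), fileName: name, javaFile: parseFile});
    });
}

function listFiles() {
    var query = new Parse.Query(Parse.Object.extend("File"));
    return query.find().then(function(results) {
        var doc = {};
        results.forEach(function(object) {
            var codefile = object.get('javaFile');
            doc[codefile.name().split("-").pop()] = codefile.url();
        });
        return doc;
    });
}

router.post('/newfile', function(req, res) {
    saveJavaFile(req.body.name, req.body.code).then(listFiles).then(function(doc) {
        res.json({FIles: JSON.stringify(doc)});
    });
});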

HTML5 IndexedDB - How to merge all records into one

My idea is to download a huge file from the server in chunks:
1. Store the chunks in an IndexedDB.
2. After downloading all chunks, merge all records into one (like SQL SELECT * FROM XXX ...).
3. Save it to disk with saveAs(), or create a URL to the IndexedDB data... (or any other idea?)
I do not know how to do steps 2 and 3.
(Below is an example of storing 10,000 records in the DB; the expected output after step 2 should be SusanSusanSusanSusanSusan...)
if (transaction) {
    transaction.oncomplete = function () {
    }
    transaction.onabort = function () {
        writeToConsoleScreen("transaction aborted.");
        localDatabase.db.close();
    }
    transaction.ontimeout = function () {
        writeToConsoleScreen("transaction timeout.");
        localDatabase.db.close();
    }
    var store = transaction.objectStore(osTableName);
    if (store) {
        var req;
        var customer = {};
        // create ten thousand records
        for (var loop = 0; loop < 10000; loop++) {
            customer = {};
            customer.fname = 'Susan';
            req = store.add(customer);
            req.onsuccess = function (ev) {
            }
            req.onerror = function (ev) {
                writeToConsoleScreen("Failed to add record." + " Error: " + ev.message);
            }
        }
    }
}
<!DOCTYPE html>
<script>
var open = indexedDB.open('chunks-example');
open.onupgradeneeded = function() {
    // Create schema if necessary
    var db = open.result;
    db.createObjectStore('chunks');
};

// 1. Chunks stored into an IndexedDB.
open.onsuccess = function() {
    var db = open.result;
    var tx = db.transaction('chunks', 'readwrite');
    var store = tx.objectStore('chunks');
    for (var i = 0; i < 10; ++i) {
        // For realz, this would be via
        // XMLHttpRequest.response and async.
        var chunk = new Blob(['chunk ' + i + '\n'],
                             {type: 'application/octet-stream'});
        store.put(chunk, i);
    }
    tx.oncomplete = function() { merge(db); };
};

// 2. After "download" of all chunks, merge all records into one
function merge(db) {
    var tx = db.transaction('chunks');
    var store = tx.objectStore('chunks');
    var chunks = [];
    var request = store.openCursor();
    request.onsuccess = function() {
        var cursor = request.result;
        if (cursor) {
            chunks.push(cursor.value);
            cursor.continue();
        } else {
            saveAs('myfile', new Blob(chunks,
                                      {type: 'application/octet-stream'}));
        }
    };
}

// 3. Save to disk with "saveAs()"
function saveAs(filename, blob) {
    var a = document.documentElement.appendChild(document.createElement('a'));
    a.href = URL.createObjectURL(blob);
    a.download = filename;
    a.click();
    a.parentElement.removeChild(a);
}
</script>
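As a side note, in browsers that support it, IDBObjectStore.getAll() can replace the cursor loop in step 2. A minimal sketch, assuming the same 'chunks' store and saveAs() helper as above:
// Alternative to the cursor loop: read every stored chunk in one request.
// getAll() is supported in current browsers; older ones still need the cursor.
function mergeWithGetAll(db) {
    var tx = db.transaction('chunks');
    var store = tx.objectStore('chunks');
    var request = store.getAll();
    request.onsuccess = function() {
        // request.result is an array of the stored Blobs in key order.
        saveAs('myfile', new Blob(request.result, {type: 'application/octet-stream'}));
    };
}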
