How can I overwrite a CSV using JavaScript?

After some searching I found a handy function that works with iMacros for saving a string to a CSV file. The problem is I can't figure out how to overwrite the file. The following code will only append data to the file. Thanks.
function WriteFile(path, string) {
    // Import FileUtils.jsm
    Components.utils.import("resource://gre/modules/FileUtils.jsm");
    // Declare file
    var file = new FileUtils.File(path);
    // Declare file path
    file.initWithPath(path);
    // If it exists move on, if not create it
    if (!file.exists()) {
        file.create(file.NORMAL_FILE_TYPE, 0666);
    }
    var charset = 'EUC-JP';
    var fileStream = Components.classes['@mozilla.org/network/file-output-stream;1']
        .createInstance(Components.interfaces.nsIFileOutputStream);
    // 18 = 0x02 (PR_WRONLY) | 0x10 (PR_APPEND), which is why this only ever appends
    fileStream.init(file, 18, 0x200, false);
    var converterStream = Components
        .classes['@mozilla.org/intl/converter-output-stream;1']
        .createInstance(Components.interfaces.nsIConverterOutputStream);
    converterStream.init(fileStream, charset, string.length,
        Components.interfaces.nsIConverterInputStream.DEFAULT_REPLACEMENT_CHARACTER);
    // Write file to location
    converterStream.writeString(string); //+"\r\n"
    converterStream.close();
    fileStream.close();
}

Components.utils.import("resource://gre/modules/NetUtil.jsm");
Components.utils.import("resource://gre/modules/FileUtils.jsm");
// file is nsIFile, data is a string
// You can also optionally pass a flags parameter here. It defaults to
// FileUtils.MODE_WRONLY | FileUtils.MODE_CREATE | FileUtils.MODE_TRUNCATE;
var ostream = FileUtils.openSafeFileOutputStream(file);
var converter = Components.classes["@mozilla.org/intl/scriptableunicodeconverter"]
    .createInstance(Components.interfaces.nsIScriptableUnicodeConverter);
converter.charset = "UTF-8";
var istream = converter.convertToInputStream(data);
// The last argument (the callback) is optional.
NetUtil.asyncCopy(istream, ostream, function(status) {
    if (!Components.isSuccessCode(status)) {
        // Handle error!
        return;
    }
    // Data has been written to the file.
});
Read the sample above; it is taken from this link: https://developer.mozilla.org/en-US/Add-ons/Code_snippets/File_I_O. You should specify the flags parameter you pass to it.
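If you would rather keep your original WriteFile function, a minimal sketch of the fix (assuming the standard NSPR mode flags) is to change the mode passed to fileStream.init: 18 is write + append, which is why the file keeps growing, while write + create + truncate overwrites it instead.
// Sketch: open the file for overwriting instead of appending.
// 0x02 = write only, 0x08 = create if missing, 0x20 = truncate existing contents
// (the same combination as FileUtils.MODE_WRONLY | MODE_CREATE | MODE_TRUNCATE).
var overwriteFlags = 0x02 | 0x08 | 0x20;
fileStream.init(file, overwriteFlags, 0x200, false);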

Related

Read a csv or excel (xlsx) file with just javascript and html?

Is it possible to read an Excel xlsx or csv file, preferably xlsx, using just JavaScript and HTML? All the solutions (SheetJS, d3 {d3 uses the Fetch API}) I have found require a web server. I understand I can get a simple web server using Web Server for Chrome, Python, or Node.js. Furthermore, I understand I can run Chrome with certain flags, but I would rather not do this because of security concerns. I am building a demo for someone who is not web savvy and would like to avoid doing this.
My file structure is very simple:
TestFolder
| index.html
| js/
|   test.js
| data/
|   test.xlsx
| css/
|   test.css
I simply need to read the xlsx and then display that data in an HTML page.
I've added a simple example that accepts Excel or CSV files (the current example accepts a single file), uses the SheetJS library to parse the file, converts the data to JSON, and logs the contents to the console.
This should be more than enough to complete your demo. Hope this helps!
var file = document.getElementById('docpicker');
var viewer = document.getElementById('dataviewer');
file.addEventListener('change', importFile);

function importFile(evt) {
    var f = evt.target.files[0];
    if (f) {
        var r = new FileReader();
        r.onload = e => {
            var contents = processExcel(e.target.result);
            console.log(contents);
        };
        r.readAsBinaryString(f);
    } else {
        console.log("Failed to load file");
    }
}

function processExcel(data) {
    var workbook = XLSX.read(data, {
        type: 'binary'
    });
    return to_json(workbook);
}

function to_json(workbook) {
    var result = {};
    workbook.SheetNames.forEach(function(sheetName) {
        var roa = XLSX.utils.sheet_to_json(workbook.Sheets[sheetName], {
            header: 1
        });
        if (roa.length) result[sheetName] = roa;
    });
    // null replacer, indent by 2 spaces
    return JSON.stringify(result, null, 2);
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/xlsx/0.14.3/xlsx.full.min.js"></script>
<label for="avatar">Choose an Excel or CSV file:</label>
<input type="file" id="docpicker" accept=".csv,application/vnd.ms-excel,.xlt,application/vnd.ms-excel,.xla,application/vnd.ms-excel,.xlsx,application/vnd.openxmlformats-officedocument.spreadsheetml.sheet,.xltx,application/vnd.openxmlformats-officedocument.spreadsheetml.template,.xlsm,application/vnd.ms-excel.sheet.macroEnabled.12,.xltm,application/vnd.ms-excel.template.macroEnabled.12,.xlam,application/vnd.ms-excel.addin.macroEnabled.12,.xlsb,application/vnd.ms-excel.sheet.binary.macroEnabled.12">
<div id="dataviewer"></div>
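Since the markup already includes a #dataviewer element (bound to the unused viewer variable above), a small optional sketch using the same SheetJS build can render the first sheet there instead of only logging JSON to the console:
function displayExcel(data) {
    var workbook = XLSX.read(data, { type: 'binary' });
    var firstSheet = workbook.Sheets[workbook.SheetNames[0]];
    // sheet_to_html produces an HTML table for the sheet, ready to drop into the page
    viewer.innerHTML = XLSX.utils.sheet_to_html(firstSheet);
}
Call it from the r.onload handler in importFile (for example, displayExcel(e.target.result)) to show the data on the page.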
You could try using the Fetch API to download the file and process it with JavaScript.
fetch('data/test.xlsx').then(function(resp) {
    // Process the data here...
    console.log('Data Response: ', resp);
});
It would be much easier to work with if your data file was in JSON format, but this might work for your needs.
Update - Example when the data is in JSON format
fetch('data/test.xlsx').then(function(resp) {
    return resp.json(); // resp.json() returns a Promise, so resolve it before using the data
}).then(function(records) {
    // Assuming that we receive a JSON array.
    console.log('Records: ', records.length);
    records.forEach(function(record) {
        console.log('Record Name: ', record.name); // Assuming each record has a name property
    });
});
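If the data has to stay in xlsx format, a sketch of the same idea (assuming the SheetJS build from the earlier snippet is loaded on the page) is to fetch the file as an ArrayBuffer and hand it to XLSX.read:
fetch('data/test.xlsx').then(function(resp) {
    return resp.arrayBuffer();
}).then(function(buffer) {
    // Parse the binary workbook and convert the first sheet to rows of cells
    var workbook = XLSX.read(new Uint8Array(buffer), { type: 'array' });
    var firstSheet = workbook.Sheets[workbook.SheetNames[0]];
    var rows = XLSX.utils.sheet_to_json(firstSheet, { header: 1 });
    console.log(rows);
});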
Here is how I ended up doing it:
I got an error with readAsBinaryString, so I went with the approach below. I noticed that sheet_to_json didn't work with CSV, so I ran it first, checked the result, and fell back to sheet_to_csv when sheet_to_json returned an empty array.
HTML:
<!-- SheetsJS CSV & XLSX -->
<script src="xlsx/xlsx.full.min.js"></script>
<!-- SheetsJS CSV & XLSX -->
<!-- CSV/XLSX -->
<div class="ms-font-xl ms-settings__content__subtitle">CSV/XLSX Upload:</div>
<input type="file" id="csv-xlsx-file" accept=".csv,application/vnd.ms-excel,.xlt,application/vnd.ms-excel,.xla,application/vnd.ms-excel,.xlsx,application/vnd.openxmlformats-officedocument.spreadsheetml.sheet,.xltx,application/vnd.openxmlformats-officedocument.spreadsheetml.template,.xlsm,application/vnd.ms-excel.sheet.macroEnabled.12,.xltm,application/vnd.ms-excel.template.macroEnabled.12,.xlam,application/vnd.ms-excel.addin.macroEnabled.12,.xlsb,application/vnd.ms-excel.sheet.binary.macroEnabled.12">
<!-- CSV/XLSX -->
JS:
var csv_file_elm = document.getElementById("csv-xlsx-file");
csv_file_elm.addEventListener('change', CSV_XLSX_File_Selected_Event);

async function CSV_XLSX_File_Selected_Event() {
    var id = this.id;
    var inputElement = document.getElementById(id);
    let ext = inputElement.value;
    ext = ext.split(".");
    ext = ext[ext.length - 1];
    var files = inputElement.files || [];
    if (!files.length) return;
    var file = files[0];
    var reader = new FileReader();
    reader.onloadend = async function (event) {
        var arrayBuffer = reader.result;
        var options = { type: 'array' };
        var workbook = XLSX.read(arrayBuffer, options);
        var sheetName = workbook.SheetNames[0]; // use the first sheet
        var sheet = workbook.Sheets[sheetName];
        var sheet_to_html = XLSX.utils.sheet_to_html(sheet);
        var sheet_to_json = XLSX.utils.sheet_to_json(sheet);
        var results;
        if (sheet_to_json.length === 0) {
            // sheet_to_json came back empty (e.g. for CSV input), so fall back to sheet_to_csv
            results = [XLSX.utils.sheet_to_csv(sheet)];
        } else {
            results = sheet_to_json;
        }
        let Parsed_File_Obj = {
            "sheet_to_html": sheet_to_html,
            "results": results,
            "ext": ext
        };
        console.log('Parsed_File_Obj');
        console.log(Parsed_File_Obj);
    };
    reader.readAsArrayBuffer(file);
}

How can I get the local path of the currently open excel workbook?

I'm using the default 'Excel web add-in' template in Visual Studio 2017. I'm trying to create an Excel add-in that inserts a copy of an existing workbook into the current one. The first step is to get the full path and name of the current workbook. I got the code from here. I'm using the beta Excel API. At the line 'var myFile = document.getElementById("file");', myFile is always null. I assume the null value is because the workbook isn't 'loaded', but the workbook does open when I run the program.
Here is the code from Home.js:
'use strict';
(function () {
    Office.onReady(function () {
        // Office is ready
        $(document).ready(function () {
            // The document is ready
            $('#RunMacroButton').click(RunMacro);
        });
    });

    function RunMacro() {
        var myFile = document.getElementById("file");
        var reader = new FileReader();
        reader.onload = (function (event) {
            Excel.run(function (context) {
                // strip off the metadata before the base64-encoded string
                var startIndex = event.target.result.indexOf("base64,");
                var workbookContents = event.target.result.substr(startIndex + 7);
                Excel.createWorkbook(workbookContents);
                return context.sync();
            }).catch(errorHandlerFunction);
        });
        // read in the file as a data URL so we can parse the base64-encoded string
        reader.readAsDataURL(myFile.files[0]);
    }
})();
The line 'var myFile = document.getElementById("file");' always returns null because there is no element with the id "file". It is also the wrong way to get the path of the currently open workbook; use 'Office.context.document.url' instead to get the path.
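A minimal sketch of that call, assuming it runs inside the add-in after Office is ready:
Office.onReady(function () {
    // Office.context.document.url holds the full path (or URL) of the open workbook
    var workbookPath = Office.context.document.url;
    console.log('Current workbook path: ' + workbookPath);
});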

How to get objects from txt file to use in javascript array?

I am very new to coding and JavaScript; just a few days in. I was wondering if there is a way to import objects from a text file (separated by lines) to use in my array, replyText. Here is what I'm working with:
// Variables
var theButton = document.getElementById("theButton");
var mainText = document.getElementById("mainText");
var replyText = [...,...,...,...,];
var i = 0;
// Functions
function nextText() {
    mainText.innerHTML = replyText[i++ % replyText.length];
}
// MAIN SCRIPT
theButton.onclick = function() {
    nextText();
};
You can use XMLHttpRequest to get the .txt file; just pass its path.
var file = new XMLHttpRequest();
file.open("GET", "file:/../file.txt", false);
file.onreadystatechange = function () {
    if (file.readyState === 4) {
        if (file.status === 200 || file.status == 0) {
            var text = file.responseText;
            alert(text);
        }
    }
};
file.send(null); // without calling send() the request is never made
EDIT: you must pass the absolute path file:///C:/your/path/to/file.txt
For client/browser-side file reading:
You cannot easily read a file on the client side, as you are not allowed direct access to the client's file system. However, you can place an input element of type file in your HTML markup, via which the client can load a file for your program to process. For example:
<input type="file" id="file" onchange="readFile()" />
Now when the client selects a file for use, the readFile() function will be called, which will read and process the file. Here's an example:
function readFile() {
    var file = document.getElementById('file').files[0]; // select the input element from the DOM
    var fileReader = new FileReader(); // initialize a new FileReader object
    fileReader.onload = function() { // onload is a property to assign, not a function to call
        console.log(this.result); // <--- You can access the file data from this variable
        // Do necessary processing on the file
    };
    fileReader.readAsText(file); // Read the file as text
}
For more information on File Reader, check out the docs.
To add on to Paulo's solution, here is how to split the string by line breaks (the newline character):
var replyText = text.split("\n"); // "\n" is new line character
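Putting the pieces together for the original replyText array, a rough sketch (assuming an input element with id "file" as in the answer above, and one reply per line in the text file) could look like this:
var theButton = document.getElementById("theButton");
var mainText = document.getElementById("mainText");
var replyText = [];
var i = 0;

function readFile() {
    var file = document.getElementById('file').files[0];
    var fileReader = new FileReader();
    fileReader.onload = function () {
        // Fill the array used by the button handler, one reply per line
        replyText = this.result.split("\n");
    };
    fileReader.readAsText(file);
}

theButton.onclick = function () {
    if (replyText.length) {
        mainText.innerHTML = replyText[i++ % replyText.length];
    }
};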

How can I set a folder for After Effects to watch for JSON/text files?

I'm successfully using the following ExtendScript (with json2.js) to read a local JSON file and change a text layer in my project. How could I modify this script so that, when run, it continuously 'watches' for new JSON files added to the directory and runs the rest of the script?
#include "json2.js" // jshint ignore:line
var script_file = File($.fileName); // get the location of the script file
var script_file_path = script_file.path; // get the path
var file_to_read = File(script_file_path + "/unique-job-id.json");
var my_JSON_object = null; // create an empty variable
var content; // this will hold the String content from the file
if (file_to_read !== false) { // if it is really there
    file_to_read.open('r'); // open it
    content = file_to_read.read(); // read it
    my_JSON_object = JSON.parse(content); // now evaluate the string from the file
    //alert(my_JSON_object.arr[1]); // if it all went fine we now have a JSON object instead of a string
    var theComposition = app.project.item(1);
    var theTextLayer = theComposition.layers[1];
    theTextLayer.property("Source Text").setValue(my_JSON_object.arr[2]);
    file_to_read.close(); // always close files after reading
} else {
    alert("Error reading JSON"); // if something went wrong
}
Look at the Object Model:
Application scheduleTask() method
app.scheduleTask(stringToExecute, delay, repeat)
Description:
Schedules the specified JavaScript for delayed execution.
So app.scheduleTask(string, delay, true) is exactly what you are looking for. Like this:
app.scheduleTask('taskToWatchFile()', 1000, true);
function taskToWatchFile() {
    /*
     * Add your code here
     */
}
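A sketch of what that task body could contain, assuming the JSON files arrive in the same folder as the script and reusing the parsing code from the question (the file-tracking object and names here are illustrative):
var processedFiles = {}; // remember which JSON files have already been handled

function taskToWatchFile() {
    var watchFolder = new Folder(File($.fileName).path);
    var jsonFiles = watchFolder.getFiles("*.json"); // list JSON files in the watch folder
    for (var n = 0; n < jsonFiles.length; n++) {
        var f = jsonFiles[n];
        if (!processedFiles[f.name]) {
            processedFiles[f.name] = true;
            f.open('r');
            var my_JSON_object = JSON.parse(f.read());
            f.close();
            var theComposition = app.project.item(1);
            theComposition.layers[1].property("Source Text").setValue(my_JSON_object.arr[2]);
        }
    }
}

app.scheduleTask('taskToWatchFile()', 1000, true); // re-check every second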

Apps script write to Big Query unknown error

This is supposed to read in a CSV and then write it to BigQuery. When it runs, however, nothing is written and no errors are logged. I read that I need to write a CSV and then turn it into an octet stream. I am not sure whether this is compatible with Google BigQuery.
function test() {
    try {
        var tableReference = BigQuery.newTableReference();
        tableReference.setProjectId(PROJECT_ID);
        tableReference.setDatasetId(datasetId);
        tableReference.setTableId(tableId);
        var schema = "CUSTOMER:string, CLASSNUM:integer, CLASSDESC:string, CSR:string, CSR2:string, INSURANCE:string, REFERRALGENERAL:string, REFERRALSPECIFIC:string, NOTES:string, INMIN:integer, INHR:integer, OUTMIN:integer, OUTHR:integer, WAITMIN:integer, WAITHR:integer, DATETIMESTAMP:float, DATEYR:integer, DATEMONTH:integer, DATEDAY:integer";
        var load = BigQuery.newJobConfigurationLoad();
        load.setDestinationTable(tableReference);
        load.setSourceUris(URIs);
        load.setSourceFormat('NEWLINE_DELIMITED_JSON');
        load.setSchema(schema);
        load.setMaxBadRecords(0);
        load.setWriteDisposition('WRITE_TRUNCATE');
        var configuration = BigQuery.newJobConfiguration();
        configuration.setLoad(load);
        var newJob = BigQuery.newJob();
        newJob.setConfiguration(configuration);
        var loadr = DriveApp.getFilesByName("test.csv");
        var x = loadr.next().getBlob();
        Logger.log(x.getDataAsString());
        var d = DriveApp.getFilesByName("test.csv");
        var id = d.next().getId();
        Logger.log(id);
        var data = DocsList.getFileById(id).getBlob().getDataAsString();
        var mediaData = Utilities.newBlob(data, 'application/octet-stream');
        BigQuery.Jobs.insert(newJob, PROJECT_ID, mediaData);
    } catch (error) {
        Logger.log("A" + error.message);
    }
}
Your sourceFormat is wrong for CSV files:
The format of the data files. For CSV files, specify "CSV". For
datastore backups, specify "DATASTORE_BACKUP". For newline-delimited
JSON, specify "NEWLINE_DELIMITED_JSON". The default value is CSV.
https://developers.google.com/bigquery/docs/reference/v2/jobs#configuration.load.sourceUris
On the other hand, I don't think you need load.setSourceUris(URIs) at all, since you are loading from a local file rather than from Google Cloud Storage. Check this Python example: https://developers.google.com/bigquery/loading-data-into-bigquery
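A sketch of the corrected load configuration, assuming the rest of the function stays as in the question (only the format line changes and the source-URIs line is dropped):
var load = BigQuery.newJobConfigurationLoad();
load.setDestinationTable(tableReference);
// The uploaded blob is CSV, so declare it as such instead of NEWLINE_DELIMITED_JSON
load.setSourceFormat('CSV');
// No setSourceUris() call: the file contents are supplied directly as the mediaData blob
load.setSchema(schema);
load.setMaxBadRecords(0);
load.setWriteDisposition('WRITE_TRUNCATE');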
