Unzip a folder with files as content in AngularJS, client side - JavaScript

I have a big issue, or rather I can't find a solution to it.
I need to make a $http.get request that returns a zip file, and inside this zip file are images that I need to use on the client side.
I tried different settings such as responseType = "arraybuffer", with no solution.
My idea is: get the zip file, then perhaps create a folder on the client side and reuse the images that come in that zip file.
Does someone know how to implement a solution?
Thanks a million.

I looked into using JSZip (see its API documentation and upgrade guide) to download a zip and extract image files, and came up with the following proof-of-concept code (outside AngularJS):
getZip using JSZip
function getZip(url) { // returns a Promise for the zip
    var resolve, reject;
    var req = new XMLHttpRequest();
    req.open('GET', url, true);
    req.responseType = "arraybuffer";
    req.onload = function() {
        var zip = new JSZip();
        // loadAsync returns a promise for the populated zip object
        resolve(zip.loadAsync(req.response));
    };
    req.onerror = req.onabort = function() {
        reject(new Error("GET from " + url + " failed"));
    };
    req.send();
    return new Promise(function(r, j) { resolve = r; reject = j; });
}
which returns a promise that fulfills with a zip object if successful, and rejects with an error otherwise.
A test to download a zip, extract and display an image
window.onload = function() {
    getZip("./test.zip")
        .then(processZip)
        .catch(function(err) {
            console.log("something went wrong: " + err);
        });
};

function processZip(zip) {
    zip.file("test.png")
        .async("base64")
        .then(function(data) {
            var img = new Image();
            img.src = "data:image/png;base64," + data;
            document.body.appendChild(img);
        })
        .catch(function(err) { throw err; });
}
which requires a test.zip file containing test.png in the same folder as the test page.
Although the concept appears to work, there may be limitations (RAM usage, size of image files, performance), and browser caching (without immediate expiry) may be another way of speeding up image access.
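Since the question is about AngularJS, the same approach could be moved into a service built on $http. The following is only a minimal sketch, assuming JSZip is available globally and an existing module named 'app' (both assumptions, not part of the original code):
// Sketch: load a zip via $http and turn one image inside it into a data URL.
// 'app' and the file name 'test.png' are assumptions for illustration only.
angular.module('app').factory('zipImages', ['$http', function($http) {
    return {
        load: function(zipUrl) {
            return $http.get(zipUrl, { responseType: 'arraybuffer' })
                .then(function(response) {
                    // response.data is an ArrayBuffer; hand it to JSZip
                    return new JSZip().loadAsync(response.data);
                })
                .then(function(zip) {
                    return zip.file('test.png').async('base64');
                })
                .then(function(base64) {
                    // usable directly as an <img> src
                    return 'data:image/png;base64,' + base64;
                });
        }
    };
}]);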

Related

Express.js: respond with image file instead of binary file?

In Express, I'm trying to respond to a React request with an image; however, I get the image binary file instead of the image file.
I tried using res.sendFile and res.download, but they send the binary file. I also tried res.attachment, but it works inconsistently and somehow leaves the response pending.
I tried specifying the file type with res.type("image/jpeg"), but it doesn't make a difference.
router.get("/photo", (req, res) => {
res.type("image/jpeg");
res.sendFile("/absolute/path/image.jpg");
});
I'm expecting an image file with normal properties of name, size, etc.
I think the distinction you are drawing between an "image file" and an "image binary file" is misleading you. All bitmap image files are encoded in binary (or, on rare occasions, base64), so I don't think that gets to the root of your problem.
res.sendFile should work just fine. The problem is in your client-side JavaScript.
To display image data in the browser, you will ultimately have to use canvas or img HTML elements. The easiest way to asynchronously load an image in your app would be to update the src attribute of an img element that already exists in the DOM to the address of the image and let the browser handle the loading for you.
However, if you want to manipulate the image data prior to loading it to an img or canvas element, I would recommend using the FileReader API, as opposed to manually parsing the binary.
The key step with this approach is to set the response data type to "blob" when you make your GET request.
The blob data type references the binary image file, but allows you to use the browser's built-in File interface.
The code below requests an image file as a blob and then converts the blob into a base64 encoded data url that you can use as the src attribute of an img element or load to a canvas.
var xhr = new XMLHttpRequest();
xhr.responseType = "blob";
xhr.onload = function(event) {
    fileToDataUrl(event.target.response, function(result) {
        console.log(result);
    });
};
xhr.open('GET', "https://i.imgur.com/7VhSUEH.jpg", true);
xhr.send();

function fileToDataUrl(fileObj, callback) {
    var reader = new FileReader();
    reader.addEventListener("load", function() {
        callback(reader.result);
    }, false);
    reader.readAsDataURL(fileObj);
}
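For example, if the goal is simply to display the downloaded image, the data URL produced above can be assigned to an img element's src. A minimal sketch reusing the fileToDataUrl helper:
xhr.onload = function(event) {
    fileToDataUrl(event.target.response, function(result) {
        var img = document.createElement('img');
        img.src = result; // base64 data URL produced by FileReader
        document.body.appendChild(img);
    });
};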
Try this:
const fs = require("fs");

router.get("/photo", (req, res, next) => {
    fs.readFile("/absolute/path/image.jpg", function(err, data) {
        if (err) return next(err);
        res.set("Content-Type", "image/jpeg");
        return res.status(200).end(data, 'binary');
    });
});
var filename = __dirname + imagePath;
var readStream = fs.createReadStream(filename);
readStream.on('open', function() {
    // stream the image straight to the response
    readStream.pipe(res);
});
readStream.on('error', function(err) {
    res.end(err.message);
});
Please don't forget to require the fs dependency.
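For completeness, here is a hedged sketch of the streaming variant wrapped in a full route, with the fs require, a Content-Type header and basic error handling added (the image path is a placeholder, not taken from the original post):
const fs = require("fs");
const path = require("path");

router.get("/photo", (req, res) => {
    const filename = path.join(__dirname, "images", "image.jpg"); // placeholder path
    const readStream = fs.createReadStream(filename);
    readStream.on("open", () => {
        res.type("image/jpeg"); // set the content type before piping
        readStream.pipe(res);
    });
    readStream.on("error", () => {
        res.status(404).end(); // file missing or unreadable
    });
});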

Create Javascript File/Blob object from image URI

Is it possible to create a File or Blob object for my image out of an image URI?
Using Cordova Image Picker, a plugin on my mobile app, I can retrieve photo URI's that look like this: "file:///data/user/0/..../image.jpg"
However, I am now trying to create a File or Blob object, which Google Firebase requires for my images to be uploaded. I just can't figure out how. Every solution I try seems to be wrong, to the point where I think I'm looking at it from a completely wrong perspective. I'm new to JavaScript. Thanks a lot!
Have a look at a question that I posted a while back which deals with this but for videos (same principle applies): Uploading video to firebase (3.0) storage using cordovaFileTransfer
You need to read the file as an ArrayBuffer using Cordova's file plugin, then create a Blob from it; something like:
var file_path = "root/to/directory";
var name = "filename.jpg";
$cordovaFile.readAsArrayBuffer(file_path, name)
    .then(function(success) {
        // success is an ArrayBuffer containing the image data
        console.log(success);
        var blob = new Blob([success], { type: "image/jpeg" });
        console.log(blob);
        var uploadTask = storageRef.child(name).put(blob);
        uploadTask.on('state_changed', function(snapshot) {
            // Observe state change events such as progress, pause, and resume
            var progress = (snapshot.bytesTransferred / snapshot.totalBytes) * 100;
            console.log('Upload is ' + progress + '% done');
        }, function(error) {
            // Handle unsuccessful uploads
            console.log("Error uploading: " + error);
        }, function() {
            // Handle successful uploads on complete
            // For instance, get the download URL: https://firebasestorage.googleapis.com/...
            var downloadURL = uploadTask.snapshot.downloadURL;
            console.log("Success!", downloadURL);
        });
    }, function(error) {
        // error reading the file
        console.log("Failed to read file from directory", error.code);
    });
If your program passes you a full path to the image, you will need to separate the file name from the path to the directory where the image is stored; just look for everything after the final /.
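A small sketch of that split (the URI below is a made-up example, not the asker's actual path):
// Split a full file URI into the directory path and the file name
function splitPath(fullPath) {
    var lastSlash = fullPath.lastIndexOf('/');
    return {
        path: fullPath.substring(0, lastSlash),  // e.g. "file:///data/user/0/app"
        name: fullPath.substring(lastSlash + 1)  // e.g. "image.jpg"
    };
}

var parts = splitPath("file:///data/user/0/app/image.jpg"); // hypothetical URI
$cordovaFile.readAsArrayBuffer(parts.path, parts.name).then(/* ... as above ... */);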

Downloading Torrent with Node.JS

I was wondering if anyone had an example of how to download a torrent using NodeJS? Essentially, I have an RSS Feed of torrents that I iterate through and grab the torrent file url, then would like to initiate a download of that torrent on the server.
I've parsed and looped through the RSS just fine, however I've tried a few npm packages but they've either crashed or were just unstable. If anyone has any suggestions, examples, anything... I would greatly appreciate it. Thanks.
router.get('/', function(req, res) {
    var options = {};
    parser.parseURL('rss feed here', options, function(err, articles) {
        var i = 0;
        var torrent;
        for (var title in articles.items) {
            console.log(articles.items[i]['url']);
            // download torrent here
            i++;
        }
    });
});
You can use node-torrent for this. Install it from npm (npm install node-torrent), then download a torrent like this:
var fs = require('fs');
var Client = require('node-torrent');

var client = new Client({ logLevel: 'DEBUG' });
var torrent = client.addTorrent('a.torrent');

// when the torrent completes, move its files to another area
torrent.on('complete', function() {
    console.log('complete!');
    torrent.files.forEach(function(file) {
        var newPath = '/new/path/' + file.path;
        fs.rename(file.path, newPath, function(err) {
            if (err) console.log(err);
        });
        // while still seeding, make sure file.path points to the right place
        file.path = newPath;
    });
});
Alternatively, for more control, you can use the Transmission daemon and control it via its XML-RPC protocol. There's a Node module called transmission that does the job. Example:
var Transmission = require('transmission');

var transmission = new Transmission({
    port: 9091,
    host: '127.0.0.1'
});

transmission.addUrl('my.torrent', {
    "download-dir": "/home/torrents"
}, function(err, result) {
    if (err) {
        return console.log(err);
    }
    var id = result.id;
    console.log('Just added a new torrent.');
    console.log('Torrent ID: ' + id);
    getTorrent(id); // fetch the torrent's status (helper defined elsewhere in the module's examples)
});
If you are working with video torrents, you may be interested in Torrent Stream Server. It's a server that downloads and streams video at the same time, so you can watch the video without fully downloading it. It's based on the torrent-stream library.
Another interesting project is webtorrent. It's a nice torrent library that works both in Node.js and in the browser and has streaming support. From my experience, it doesn't have very good support in the browser, but it should fully work in Node.js.
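For illustration, a minimal webtorrent sketch in Node.js (the magnet URI and download path are placeholders; this is not tested against the asker's RSS feed):
// Sketch: download a torrent with webtorrent; magnet URI and path are placeholders.
const WebTorrent = require('webtorrent');
const client = new WebTorrent();

client.add('magnet:?xt=urn:btih:...', { path: '/downloads' }, (torrent) => {
    console.log('Downloading:', torrent.infoHash);
    torrent.on('done', () => {
        console.log('Download finished');
        torrent.files.forEach((file) => console.log('Saved', file.path));
    });
});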

Saving images from URL using JSzip

I'm using JSzip to download the html of a div. The div has images inside of it (they're not base64 encoded). Is there a way I can use JSzip to download the files from their image path url? or do they have to be base64 encoded?
My current code is just the basic demo code from the JSzip site (http://stuk.github.io/jszip/)
var zip = new JSZip();
var email = $('.Result').html();
zip.file("test.html", email);
var content = zip.generate({type:"blob"});
// see FileSaver.js
saveAs(content, "example.zip");
You might want to try using JSZipUtils; it has a call just for downloading images from URLs. Also take a look at the example in the JSZip documentation; I found it to be very good. You can find a working example with code here.
This is just the downloading part, which I'm also using to download images from social media using their image source URLs.
function urlToPromise(url) {
    return new Promise(function(resolve, reject) {
        JSZipUtils.getBinaryContent(url, function(err, data) {
            if (err) {
                reject(err);
            } else {
                resolve(data);
            }
        });
    });
}

var zip = new JSZip();
zip.file(filename, urlToPromise(url), { binary: true });
zip.generateAsync({ type: "blob" })
    .then(function callback(blob) {
        // see FileSaver.js
        saveAs(blob, "example.zip");
    });
Here is my solution (adapted from here) building within an angular framework (though readily applicable to other frontend approaches):
NOTE: this only works if you are packaging resources -- EVEN IMAGES -- from the same origin, or ones that are served with an Access-Control-Allow-Origin: * header.
Make sure the JSZip UMD is included in your global namespace. In my angular case, I saved it via npm i -S jszip, and then copied the node_modules/jszip/dist/jszip.js script to my src/assets folder and included it in angular.json's scripts array.
Angular only: to get the jszip typescript definition file to work, copy node_modules/jszip/index.d.ts somewhere into src
Install file-saver with npm i -S file-saver and import it as an ES6 module (see below).
Run the following function when you want the download event to occur:
import { saveAs } from 'file-saver';

async downloadData() {
    // Fetch the image and parse the response stream as a blob
    const imageBlob = await fetch('[YOUR CORS IMAGE URL]').then(response => response.blob());
    // create a new file from the blob object
    const imgData = new File([imageBlob], 'filename.jpg');
    // Copy-pasted from JSZip documentation
    const zip = new JSZip();
    zip.file('Hello.txt', 'Hello World\n');
    const img = zip.folder('images');
    img.file('smile.gif', imgData, { base64: true });
    zip.generateAsync({ type: 'blob' }).then(function(content) {
        saveAs(content, 'example.zip');
    });
}
First of all, you need to download all the images with AJAX. If they are on the same domain you are in luck; otherwise you need CORS or a proxy.
var xhr = new XMLHttpRequest();
xhr.addEventListener('load', function() {
    if (xhr.status == 200) {
        // Do something with xhr.response (not responseText), which should be a Blob
    }
});
xhr.open('GET', 'http://target.url');
xhr.responseType = 'blob';
xhr.send(null);
When you have got the image, you have to manipulate the src of every <img>: either replace it with a base64 data URL, or point it at the folder inside the zip where you have put the images with JSZip.
var reader = new FileReader();
reader.onload = function() {
    // this.result is a base64 data URL that can be used as an img src
    showout.value = this.result;
};
reader.readAsDataURL(xhr.response);
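Putting those pieces together, one possible sketch (the .Result selector and file names are assumptions from the question, and fetch is used instead of raw XHR for brevity) adds each downloaded blob to an images/ folder in the zip and rewrites the copied HTML to point at it:
// Sketch: clone the div, download each image as a blob, store it in an
// "images/" folder inside the zip and rewrite the <img> src to match.
var zip = new JSZip();
var imgFolder = zip.folder("images");
var container = document.querySelector(".Result").cloneNode(true);

var downloads = Array.prototype.map.call(container.querySelectorAll("img"), function(img, i) {
    var name = "image" + i + ".png"; // hypothetical naming scheme
    return fetch(img.src)
        .then(function(response) { return response.blob(); })
        .then(function(blob) {
            imgFolder.file(name, blob);                // store the binary image in the zip
            img.setAttribute("src", "images/" + name); // point the copied HTML at it
        });
});

Promise.all(downloads).then(function() {
    zip.file("test.html", container.outerHTML);
    return zip.generateAsync({ type: "blob" });
}).then(function(blob) {
    saveAs(blob, "example.zip"); // see FileSaver.js
});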

node.js unable to serve image when it is being overwritten

I have a node.js app that periodically poll images and store them onto filesystem.
The problem is, when node.js is overwriting the images, whoever visits the website at that moment will see blank images everywhere (because the images are being overwritten at that moment).
This only happens for the few seconds whenever it is time to poll the images, but it is annoying. Is there any way to still serve the image while we are overwriting it?
Code to save/overwrite image:
// This method saves a remote path into a file name.
// It will first check if the path has something to download.
function saveRemoteImage(path, fileName)
{
    isImagePathAvailable(path, function(isAvailable)
    {
        if (isAvailable)
        {
            console.log("image path %s is valid. download now...", path);
            console.log("Downloading image file from %s -> %s", path, fileName);
            var ws = fs.createWriteStream(fileName);
            ws.on('error', function(err) { console.log("ERROR DOWNLOADING IMAGE FILE: " + err); });
            request(path).pipe(ws);
        }
        else
        {
            console.log("image path %s is invalid. do not download.", path);
        }
    });
}
Code to serve image:
fs.exists(filePath, function(exists)
{
    if (exists)
    {
        // serve file
        var stat = fs.statSync(filePath);
        res.writeHead(200, {
            'Content-Type': 'image/png',
            'Content-Length': stat.size
        });
        var readStream = fs.createReadStream(filePath);
        readStream.pipe(res);
        return;
    }
I'd suggest writing the new version of the image to a temporary file:
var ws = fs.createWriteStream(fileName + '.tmp');
var temp = request(path).pipe(ws);
and renaming it when the file is entirely downloaded:
temp.on('finish', function() {
    fs.rename(fileName + '.tmp', fileName, function(err) {
        if (err) console.log("Error renaming temp file: " + err);
    });
});
We use the 'finish' event, which is fired when all the data has been written to the underlying system, ie. the filesystem.
Maybe it is better to:
serve the old version of the file while downloading;
download the new file to a temporary file (say _fileName, for example);
rename the file after downloading, thus overwriting the original file.
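Combining those points, a hedged rewrite of the asker's saveRemoteImage using a temporary file might look like this (error handling kept minimal):
// Sketch: download to a temporary file, then rename it over the original once
// the write has finished, so readers never see a half-written image.
function saveRemoteImage(path, fileName) {
    isImagePathAvailable(path, function(isAvailable) {
        if (!isAvailable) {
            return console.log("image path %s is invalid. do not download.", path);
        }
        var tmpName = fileName + '.tmp';
        var ws = fs.createWriteStream(tmpName);
        ws.on('error', function(err) { console.log("ERROR DOWNLOADING IMAGE FILE: " + err); });
        ws.on('finish', function() {
            // only replace the served file once the download is complete
            fs.rename(tmpName, fileName, function(err) {
                if (err) console.log("ERROR RENAMING TEMP FILE: " + err);
            });
        });
        request(path).pipe(ws);
    });
}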
