In my controller, when I try to read a file sent from the browser via AJAX, a directory suddenly gets created inside my public folder, with a layout like:
'3d6c3049-839b-40ce-9aa3-b76f08bf140b' -> file -> myfile
exports.assetAdd = function(req, res) {
    var d = JSON.parse(req.body.data);
    var f = req.files.file;

    return; // even with this early return, I can see the unwanted directory being created

    // Create S3 service object
    var s3 = new AWS.S3({
        apiVersion: '2017-03-01'
    });

    // console.log("file", f)
    fs.readFile(f.file, function(err, data) {
        return res.json(data);
    });
};
How can I prevent this directory from being created, or remove it?
This is an issue with the package; there is already an open issue for it:
https://github.com/yahoo/express-busboy/issues/16
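Until that's fixed upstream, a workaround is to keep express-busboy's temporary upload directory out of your public folder and clean it up after you've read the file. A minimal sketch, assuming you extend the app with express-busboy; the route pattern, temp path, and fs-extra cleanup below are my own placeholders, not your exact setup:
var os = require('os');
var path = require('path');
var fse = require('fs-extra');            // only needed for the cleanup suggestion below
var busboy = require('express-busboy');

busboy.extend(app, {
    upload: true,
    path: path.join(os.tmpdir(), 'uploads'),  // keep temp files out of /public
    allowedPath: /^\/asset\/add$/             // placeholder: only this route accepts file uploads
});

// After you're done with the uploaded file (f = req.files.file), remove its
// uuid directory, for example:
//   fse.remove(path.dirname(path.dirname(f.file)), function (err) { /* ... */ });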
I am trying to load my own trained data into tesseract.js. Since the file is stored locally, I tried to load everything offline. The code I used is shown below:
<script src="tesseract.js"></script>
<script>
    // Set the worker, core and lang to local files
    (function() {
        var path = (function() { // absolute path
            var pathArray = window.location.pathname.split('/');
            pathArray.pop(); // Remove the last component ("**.html")
            return window.location.origin + pathArray.join("/");
        })();
        console.log(path);

        window.Tesseract = Tesseract.create({
            workerPath: path + '/worker.js',
            //langPath: path + '/traineddata/',
            corePath: path + '/index.js',
        });
    })();
</script>
<script>
    function recognizeFile(file) {
        document.querySelector("#log").innerHTML = '';
        Tesseract.recognize(file, {
            lang: document.querySelector('#langsel').value
        })
        .progress(function(packet) {
            console.info(packet);
            progressUpdate(packet);
        })
        .then(function(data) {
            console.log(data);
            progressUpdate({ status: 'done', data: data });
        });
    }
</script>
The code above works fine if langPath is not set, but when I point langPath to a local folder, Tesseract fails to load anything, with the following error:
Failed loading language 'eng'
Tesseract couldn't load any languages!
...
AdaptedTemplates != NULL:Error:Assert failed:in file ../classify/adaptmatch.cpp, line 190
SCRIPT0: abort() at Error
at Na (file:///C:/Users/user/Downloads/tesseract.js-master/dist/index.js:36:24)
at ka (file:///C:/Users/user/Downloads/tesseract.js-master/dist/index.js:511:83)
at Module.de._abort (file:///C:/Users/user/Downloads/tesseract.js-master/dist/index.js:377:166)
at $L (file:///C:/Users/user/Downloads/tesseract.js-master/dist/index.js:387:55709)
at jpa (file:///C:/Users/user/Downloads/tesseract.js-master/dist/index.js:392:22274)
at lT (file:///C:/Users/user/Downloads/tesseract.js-master/dist/index.js:391:80568)
at mT (file:///C:/Users/user/Downloads/tesseract.js-master/dist/index.js:391:80698)
at BS (file:///C:/Users/user/Downloads/tesseract.js-master/dist/index.js:391:69009)
at bP (file:///C:/Users/user/Downloads/tesseract.js-master/dist/index.js:387:110094)
at jT (file:///C:/Users/user/Downloads/tesseract.js-master/dist/index.js:391:80280)
at RJ (file:///C:/Users/user/Downloads/tesseract.js-master/dist/index.js:387:19088)
at QJ (file:///C:/Users/user/Downloads/tesseract.js-master/dist/index.js:387:17789)
at zI (file:///C:/Users/user/Downloads/tesseract.js-master/dist/index.js:403:90852)
at tw (file:///C:/Users/user/Downloads/tesseract.js-master/dist/index.js:401:49079)
at rw (file:///C:/Users/user/Downloads/tesseract.js-master/dist/index.js:401:48155)
at lw (file:///C:/Users/user/Downloads/tesseract.js-master/dist/index.js:401:39071)
at _v (file:///C:/Users/user/Downloads/tesseract.js-master/dist/index.js:401:22565)
at aw (file:///C:/Users/user/Downloads/tesseract.js-master/dist/index.js:401:24925)
at cw (file:///C:/Users/user/Downloads/tesseract.js-master/dist/index.js:401:27237)
at oj (file:///C:/Users/user/Downloads/tesseract.js-master/dist/index.js:386:24689)
at Og (file:///C:/Users/user/Downloads/tesseract.js-master/dist/index.js:386:10421)
at $.prototype.Recognize (file:///C:/Users/user/Downloads/tesseract.js-master/dist/index.js:558:379)
at Anonymous function (file:///C:/Users/user/Downloads/tesseract.js-master/dist/worker.js:8814:9)
at Anonymous function (file:///C:/Users/user/Downloads/tesseract.js-master/dist/worker.js:8786:9)
at xhr.onerror (file:///C:/Users/user/Downloads/tesseract.js-master/dist/worker.js:8429:9)
If this abort() is unexpected, build with -s ASSERTIONS=1 which can give more information.
index.js (8,1)
I have both eng.traineddata and eng.traineddata.gz in the /traineddata folder, since the un-gzip step apparently gets skipped. Is there anything I have neglected? Any help is appreciated.
I know this question is old, but I recently needed to use Tesseract.js in one of my projects and had to load the data files locally, so here is what I did.
Instead of creating a new worker, I modified the default worker options that are already available. So I didn't use Tesseract.createWorker; I set the paths directly and used recognize instead.
Tesseract.workerOptions.langPath =
    window.location.origin                   // the protocol://domain.com part
    + "/scripts/tesseract/dist/";            // location of the data files

// You could set the core and worker paths the same way, but I didn't need to:
Tesseract.workerOptions.workerPath =
    window.location.origin
    + "/scripts/tesseract/dist/worker.js";   // location of worker.js

Tesseract.workerOptions.corePath =
    window.location.origin
    + "/scripts/tesseract/dist/index.js";    // location of index.js

// An example langPath would be protocol://domain.com/scripts/tesseract/dist/
In my case I only overrode langPath, leaving the worker and core paths untouched and pointing at the default CDN.
PS: when I used local worker.js and core.js paths, I was getting an uncaught error on postMessage() in worker.js; that's why I use a local path for the language data only. I still don't know why it happens or how to fix it, but you can follow this issue here and here.
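For completeness, with those workerOptions overridden, the recognition call itself is just the normal tesseract.js v1 API (the image variable and 'eng' below are placeholders):
// recognize() should now fetch eng.traineddata.gz from the local langPath set above.
Tesseract.recognize(someImageOrCanvas, { lang: 'eng' })
    .progress(function (packet) {
        console.log(packet.status, packet.progress);
    })
    .then(function (result) {
        console.log(result.text);
    });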
I solved the problem by taking the corePath file from tesseract.js-core 0.1.0
window.Tesseract = Tesseract.create({
    workerPath: window.location.origin + "/tesseract/worker.js", // tesseract.js-1.0.10
    langPath: window.location.origin + "/tesseract/",
    corePath: window.location.origin + "/tesseract/index.js",    // tesseract.js-core-0.1.0
});
And the language .gz files come from https://github.com/naptha/tessdata/tree/gh-pages/3.02
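For reference, the local layout this snippet assumes looks roughly like this (paths mirror the ones in the code above):
/tesseract/worker.js            <- from the tesseract.js 1.0.10 dist
/tesseract/index.js             <- from tesseract.js-core 0.1.0
/tesseract/eng.traineddata.gz   <- from the naptha/tessdata 3.02 branch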
From a Swift 2.1-based iOS client app using AFNetworking 2.0, I'm uploading a PNG image file to a Node.js server. The issue I'm running into is that when I use a .png file name extension (as is usual), the file gets larger when it is uploaded. The original file size is 917,630 bytes, and the uploaded size is 1,298,016 bytes. Curiously, this is not a completely corrupting change to the file contents; i.e., I can still view the image with Preview on Mac OS X (though see Update1 below).
Here are the guts of my client app upload code:
public class func uploadFileTo(serverURL: NSURL, fileToUpload:NSURL, withParameters parameters:[String:AnyObject]?, completion:((serverResponse:[String:AnyObject]?, error:NSError?)->())?) {
    Log.special("serverURL: \(serverURL)")

    var sendParameters:[String:AnyObject]? = parameters

    #if DEBUG
        if (SMTest.session.serverDebugTest != nil) {
            if parameters == nil {
                sendParameters = [String:AnyObject]()
            }
            sendParameters![SMServerConstants.debugTestCaseKey] = SMTest.session.serverDebugTest
        }
    #endif

    self.manager.POST(serverURL.absoluteString, parameters: sendParameters, constructingBodyWithBlock: { (formData: AFMultipartFormData) in
        // NOTE!!! the name: given here *must* match up with that used on the server in the "multer" single parameter.
        // Was getting an odd try/catch error here, so this is the reason for "try!"; see https://github.com/AFNetworking/AFNetworking/issues/3005
        // 12/12/15; I think this issue was because I wasn't doing the do/try/catch, however.
        do {
            try formData.appendPartWithFileURL(fileToUpload, name: SMServerConstants.fileUploadFieldName)
        } catch let error {
            let message = "Failed to appendPartWithFileURL: \(fileToUpload); error: \(error)!"
            Log.error(message)
            completion?(serverResponse: nil, error: Error.Create(message))
        }
    }, success: { (request: AFHTTPRequestOperation, response:AnyObject) in
        if let responseDict = response as? [String:AnyObject] {
            print("AFNetworking Success: \(response)")
            completion?(serverResponse: responseDict, error: nil)
        }
        else {
            let error = Error.Create("No dictionary given in response")
            print("**** AFNetworking FAILURE: \(error)")
            completion?(serverResponse: nil, error: error)
        }
    }, failure: { (request: AFHTTPRequestOperation?, error:NSError) in
        print("**** AFNetworking FAILURE: \(error)")
        completion?(serverResponse: nil, error: error)
    })
}
On the Node.js side, here's the package.json:
{
"name": "node1",
"version": "1.0.0",
"description": "Test",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "",
"license": "ISC",
"dependencies": {
"body-parser": "^1.14.1",
"fs-extra": "^0.26.2",
"google-auth-library": "^0.9.7",
"googleapis": "^2.1.6",
"mongodb": "^2.0.49",
"multer": "^1.1.0",
"sweet": "^0.1.1",
"tracer": "^0.8.2"
},
"devDependencies": {
"sweet.js": "^0.7.4"
}
}
Here's the initial part of my server "index.js" file (a lot of which is specific to my project):
// Before the files get moved to their specific-user destination.
const initialUploadDirectory = './initialUploads/';
// TODO: What is safe mode in mongo? E.g., see https://mongodb.github.io/node-mongodb-native/api-generated/collection.html#insert
// See also options on insert https://mongodb.github.io/node-mongodb-native/api-generated/collection.html#insert
var express = require('express');
var bodyParser = require('body-parser');
var app = express();
// https://github.com/expressjs/multer
var multer = require('multer');
var fse = require('fs-extra');
// Local modules.
var ServerConstants = require('./ServerConstants');
var Mongo = require('./Mongo');
var Operation = require('./Operation');
var PSLock = require('./PSLock');
var PSOutboundFileChange = require('./PSOutboundFileChange.sjs');
var FileTransfers = require('./FileTransfers');
var File = require('./File.sjs')
var logger = require('./Logger');
var PSOperationId = require('./PSOperationId.sjs');
var PSFileIndex = require('./PSFileIndex');
// See http://stackoverflow.com/questions/31496100/cannot-app-usemulter-requires-middleware-function-error
// See also https://codeforgeek.com/2014/11/file-uploads-using-node-js/
// TODO: Limit the size of the uploaded file.
// TODO: Is there a way with multer to add a callback that gets called periodically as an upload is occurring? We could use this to "refresh" an activity state for a lock to make sure that, even with a long-running upload (or download) if it is still making progress, that we wouldn't lose a lock.
var upload = multer({ dest: initialUploadDirectory}).single(ServerConstants.fileUploadFieldName)
// http://stackoverflow.com/questions/4295782/how-do-you-extract-post-data-in-node-js
app.use(bodyParser.json({extended : true}));
And here's the initial (relevant) part of the REST/API entry point for the upload:
app.post('/' + ServerConstants.operationUploadFile, upload, function (request, response) {
    var op = new Operation(request, response);
    if (op.error) {
        op.end();
        return;
    }

    /* request.file has the info on the uploaded file: e.g.,
    { fieldname: 'file',
      originalname: 'upload.txt',
      encoding: '7bit',
      mimetype: 'text/plain',
      destination: './uploads/',
      filename: 'e9a4080c46777d6341518afedec8af31',
      path: 'uploads/e9a4080c46777d6341518afedec8af31',
      size: 22 }
    */

    op.validateUser(function (psLock, psOperationId) {
        // User is on the system.
        //console.log("request.file: " + JSON.stringify(request.file));

        // Make sure user/device has started uploads, i.e., make sure this user/device has the lock.
        if (!psLock) {
            var message = "Error: Don't have the lock!";
            logger.error(message);
            op.endWithRCAndErrorDetails(ServerConstants.rcServerAPIError, message);
        } else if (psOperationId.operationStatus == ServerConstants.rcOperationStatusInProgress) {
            // This check is to deal with error recovery.
            var message = "Error: Have lock, but operation is already in progress!";
            logger.error(message);
            op.endWithRCAndErrorDetails(ServerConstants.rcServerAPIError, message);
        } else {
            logger.info("We've got the lock!");

            // Leave the parameter checking below until after checking for the lock because we're just checking for a lock, and not creating a lock.
            // 12/12/15; Ran into a bug where the upload failed, and the .file object wasn't defined.
            if (!isDefined(request.file) || !isDefined(request.file.path)) {
                var message = "No file uploaded!";
                logger.error(message);
                op.endWithRCAndErrorDetails(ServerConstants.rcServerAPIError, message);
                return;
            }

            logger.info(JSON.stringify(request.file));
            ...
And here's the logger.info output:
{"fieldname":"file","originalname":"Test.png","encoding":"7bit","mimetype":"image/png","destination":"./initialUploads/","filename":"da17e16904ed376fb21052c80b88da12","path":"initialUploads/da17e16904ed376fb21052c80b88da12","size":1298016}
When I change the file extension to .bin (just some non-standard extension), the file size is not increased; i.e., it remains the smaller value I'd originally expected.
The ratio between the two file sizes is 1.41453091 (= 1,298,016 / 917,630), which looks oddly close to the square root of 2. Some encoding issue?
Thoughts?
Update1:
When I use ImageMagick's identify program, I get reasonable output for the image before uploading, but after uploading (with the larger image), I get:
$ identify -verbose example2.png
identify: CgBI: unhandled critical chunk `example2.png' # error/png.c/MagickPNGErrorHandler/1630.
identify: corrupt image `example2.png' # error/png.c/ReadPNGImage/3959.
Update2:
I think I can now say for sure that this is a client-side issue related to AFNetworking and not a server-side issue related to Node.js. I make this inference because when I make a simplified Node.js server (using all of the same "header" code as my actual server) as below:
index.js
'use strict';
require('sweet.js').loadMacro('./macros.sjs');
var server = require("./Server.sjs");
Server.sjs
// Before the files get moved to their specific-user destination.
const initialUploadDirectory = './initialUploads/';
// TODO: What is safe mode in mongo? E.g., see https://mongodb.github.io/node-mongodb-native/api-generated/collection.html#insert
// See also options on insert https://mongodb.github.io/node-mongodb-native/api-generated/collection.html#insert
var express = require('express');
var bodyParser = require('body-parser');
var app = express();
// https://github.com/expressjs/multer
var multer = require('multer');
var fse = require('fs-extra');
// Local modules.
var ServerConstants = require('./ServerConstants');
var Mongo = require('./Mongo');
var Operation = require('./Operation');
var PSLock = require('./PSLock');
var PSOutboundFileChange = require('./PSOutboundFileChange.sjs');
var FileTransfers = require('./FileTransfers');
var File = require('./File.sjs')
var logger = require('./Logger');
var PSOperationId = require('./PSOperationId.sjs');
var PSFileIndex = require('./PSFileIndex');
// See http://stackoverflow.com/questions/31496100/cannot-app-usemulter-requires-middleware-function-error
// See also https://codeforgeek.com/2014/11/file-uploads-using-node-js/
// TODO: Limit the size of the uploaded file.
// TODO: Is there a way with multer to add a callback that gets called periodically as an upload is occurring? We could use this to "refresh" an activity state for a lock to make sure that, even with a long-running upload (or download) if it is still making progress, that we wouldn't lose a lock.
var upload = multer({ dest: initialUploadDirectory}).single(ServerConstants.fileUploadFieldName)
// http://stackoverflow.com/questions/4295782/how-do-you-extract-post-data-in-node-js
app.use(bodyParser.json({extended : true}));
// Server main.
Mongo.connect();
app.post('/upload', upload, function (request, response) {
    console.log(JSON.stringify(request.file));
    var result = {};
    response.end(JSON.stringify(result));
});
app.listen(8081);
console.log('Server running at http://127.0.0.1:8081/');
and then test that using Postman, uploading my example PNG file, I get the uploaded file with no increase in size. Here's the Node.js output:
{"fieldname":"file","originalname":"Example.png","encoding":"7bit","mimetype":"image/png","destination":"./initialUploads/","filename":"ac7c8c93d50bf48cf6042409ef990658","path":"initialUploads/ac7c8c93d50bf48cf6042409ef990658","size":917630}
Then, when I drop the above app.post method into my actual server, and again test the upload with Postman (not my example app using AFNetworking), I still do not get the increase in file size:
{"fieldname":"file","originalname":"Example.png","encoding":"7bit","mimetype":"image/png","destination":"./initialUploads/","filename":"a40c738a172eb9ea6cccce357338beeb","path":"initialUploads/a40c738a172eb9ea6cccce357338beeb","size":917630}
So far so good, without using AFNetworking.
And finally, when I add a further test to my iOS client app, using the simplified app.post on the server and AFNetworking on the client (I'm using AFNetworking 3 now), doing only the file-upload POST, I get:
{"fieldname":"file","originalname":"Example.png","encoding":"7bit","mimetype":"image/png","destination":"./initialUploads/","filename":"8c1116337fd2650d4f113b227252e555","path":"initialUploads/8c1116337fd2650d4f113b227252e555","size":1298016}
That is, using AFNetworking again on the client, I again get the larger file size.
Aha! I've now learned that this is not specific to AFNetworking, but is definitely client side. I switched to uploading an NSData object and from the following code:
let fileData = NSData(contentsOfURL: fileToUpload)
Log.special("size of fileData: \(fileData!.length)")
I find that my file isn't the length I thought it was. This gives a length of 1,298,016 bytes. Note that this file is in the app bundle and shows up in the Terminal as 917,630 bytes. WTF? Is Apple's process of putting the .png into the bundle changing the .png file?
Creating an Archive of the app using Xcode, and digging into that directory structure, I find that yes indeed, in the app bundle, the file size is 1298016 bytes. Ouch. This "solves" my problem by introducing two other questions: (1) Why does Apple change the size/content of .png files in your app bundle, and (2) How to usefully do testing/development in an app where you need sample image data in the bundle?
I'm using the following library to unzip files in Node:
https://github.com/EvanOxfeld/node-unzip
The code I use is:
var extractor = unzip.Extract({
    path: 'C://TestFolder//TestZip'
}).on('close', function () {
    console.log("Success to unzip");
}).on('error', function (err) {
    console.log("error to unzip", err);
});
req.pipe(extractor);
The problem is that for some zip files I get the following errors (for others it works fine):
[Error: invalid signature: 0x8080014]
[Error: invalid signature: 0x83870008]
....
This error doesn't give a lot of info...
Searching the web, I found this:
https://github.com/EvanOxfeld/node-unzip/issues/41
So I installed and required the unzip2 package instead of unzip. The issue now is that I'm getting the following error:
unzip Error: invalid signature: 0xff000001
I use the same code for unzip2 that I used for unzip (provided in the post above). Do I need to use it differently? Any hints on how to solve this?
UPDATE
I send the zip file from Postman as follows:
You can temporarily save the ZIP file on your disk and then extract it using adm-zip.
Here is some sample code:
Client Side:
<form action="/upload" method="post" enctype="multipart/form-data">
    Select image to upload:
    <input type="file" name="fileToUpload" id="fileToUpload">
    <input type="submit" value="Upload Image" name="submit">
</form>
Server Side
Using multer to save the uploaded file, and adm-zip to extract it.
You need to install both:
npm install --save multer
npm install --save adm-zip
After installing them, here is an example of using them together:
var multer = require('multer');   // a module for saving files uploaded from a form
var AdmZip = require('adm-zip');  // a module for extracting files
var express = require('express'); // a module for receiving HTTP traffic

var app = express();
var upload = multer({ dest: 'uploads/' });

app.post('/upload', upload.single('fileToUpload'), function (req, res) {
    console.log('The file was uploaded to: ' + req.file.path);
    var zip = new AdmZip(req.file.path);
    zip.extractAllTo("/destination_folder/");
    res.end('File extracted'); // respond so the client request doesn't hang
});
Information about the modules I used:
https://github.com/expressjs/multer , https://github.com/cthackers/adm-zip
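As a side note, if you only need to inspect the archive or extract selected files rather than everything, adm-zip can also enumerate entries. A small sketch reusing req.file.path from the handler above:
var zip = new AdmZip(req.file.path);
zip.getEntries().forEach(function (entry) {
    if (!entry.isDirectory) {
        console.log('archive contains:', entry.entryName);
    }
});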
Node-unzip2 patches this problem.
Example:
var fs = require('fs');
var fstream = require('fstream');
var unzip = require('unzip2');

var readStream = fs.createReadStream('path/to/archive.zip');
var writeStream = fstream.Writer('output/path');

readStream
    .pipe(unzip.Parse())
    .pipe(writeStream);
Try your unzip solution, but for receiving the binary data, attach this middleware and then get your file from req.rawBody:
app.use(function(req, res, next) {
    var data = new Buffer('');
    req.on('data', function(chunk) {
        data = Buffer.concat([data, chunk]);
    });
    req.on('end', function() {
        req.rawBody = data;
        next();
    });
});
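With that in place, one way to feed req.rawBody to the unzip code is to write it to a temporary file first and then pipe it through unzip2, as in the question. This is only a minimal sketch; the route name and temp paths are placeholders:
var fs = require('fs');
var unzip = require('unzip2');

app.post('/upload', function (req, res) {
    var tmpZip = './tmp/upload.zip';
    // req.rawBody was populated by the middleware above.
    fs.writeFile(tmpZip, req.rawBody, function (err) {
        if (err) return res.status(500).end('could not write temp file');
        fs.createReadStream(tmpZip)
            .pipe(unzip.Extract({ path: './tmp/extracted' }))
            .on('close', function () {
                res.end('extracted');
            })
            .on('error', function (e) {
                res.status(500).end('unzip failed: ' + e.message);
            });
    });
});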
As @Amina said, you can temporarily save the ZIP file on your disk and then extract it with whatever unzip package you like: unzip, adm-zip, unzip2, unzippy, and so on.
For reference, my app structure looks like this:
//App path --> C:\xampp\htdocs\service
service\
|
-- tmp\
|
-- app.js
|
-- index.html
You're using unzip2, right?
Here's my code:
Server Side:
I'm using unzip2 to extract the zip file; you can test it with Postman too. Don't forget to use enctype="multipart/form-data" when you post it. :D
var express = require("express");
var fs = require("fs");
var unzip = require("unzip2");
var app = express();
var multer = require("multer");
var multer_dest = multer({dest: "./tmp"}).single('zipFile');
app.post("/upload_zip",multer_dest,function(req,res){
console.log(req.file);
fs.createReadStream(req.file.path).pipe(unzip.Extract({path: 'C:\\TestFolder\\TestZip'}));
result = {
file:req.file,
message:"File has been extracted"
};
fs.unlink(req.file.path, function (e) {
if (e) throw e;
console.log('successfully deleted '+req.file.path);
});
res.end(JSON.stringify(result));
});
var server = app.listen(8081,function(){
var host = server.address().address;
var port = server.address().port;
console.log("Example App Listening at http://%s:%s",host,port);
})
Output :
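If you'd rather test from the command line than Postman, the endpoint above can be exercised with curl (assuming the server is running locally and archive.zip is a test file in the current directory):
curl -F "zipFile=@archive.zip" http://localhost:8081/upload_zip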
I would like to know how I can define a list of JS files in one place (preferably within Gruntfile.js or an external JSON file) and have it written out in a template, switching between dev and prod environments.
JS file list definition
var jsFiles = [
    'js/one.js',
    'js/two.js'
];
Template (this is how I would like it to work)
if (isProd) {
    print('<script type="text/javascript" src="js/combined.js"></script>\n');
} else {
    for (var i = 0, len = jsFiles.length; i < len; i++) {
        print('<script type="text/javascript" src="' + jsFiles[i] + '"></script>\n');
    }
}
Template Result (development)
<script src="js/one.js" type="text/javascript"></script>
<script src="js/two.js" type="text/javascript"></script>
Template Result (production)
<script src="js/combined.js" type="text/javascript"></script>
You can use the config module to define a list of resources for both development and production environments: same property, but defined in different files, config/development.js and config/production.js.
The config module will automatically load the correct file for the current environment:
// config/development.js
module.exports = {
    jsFiles: [ 'js/one.js', 'js/two.js' ]
};

// config/production.js
module.exports = {
    jsFiles: [ 'js/combined.js' ]
};
If you are using Express with a rendering engine, you can pass the list of files to each template:
// assuming `app` is an Express app
var config = require('config');
app.locals.jsFiles = config.jsFiles;
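Then the template just loops over jsFiles; for example, with EJS as the view engine (an assumption on my part; adapt the loop syntax to whatever engine you actually use):
<% jsFiles.forEach(function (src) { %>
    <script type="text/javascript" src="<%= src %>"></script>
<% }); %>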
You can also use these lists from your Gruntfile.js, although there you need to require the files directly (since you probably need both):
var development = require('./config/development');
var production = require('./config/production');
...
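For instance, a concat task could consume both lists so that the dev file list and the prod bundle name stay defined in one place. This is only a minimal sketch; the grunt-contrib-concat task and file names are my assumptions, not part of the original answer:
// Gruntfile.js (sketch)
module.exports = function (grunt) {
    var development = require('./config/development');
    var production = require('./config/production');

    grunt.initConfig({
        concat: {
            dist: {
                src: development.jsFiles,     // ['js/one.js', 'js/two.js']
                dest: production.jsFiles[0]   // 'js/combined.js'
            }
        }
    });

    grunt.loadNpmTasks('grunt-contrib-concat');
    grunt.registerTask('default', ['concat']);
};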