mobilenet.js
var { loadFrozenModel } = require('@tensorflow/tfjs-converter');
var tfc = require('@tensorflow/tfjs-core');
var IMAGENET_CLASSES = require('./imagenet_classes');
const GOOGLE_CLOUD_STORAGE_DIR = 'https://storage.googleapis.com/tfjs-models/savedmodel/';
const MODEL_FILE_URL = 'mobilenet_v1_1.0_224/optimized_model.pb';
const WEIGHT_MANIFEST_FILE_URL = 'mobilenet_v1_1.0_224/weights_manifest.json';
const INPUT_NODE_NAME = 'input';
const OUTPUT_NODE_NAME = 'MobilenetV1/Predictions/Reshape_1';
const PREPROCESS_DIVISOR = tfc.scalar(255 / 2);
class MobileNet {
constructor() {}
async load() {
this.model = await loadFrozenModel(
GOOGLE_CLOUD_STORAGE_DIR + MODEL_FILE_URL,
GOOGLE_CLOUD_STORAGE_DIR + WEIGHT_MANIFEST_FILE_URL);
}
dispose() {
if (this.model) {
this.model.dispose();
}
}
predict(input) {
// Normalize pixel values from [0, 255] to [-1, 1].
const preprocessedInput = tfc.div(
tfc.sub(input.asType('float32'), PREPROCESS_DIVISOR),
PREPROCESS_DIVISOR);
// Add the batch dimension the graph expects: [1, height, width, channels].
const reshapedInput =
preprocessedInput.reshape([1, ...preprocessedInput.shape]);
const dict = {};
dict[INPUT_NODE_NAME] = reshapedInput;
return this.model.execute(dict, OUTPUT_NODE_NAME);
}
getTopKClasses(predictions, topK) {
// Copy the probabilities to the CPU, then free the tensor.
const values = predictions.dataSync();
predictions.dispose();
let predictionList = [];
for (let i = 0; i < values.length; i++) {
predictionList.push({value: values[i], index: i});
}
predictionList = predictionList
.sort((a, b) => {
return b.value - a.value;
})
.slice(0, topK);
return predictionList.map(x => {
return {label: IMAGENET_CLASSES[x.index], value: x.value};
});
}
}
module.exports = MobileNet;
test.js
var tfc = require('@tensorflow/tfjs-core');
var MobileNet = require('./mobilenet');
var fs = require('fs');
var image = require('get-image-data');
var i = 0;
image('./cat.jpg', async function(err, getImageData) {
if (err) throw err;
console.log('start processing image data');
console.log(i++);
console.log("data length: " + getImageData.data.length);
console.log("getImageData: " + getImageData);
const mobileNet = new MobileNet();
console.time('Loading of model');
await mobileNet.load();
console.timeEnd('Loading of model');
console.log("maybe the error is in the data type");
const pixels = tfc.fromPixels(getImageData);
console.time('First prediction');
let result = mobileNet.predict(pixels);
const topK = mobileNet.getTopKClasses(result, 5);
console.timeEnd('First prediction');
topK.forEach(x => {
console.log(`${x.value.toFixed(3)}: ${x.label}`);
});
console.time('Subsequent predictions');
result = mobileNet.predict(pixels);
mobileNet.getTopKClasses(result, 5);
console.timeEnd('Subsequent predictions');
mobileNet.dispose();
});
I want to analyze the image using TensorFlow.js, but it doesn't work. I get this error:
ReferenceError: ImageData is not defined
at MathBackendCPU.fromPixels (/Users/leeyongmin/Documents/tfjs-converter-master-2/demo/node_modules/@tensorflow/tfjs-core/dist/kernels/backend_cpu.js:75:31)
at Engine.fromPixels (/Users/leeyongmin/Documents/tfjs-converter-master-2/demo/node_modules/@tensorflow/tfjs-core/dist/engine.js:292:29)
at ArrayOps.fromPixels (/Users/leeyongmin/Documents/tfjs-converter-master-2/demo/node_modules/@tensorflow/tfjs-core/dist/ops/array_ops.js:195:41)
at /Users/leeyongmin/Documents/tfjs-converter-master-2/demo/node_modules/@tensorflow/tfjs-core/dist/ops/operation.js:11:61
at Object.Tracking.tidy (/Users/leeyongmin/Documents/tfjs-converter-master-2/demo/node_modules/@tensorflow/tfjs-core/dist/tracking.js:36:22)
at Object.descriptor.value [as fromPixels] (/Users/leeyongmin/Documents/tfjs-converter-master-2/demo/node_modules/@tensorflow/tfjs-core/dist/ops/operation.js:11:26)
at /Users/leeyongmin/Documents/tfjs-converter-master-2/demo/test.js:26:22
at /Users/leeyongmin/Documents/tfjs-converter-master-2/demo/node_modules/get-image-data/index.js:18:7
at load (/Users/leeyongmin/Documents/tfjs-converter-master-2/demo/node_modules/get-image/server.js:18:5)
at FSReqWrap.readFileAfterClose [as oncomplete] (fs.js:511:3)
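For what it's worth, tfc.fromPixels relies on the browser's ImageData class, which does not exist in Node.js; that is why the call dies inside the CPU backend. A minimal sketch of one workaround, assuming get-image-data hands back an ImageData-like object with data, width, and height, is to build the input tensor by hand from the RGBA bytes:
const { data, width, height } = getImageData;
// Drop the alpha channel: RGBA -> RGB.
const rgb = new Int32Array(width * height * 3);
for (let i = 0, j = 0; i < data.length; i += 4) {
  rgb[j++] = data[i];     // R
  rgb[j++] = data[i + 1]; // G
  rgb[j++] = data[i + 2]; // B
}
// Same shape and dtype that fromPixels would have produced in the browser.
const pixels = tfc.tensor3d(rgb, [height, width, 3], 'int32');
tfc.tensor3d is part of tfjs-core, so this avoids fromPixels entirely.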
I have a directory of tax files for employees. Each file's name is the employee code. I am reading each file, extracting some components, and saving them to an array of employee objects.
const fs = require('fs');
const path = require('path');
const readline = require('readline');
const directoryPath = './tax/';
let empArr = [];
function readFiles(dirname) {
fs.readdir(dirname, async function (err,filenames) {
if(err) {
return err;
}
for (const file of filenames) {
const filePath = path.join(__dirname,directoryPath,file);
const readStream = fs.createReadStream(filePath);
const fileContent = readline.createInterface({
input: readStream
});
let employeeObj = {
empId : '',
TotalEarning:'',
ProfessionalTax:0,
GrossIncome:0,
isDone:false
};
fileContent.on('line', function(line) {
if(!employeeObj.empId && line.includes("Employee:")) {
const empId = line.replace('Employee: ','').split(" ")[0];
employeeObj.empId = empId;
}
else if(line.includes('Total Earnings')) {
const amount = line.replace(/[^0-9.]/g,'');
employeeObj.TotalEarning = amount;
}
else if(line.includes('Profession Tax')) {
const amount = line.split(" ").pop() || 0;
employeeObj.ProfessionalTax = amount;
}
else if(line.includes('Gross Income')) {
const amount = line.replace(/[^0-9.]/g,'');
employeeObj.GrossIncome = amount ||0;
}
else if(line.includes('finance department immediately')) {
employeeObj.isDone =true;
empArr.push(employeeObj);
}
});
fileContent.on('close', function() {
fileContent.close();
});
}
})
}
readFiles(directoryPath);
I am not able to get empArr populated. After getting the array, I need to save it to Excel; I will try that part once I have the array of employee objects.
I got it working after reading several articles on closures and promises. The code below works for me and gives me the array of employees that were processed.
const fs = require('fs');
const path = require('path');
const readline = require('readline');
const directoryPath = './tax/';
function readFiles(dirname) {
fs.readdir(dirname, async function (err,filenames) {
if(err) {
return err;
}
let promiseArr = filenames.map( file=> {
return new Promise((resolve)=>{
processFile(file, resolve)
})
});
Promise.all(promiseArr).then((ret)=>console.log(ret));
})
}
function processFile(file, callback) {
const filePath = path.join(__dirname,directoryPath,file);
const readStream = fs.createReadStream(filePath);
const fileContent = readline.createInterface({
input: readStream
});
let employeeObj = {
empId : '',
TotalEarning:'',
ProfessionalTax:0,
GrossIncome:0,
isDone:false
};
fileContent.on('line', function(line) {
if(!employeeObj.empId && line.includes("Employee:")) {
const empId = line.replace('Employee: ','').split(" ")[0];
employeeObj.empId = empId;
}
else if(line.includes('Total Earnings')) {
const amount = line.replace(/[^0-9.]/g,'');
employeeObj.TotalEarning = amount;
}
else if(line.includes('Profession Tax')) {
const amount = line.split(" ").pop() || 0;
employeeObj.ProfessionalTax = amount;
}
else if(line.includes('Gross Income')) {
const amount = line.replace(/[^0-9.]/g,'');
employeeObj.GrossIncome = amount ||0;
}
else if(line.includes('finance department immediately')) {
employeeObj.isDone = true;
return callback(employeeObj);
}
});
fileContent.on('close', function() {
fileContent.close();
});
}
readFiles(directoryPath);
Surely, the code can be improved further.
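For instance, a minimal sketch of one possible cleanup, assuming Node 10+ where fs.promises is available, and reusing the processFile function above:
const fs = require('fs');
const path = require('path');
const readline = require('readline');
const directoryPath = './tax/';

// Wrap processFile so each file yields a promise of its employee object.
function processFilePromise(file) {
  return new Promise((resolve) => processFile(file, resolve));
}

async function readFilesAsync(dirname) {
  // fs.promises.readdir avoids the callback and lets errors propagate.
  const filenames = await fs.promises.readdir(dirname);
  return Promise.all(filenames.map(processFilePromise));
}

readFilesAsync(directoryPath).then((empArr) => console.log(empArr));
Note the same caveat as the original: processFile only resolves once the 'finance department immediately' line is seen, so a file without that marker would hang the Promise.all.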
I am trying to get the SHA-512 of a large file: 2.5 GB, and maybe larger.
My approach is to build one ArrayBuffer to be digested by the crypto.subtle.digest API.
The problem is I always get:
Array buffer allocation failed
Is it my chunk size? Is there a limit on the ArrayBuffer size? I am out of ideas.
Or maybe there is a better way to get the hash digest instead of using one full ArrayBuffer?
// receives a File object
function CalculateHash(file)
{
var obj = { File: file };
var hash = {};
var reader = new FileReader();
var chunkSize = 10485760; // 10 MiB per chunk
const chunksQuantity = Math.ceil(obj.File.size / chunkSize);
const chunksQueue = new Array(chunksQuantity).fill().map((_, index) => index).reverse();
var buffer = null;
reader.onload = async function (evt) {
if (buffer == null) {
buffer = evt.currentTarget.result;
} else {
var tmp = new Uint8Array(buffer.byteLength + evt.currentTarget.result.byteLength);
tmp.set(new Uint8Array(buffer), 0);
tmp.set(new Uint8Array(evt.currentTarget.result), buffer.byteLength);
buffer = tmp;
}
readNext();
}
var readNext = async function () {
if (chunksQueue.length > 0) {
const chunkId = chunksQueue.pop();
const sentSize = chunkId * chunkSize;
const chunk = obj.File.slice(sentSize, sentSize + chunkSize);
reader.readAsArrayBuffer(chunk);
} else {
var x = await digestMessage(buffer);
hash.SHA512 = x.toUpperCase();
buffer = null;
}
}
readNext();
}
async function digestMessage(file) {
const hashBuffer = await crypto.subtle.digest('SHA-512', file); // hash the message
const hashArray = Array.from(new Uint8Array(hashBuffer)); // convert buffer to byte array
const hashHex = hashArray.map(b => b.toString(16).padStart(2, '0')).join(''); // convert bytes to hex string
return hashHex;
}
Based on @ArtjomB.'s answer, the solution was a progressive hash, working around the browser's limit on ArrayBuffer allocation.
This is the final worker code. It mixes both approaches: the native digest, which is much faster than the CryptoJS library, and CryptoJS itself. If the file is larger than 1 GB we use the CryptoJS library; otherwise we use the native browser digest. Any suggestions are welcome!
// CryptoJS expects `window` and `document` globals, so stub them inside the worker.
var window = self;
var document = {};
self.importScripts("/Crypto.min.js");
onmessage = async function (args) {
var obj = args.data;
var reader = new FileReader();
var hash = {};
var chunkSize = 10485760; // 10 MiB per chunk
var largeFileTrigger = 1048576000; // ~1 GB: above this, use the progressive CryptoJS hash
const chunksQuantity = Math.ceil(obj.File.size / chunkSize);
const chunksQueue = new Array(chunksQuantity).fill().map((_, index) => index).reverse();
var isLargeFile = obj.File.size > largeFileTrigger;
var buffer = null;
var progressiveArray = [];
reader.onload = async function (evt) {
if (isLargeFile) {
progressiveArray.push(evt.currentTarget.result);
} else {
if (buffer == null) {
buffer = evt.currentTarget.result;
} else {
var tmp = new Uint8Array(buffer.byteLength + evt.currentTarget.result.byteLength);
tmp.set(new Uint8Array(buffer), 0);
tmp.set(new Uint8Array(evt.currentTarget.result), buffer.byteLength);
buffer = tmp;
}
}
readNext();
}
var readNext = async function () {
if (chunksQueue.length > 0) {
const chunkId = chunksQueue.pop();
const sentSize = chunkId * chunkSize;
const chunk = obj.File.slice(sentSize, sentSize + chunkSize);
reader.readAsArrayBuffer(chunk);
} else {
var hexHash = null;
if (isLargeFile) {
var sha = CryptoJS.algo.SHA512.create();
for (var i = 0; i < progressiveArray.length; i++) {
sha.update(arrayBufferToWordArray(progressiveArray[i]));
}
hexHash = sha.finalize().toString();
} else {
hexHash = await digestMessage(buffer);
}
var SHA512 = hexHash.toUpperCase();
buffer = null;
progressiveArray = null;
postMessage({ Hash: SHA512 });
}
}
readNext();
}
async function digestMessage(file) {
const hashBuffer = await crypto.subtle.digest('SHA-512', file); // hash the message
const hashArray = Array.from(new Uint8Array(hashBuffer)); // convert buffer to byte array
const hashHex = hashArray.map(b => b.toString(16).padStart(2, '0')).join(''); // convert bytes to hex string
return hashHex;
}
// Pack the buffer's bytes into 32-bit big-endian words for CryptoJS.
function arrayBufferToWordArray(ab) {
var i8a = new Uint8Array(ab);
var a = [];
for (var i = 0; i < i8a.length; i += 4) {
a.push(i8a[i] << 24 | i8a[i + 1] << 16 | i8a[i + 2] << 8 | i8a[i + 3]);
}
return CryptoJS.lib.WordArray.create(a, i8a.length);
}
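For completeness, a minimal sketch of how the worker above might be driven from the main thread; the worker file name and the input element are assumptions, not part of the original code:
// 'hash-worker.js' is an assumed file name for the worker script above.
var worker = new Worker('hash-worker.js');
worker.onmessage = function (e) {
  console.log('SHA-512:', e.data.Hash);
};

// Assumes an <input type="file" id="file"> element on the page.
document.getElementById('file').addEventListener('change', function (e) {
  // File objects survive the structured clone into the worker.
  worker.postMessage({ File: e.target.files[0] });
});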
I am working on a Node.js Instagram parser. At the moment I have single-threaded code that works with a proxy, but I'm not sure how to build a multithreaded architecture:
'use strict';
var InstagramPrivateAPI = {};
InstagramPrivateAPI.V1 = require(__dirname + '/client/v1');
InstagramPrivateAPI.Helpers = require(__dirname + '/helpers');
var acc = require(__dirname + "/client/v1/account");
var med = require(__dirname + "/client/v1/media")
var Promise = require('../bluebird');
var _ = require('../lodash/');
module.exports = InstagramPrivateAPI;
var Client = require('instagram-private-api').V1;
var device = new Client.Device('maksgmn');
var storage = new Client.CookieFileStorage(__dirname + '/cookies/maksgmn.json');
var session = new Client.Session(device, storage);
function getRandomInt(min, max) {
return Math.floor(Math.random() * (max - min)) + min;
}
var fs = require('fs');
var proxyArray = fs.readFileSync('proxy.txt').toString().split("\n");
var usernamesArray = fs.readFileSync('usernames.txt').toString().split("\n");
var proxy = "http://" + proxyArray[getRandomInt(0,proxyArray.length)]
var username = usernamesArray[getRandomInt(0,usernamesArray.length)]
console.log(proxy)
console.log(username)
Client.Request.setProxy(proxy);
acc.searchForUser(session, username) // look up the user's id
.then(function(profile) {
return profile.id
})
.then(function(someId) { // get the feed ("lenta") promise
var feed = new Client.Feed.UserMedia(session, someId);
var lenta = Promise.mapSeries(_.range(0, 1), function() {
return feed.get();
}).then(function(lenta) {
return {id: someId, fd : lenta}
})
return lenta
})
.then(function(results) { // process the promise and extract the user's feed
// result should be Media[][]
var media1 = _.flatten(results.fd);
var urls1 = _.map(media1, function(medium) {
//var arr1 = medium.params.images[0].url;
var arr1 = []
try {
arr1 = medium.params.images[0].url
} catch (err) {
//console.log("lala")
}
return arr1;
//console.log(medium.params.carouselMedia.images[0].url)
})
//console.log(urls1)
return {id : results.id, linksNoCarousel : urls1, med : media1}
})
.then(function(res){
var urls2 = _.map(res.med, function(medium) {
var arr2 = []
try {
arr2 = medium.params.images[0][0].url
//console.log(arr2)
} catch (err) {
}
return arr2
})
for (var i = 0; i < 5; i++) {
if (typeof res.linksNoCarousel[i] == "undefined")
res.linksNoCarousel[i] = urls2[i];
}
var arr3 = []
for (var i = 0; i < 5; i++) {
arr3[i] = res.linksNoCarousel[i]
}
return {id : res.id, links : arr3}
})
.then(function(mediaAndId) {
acc = acc.getById(session, mediaAndId.id)
.then(function(account) {
//console.log(account.params)
let avatar = account.params.profilePicUrl;
let fullName = account.params.fullName;
let bio = account.params.biography;
let media0 = mediaAndId.links[0];
let media1 = mediaAndId.links[1];
let media2 = mediaAndId.links[2];
let media3 = mediaAndId.links[3];
let media4 = mediaAndId.links[4];
console.log(avatar);
console.log(fullName);
console.log(bio);
console.log(media0);
console.log(media1);
console.log(media2);
console.log(media3);
console.log(media4);
})
})
I would like it to work multithreaded so it is much faster with proxies. Since this is only my second day working with Node.js, I am asking: how do I do that?
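Not a full answer, but a minimal sketch of one way to run several lookups concurrently with the bluebird library the code already loads; parseUser is a hypothetical wrapper around the promise chain above, and the concurrency value is an arbitrary choice:
var Promise = require('bluebird');

// Hypothetical wrapper: runs the searchForUser -> feed -> getById chain
// above for one username and resolves with the collected result.
function parseUser(session, username) {
  return acc.searchForUser(session, username)
    .then(/* ...the same chain as above... */);
}

// Node.js is single-threaded, but these requests are I/O-bound, so running
// them concurrently (rather than in parallel threads) is what gives the
// speed-up. bluebird's Promise.map accepts a concurrency limit.
Promise.map(usernamesArray, function (username) {
  return parseUser(session, username);
}, { concurrency: 5 }).then(function (results) {
  console.log(results);
});
One caveat: Client.Request.setProxy appears to set the proxy globally, so giving each task its own proxy would likely require separate processes (e.g. the cluster or child_process modules) rather than promises alone.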
Can anyone assist me with loading an array with Excel data and returning it from a function? This is my initial code:
var excel = require('exceljs');
var wb = new excel.Workbook();
var path = require('path');
var filePath = path.resolve(__dirname,'data.xlsx');
function signIn(){
var SignIn = [];
wb.xlsx.readFile(filePath).then(function(){
var sh = wb.getWorksheet("Sheet1");
for(var i = 1; i < 3; i++){
SignIn.push(sh.getRow(i).getCell(2).value);
}
});
return SignIn
}
Workbook.readFile is asynchronous; you need to use either a callback or a promise-style approach. Using promises we can try:
var excel = require('exceljs');
var wb = new excel.Workbook();
var path = require('path');
var filePath = path.resolve(__dirname,'data.xlsx');
function signIn() {
var SignIn = [];
return wb.xlsx.readFile(filePath).then( () => {
var sh = wb.getWorksheet("Sheet1");
for(var i = 1; i < 3; i++){
SignIn.push(sh.getRow(i).getCell(2).value);
}
return SignIn;
});
}
async function testReadData() {
try {
let data = await signIn();
console.log('testReadData: Loaded data: ', data);
} catch (error) {
console.error('testReadData: Error occurred: ', error);
}
}
testReadData();
Or you can use a callback type approach:
function signInWithCallback(callback) {
var SignIn = [];
wb.xlsx.readFile(filePath).then(function(){
var sh = wb.getWorksheet("Sheet1");
for(var i = 1; i < 3; i++){
SignIn.push(sh.getRow(i).getCell(2).value);
}
callback(SignIn);
});
}
signInWithCallback((data) => console.log('Callback: Data: ', data));
How can I handle request failures in this example of axios.all requests? That is, if all servers respond with JSON, all is okay and I have a JSON file at the end of a cycle. But if one of these servers does not respond with JSON, or does not respond at all, I end up with nothing in the /data.json file, even though all the other servers are working perfectly. How can I catch a server failure and skip it?
var fs = require("fs");
var axios = require('axios');
var util = require('util');
var round = 0;
var tmp = {};
var streem = fs.createWriteStream(__dirname + '/data.json', {flags : 'w'});
var toFile = function(d) {
streem.write(util.format(d));
};
start();
setInterval(start, 27000);
function start(){
streem = fs.createWriteStream(__dirname + '/data.json', {flags : 'w'});
monitor();
}
function monitor(){
axios.all([
axios.get('server1:api'),
axios.get('server2:api'),
axios.get('server3:api'),
axios.get('server4:api'),
]).then(axios.spread((response1, response2, response3, response4) => {
tmp.servers = {
server1: response1.data,
server2: response2.data,
server3: response3.data,
server4: response4.data,
};
toFile(JSON.stringify(tmp));
round++;
streem.end();
streem.on('finish', () => {
console.error('Round: ' + round);
});
})).catch(error => {
console.log(error);
});
}
The most standard way to approach this would be a recursive function like the one below, which awaits the queued requests one at a time and substitutes an empty object for any that fail.
let promises = [
axios.get('server1:api'),
axios.get('server2:api'),
axios.get('server3:api'),
axios.get('server4:api'),
];
async function monitor(responses) {
// Take the next request off the queue; when the queue is empty, we are done.
const nextPromise = promises.shift();
if (!nextPromise) {
return responses;
}
try {
responses.push(await nextPromise);
} catch (error) {
// A failed server contributes an empty object instead of aborting the round.
responses.push({});
}
return monitor(responses);
}
monitor([]).then(([response1, response2, response3, response4]) => {
tmp.servers = {
server1: response1.data,
server2: response2.data,
server3: response3.data,
server4: response4.data,
};
toFile(JSON.stringify(tmp));
round++;
streem.end();
streem.on('finish', () => {
console.error('Round: ' + round);
});
});
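Alternatively, on Node 12.9+ the built-in Promise.allSettled does the same skip-the-failures bookkeeping; a minimal sketch, not the original answer's approach:
function monitorSettled() {
  const requests = [
    axios.get('server1:api'),
    axios.get('server2:api'),
    axios.get('server3:api'),
    axios.get('server4:api'),
  ];
  // allSettled never rejects; each result is {status, value} or {status, reason}.
  return Promise.allSettled(requests).then((results) =>
    results.map((r) => (r.status === 'fulfilled' ? r.value : { data: {} }))
  );
}

monitorSettled().then(([response1, response2, response3, response4]) => {
  // ...same file-writing bookkeeping as above...
});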