Trouble tracing error "Invalid Metadata Provided" - javascript

I followed this tutorial to learn how to use the TensorFlow.js MobileNet model in Node.js: link
Now I am trying to use my own TensorFlow.js model, trained in Teachable Machine, using the @teachablemachine/image package: link
Here is my code:
const tf = require('@tensorflow/tfjs');
const tfnode = require('@tensorflow/tfjs-node');
const tmImage = require('@teachablemachine/image');
const fs = require('fs');
const path = require('path');
const FileAPI = require('file-api'),
    File = FileAPI.File;
global.FileReader = FileAPI.FileReader;
global.Response = require('response');
const uploadModel = "model.json";
const uploadModelPath = path.join(process.cwd(), uploadModel);
const uploadModelFile = new File({
    name: "model.json",
    type: "application/json",
    path: uploadModelPath
});
const uploadWeights = "weights.bin";
const uploadWeightsPath = path.join(process.cwd(), uploadWeights);
const uploadWeightsFile = new File({
    name: "weights.bin",
    path: uploadWeightsPath
});
const uploadMetadata = "metadata.json";
const uploadMetadataPath = path.join(process.cwd(), uploadMetadata);
const uploadMetadataFile = new File({
    name: "metadata.json",
    type: "application/json",
    path: uploadMetadataPath
});
const readImage = path => {
    const imageBuffer = fs.readFileSync(path);
    const tfimage = tfnode.node.decodeImage(imageBuffer);
    return tfimage;
}
const imageClassification = async path => {
    const image = readImage(path);
    const model = await tmImage.loadFromFiles(uploadModelFile, uploadWeightsFile, uploadMetadataFile);
    //const model = await tmImage.load('https://teachablemachine.withgoogle.com/models/25uN0DSdd/model.json', 'https://teachablemachine.withgoogle.com/models/25uN0DSdd/metadata.json');
    const predictions = await model.predict(image);
    console.log('Classification Results:', predictions);
}
if (process.argv.length !== 3) throw new Error('Incorrect arguments: node classify.js <IMAGE_FILE>');
imageClassification(process.argv[2]);
When I run it I get this error:
> (node:94924) UnhandledPromiseRejectionWarning: Error: Invalid Metadata provided
    at C:\Users\Awesome\Google Drive\Source\Programming\JS\Testing\node_modules\@teachablemachine\image\dist\custom-mobilenet.js:163:27
Which leads me to:
var processMetadata = function (metadata) { return __awaiter(void 0, void 0, void 0, function () {
    var metadataJSON, metadataResponse;
    return __generator(this, function (_a) {
        switch (_a.label) {
            case 0:
                if (!(typeof metadata === 'string')) return [3 /*break*/, 3];
                return [4 /*yield*/, fetch(metadata)];
            case 1:
                metadataResponse = _a.sent();
                return [4 /*yield*/, metadataResponse.json()];
            case 2:
                metadataJSON = _a.sent();
                return [3 /*break*/, 4];
            case 3:
                if (isMetadata(metadata)) {
                    metadataJSON = metadata;
                }
                else {
                    throw new Error('Invalid Metadata provided');
                }
                _a.label = 4;
            case 4: return [2 /*return*/, fillMetadata(metadataJSON)];
        }
    });
}); };
full file here: link
So I can see cases 0-2 aren't being triggered, and in case 3 the metadata file isn't passing the isMetadata function, which is:
var isMetadata = function (c) {
    return !!c && Array.isArray(c.labels);
};
Which I think checks that the parsed metadata is truthy and has a labels array.
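As a quick sanity check (my own sketch; it just reads the file directly), the metadata file can be run through the same condition outside the library:
const fs = require('fs');
const metadataJSON = JSON.parse(fs.readFileSync('metadata.json', 'utf8'));
// the same test isMetadata() applies; if this prints true, the JSON itself
// is fine and it is the File wrapper that fails inside loadFromFiles
console.log(!!metadataJSON && Array.isArray(metadataJSON.labels));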
Where to go from there I am not sure, because I don't understand the rest of the code in that file. I am going to try an alternative approach (sketched below), but I thought I might post this in case someone with more experience can see the problem clearly and wants to teach me something, point me in the right direction, or simply tell me that at my experience level this isn't the right use of my time.
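Here is the sketch (untested; it assumes the exported model.json, weights.bin and metadata.json sit in the working directory, and that Teachable Machine image models take 224x224 inputs scaled to [-1, 1], which I still need to verify): skip @teachablemachine/image entirely and load the underlying layers model with tfjs-node:
const tf = require('@tensorflow/tfjs-node');
const fs = require('fs');

const classify = async imagePath => {
    // tfjs-node can load the exported model straight from disk
    const model = await tf.loadLayersModel('file://' + process.cwd() + '/model.json');
    // read the labels directly instead of going through processMetadata()
    const metadata = JSON.parse(fs.readFileSync('metadata.json', 'utf8'));
    const image = tf.node.decodeImage(fs.readFileSync(imagePath), 3);
    const input = tf.image.resizeBilinear(image, [224, 224])
        .toFloat().div(127.5).sub(1).expandDims(0);
    const scores = await model.predict(input).data();
    metadata.labels.forEach((label, i) => console.log(label, scores[i]));
};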
Thanks for reading.

Related

Node.js does not await async function and array.length condition

I am trying to implement the Twitter API to post tweets with multiple images. I post requests from the admin dashboard with an AD id (not the Twitter ad), fetch the image URLs from our database, use the URLs to write image files into the upload directory, and then use the twitter-api-v2 package to request the media IDs from the Twitter API and post the tweet.
Problem: when I write the files to the local uploads folder, the next async function also gets executed immediately, so it cannot find the media files in the local folder yet, leading to an error.
const router = require('express').Router()
const { parse } = require('dotenv');
const { link } = require('joi');
const { TwitterApi } = require('twitter-api-v2')
const { FetchSingleAdBasics } = require('../helpers/fetch-single-ad-basics');
const request = require('request');
const fs = require('fs');
const path = require('path');
const https = require('https')
function saveImagesToUploads(url, path){
    const fullUrl = url
    const localPath = fs.createWriteStream(path)
    const request = https.get(fullUrl, function(response){
        console.log(response)
        response.pipe(localPath)
    })
}
var jsonPath1 = path.join(__dirname,'../..','uploads/0.png');
var jsonPath2 = path.join(__dirname,'../..','uploads/1.png');
var jsonPath3 = path.join(__dirname,'../..','uploads/2.png');
var jsonPath4 = path.join(__dirname,'../..','uploads/3.png');
router.post('/twitter-post', async(req, res) => {
    const {adId} = req.body
    const imagesArr = []
    const imageIdsArr = []
    const {text} = req.body
    const AD = adId && await FetchSingleAdBasics(adId);
    const PostMessage = `${AD?.year} ${AD?.make} ${AD?.model} ${AD?.trim}\r\n${AD?.engineSize}L Engine\r\nPrice: AED${AD?.addetails[0].price}\r\nMileage: ${AD?.mileage} - ${AD?.mileageUnit}\r\nMechanical Condition: ${AD?.addetails[0].mechanicalCondition}\r\nAvailable in: ${AD?.adcontacts[0]?.location}\r\nCheckout full details at: https://ottobay.com/cars/uc/${AD?.id}`
    if (!AD)
        return res
            .status(422)
            .json({ message: "failed", error: "Ad Not Found" })
    try {
        imagesArr.push(await AD?.adimages[0]?.LeftSideView)
        imagesArr.push(await AD?.adimages[0]?.LeftFront)
        imagesArr.push(await AD?.adimages[0]?.Front)
        imagesArr.push(await AD?.adimages[0]?.FrontRight)
        // the following function must wait for this to finish
        imagesArr?.map((item,index) => {
            saveImagesToUploads(item, "./uploads/" + `${index}` + '.png')
        })
        const filesArr = [jsonPath1,jsonPath3,jsonPath4,jsonPath2]
        console.log(filesArr)
        console.log(filesArr?.length)
        const idsArray = []
        // this function gets executed without waiting for the previous function, leading to an error
        // this function does not apply the filesArr?.length === 4 condition
        filesArr?.length === 4 && await Promise.all(filesArr?.length === 4 && filesArr?.map(async (item) => {
            try {
                const mediaId = await client.v1.uploadMedia(item,{ mimeType : 'png' })
                idsArray.push(mediaId)
                return imageIdsArr;
            } catch(err) {
                console.log(err)
                throw err;
            }
        }));
        const response = idsArray?.length === 4 && await client.v1.tweetThread([{ status: PostMessage, media_ids: idsArray }]);
        // remove files after successful tweet
        await fs.promises.readdir(jsonUploadPath).then((f) => Promise.all(f.map(e => fs.promises.unlink(`${jsonUploadPath}${e}`))))
        res.json({status: 'success', response})
    } catch (error) {
        res.json({status: 'failed',error})
        // console.log("tweets error", error.data.errors);
    }
})
module.exports = router
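For what it's worth, the usual fix for this kind of race (a sketch only, not tested against this route) is to have saveImagesToUploads return a Promise that resolves when the write stream finishes, and to await all downloads before uploading:
function saveImagesToUploads(url, filePath) {
    return new Promise((resolve, reject) => {
        const localPath = fs.createWriteStream(filePath);
        https.get(url, (response) => {
            response.pipe(localPath);
            // 'finish' fires once the file has been fully flushed to disk
            localPath.on('finish', () => resolve(filePath));
            localPath.on('error', reject);
        }).on('error', reject);
    });
}

// inside the route handler, before touching filesArr:
await Promise.all(imagesArr.map((item, index) =>
    saveImagesToUploads(item, `./uploads/${index}.png`)
));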

JavaScript Azure Blob Storage move blob

I have a NodeJS backend which uses the official Blob Storage library (@azure/storage-blob) from Microsoft to manage my Blob Storage:
https://www.npmjs.com/package/@azure/storage-blob
I need to move a blob from one folder to another.
Unfortunately I can't find any documentation for that.
What I have done so far is:
const { BlobServiceClient } = require("#azure/storage-blob");
const blobServiceClient = BlobServiceClient.fromConnectionString(process.env.storageconnection);
const containerClient = blobServiceClient.getContainerClient('import');
const blobClient = containerClient.getBlobClient('toImport/' + req.body.file);
const downloadBlockBlobResponse = await blobClient.download();
... do some stuff with the value of the files
As you can see in the code, I read a file from the folder "toImport". After that I want to move the file to another folder, "finished". Is that possible? Maybe I need to create a copy of the file and delete the old one?
A move operation as such is not supported in Azure Blob Storage. What you have to do is copy the blob from source to destination, monitor the copy progress (because the copy operation is asynchronous), and delete the source blob once the copy is complete.
For copying, the method you would want to use is beginCopyFromURL(string, BlobBeginCopyFromURLOptions).
Please see this code:
const { BlobServiceClient } = require("#azure/storage-blob");
const connectionString = "DefaultEndpointsProtocol=https;AccountName=account-name;AccountKey=account-key;EndpointSuffix=core.windows.net";
const container = "container-name";
const sourceFolder = "source";
const targetFolder = "target";
const blobName = "blob.png";
async function moveBlob() {
    const blobServiceClient = BlobServiceClient.fromConnectionString(connectionString);
    const containerClient = blobServiceClient.getContainerClient(container);
    const sourceBlobClient = containerClient.getBlobClient(`${sourceFolder}/${blobName}`);
    const targetBlobClient = containerClient.getBlobClient(`${targetFolder}/${blobName}`);
    console.log('Copying source blob to target blob...');
    const copyResult = await targetBlobClient.beginCopyFromURL(sourceBlobClient.url);
    console.log('Blob copy operation started successfully...');
    console.log(copyResult);
    do {
        console.log('Checking copy status...')
        const blobCopiedSuccessfully = await checkIfBlobCopiedSuccessfully(targetBlobClient);
        if (blobCopiedSuccessfully) {
            break;
        }
    } while (true);
    console.log('Now deleting source blob...');
    await sourceBlobClient.delete();
    console.log('Source blob deleted successfully....');
    console.log('Move operation complete.');
}
async function checkIfBlobCopiedSuccessfully(targetBlobClient) {
    const blobPropertiesResult = await targetBlobClient.getProperties();
    const copyStatus = blobPropertiesResult.copyStatus;
    return copyStatus === 'success';
}
moveBlob();
The previous solution seems to work, but I don't like using an infinite loop. So here is an alternative way to move a blob file:
const move = async (
    fileName: string,
    src: string,
    dest: string
) => {
    try {
        const blobServiceClient = BlobServiceClient.fromConnectionString(connectionString);
        logger.info(`Move storage file [ ${src} -> ${dest} | ${fileName} ]`);
        const srcContainerClient = blobServiceClient.getContainerClient(src);
        const destContainerClient = blobServiceClient.getContainerClient(dest);
        const blobClient = srcContainerClient.getBlobClient(fileName);
        const downloadBlockBlobResponse = await blobClient.download();
        const buffer = await streamToBuffer(
            downloadBlockBlobResponse.readableStreamBody!
        );
        const blockBlobClient = destContainerClient.getBlockBlobClient(fileName);
        await blockBlobClient.upload(buffer, buffer.length);
        // delete the source only after the upload has succeeded
        await blobClient.delete();
        return `${destContainerClient.url}/${fileName}`;
    } catch (e) {
        throw new Error(
            `Fail to move storage file [ ${src} -> ${dest} | ${fileName} ]`
        );
    }
};
const streamToBuffer = async (readableStream: NodeJS.ReadableStream): Promise<Buffer> => {
    return new Promise((resolve, reject) => {
        const chunks: Buffer[] = [];
        readableStream.on("data", (data) => {
            chunks.push(data instanceof Buffer ? data : Buffer.from(data));
        });
        readableStream.on("end", () => {
            resolve(Buffer.concat(chunks));
        });
        readableStream.on("error", reject);
    });
};
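Note that with recent versions of @azure/storage-blob (v12+, if I'm not mistaken), the manual status loop in the first answer can also be avoided without downloading and re-uploading, because beginCopyFromURL returns a poller. A sketch reusing the clients from the first answer:
const poller = await targetBlobClient.beginCopyFromURL(sourceBlobClient.url);
await poller.pollUntilDone(); // resolves once the copy reaches a terminal state
await sourceBlobClient.delete();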

Error Export Module in Node.js - separation of concerns

I am trying to implement separation of concerns using module exports. All the code works without the separation of concerns, but as soon as I try to import generateUrlArray() via const db = require('../db'), nothing works. Node.js is not giving me any error on the back-end. The error I get on the front-end is Error: SyntaxError: Unexpected end of JSON input. I am positive the error is coming from the back-end. Let me know if you have any ideas.
controller.js
const db = require('../db')
exports.getWebApiList = (req, res) => {
    (async function fetchDataList() {
        try {
            const urlArray = await db.generateUrlArray({}, { _id: 0 })
            return res.send(urlArray)
        } catch (ex) {
            console.log(`fetchDataList error: ${ex}`)
        }
    })()
}
db/index.js
const { List } = require('./models/List')
const generateUrlArray = (query, projection) => {
    const dataFromDB = List.find(query, projection).select('symbol')
    return linkArray = dataFromDB.map(item => {
        return link = `https://www.alphavantage.co/query?function=GLOBAL_QUOTE&symbol=${item.symbol}&apikey=6BUYSS9QR8Y9HH15`
    })
}
module.exports = { generateUrlArray }
models/List.js
const mongoose = require('mongoose')
mongoose.Promise = global.Promise
const ParentSchemaSymbolList = new mongoose.Schema({
    symbol: String
})
module.exports.List = mongoose.model('List', ParentSchemaSymbolList)
The likely problem is that generateUrlArray never awaits the Mongoose query, so no array is ever produced and the controller sends back a body the front-end cannot parse as JSON. Making the function async and awaiting List.find fixes it:
const generateUrlArray = async (query, projection) => {
    const dataFromDB = await List.find(query, projection).select('symbol')
    const linkArray = dataFromDB.map(item => {
        return `https://www.alphavantage.co/query?function=GLOBAL_QUOTE&symbol=${item.symbol}&apikey=6BUYSS9QR8Y9HH15`
    })
    return linkArray
}

How to implement a basic unit test in javascript for an azure durable function orchestration

What would be the unit test that fakes the calls to callActivity in the orchestrator below, returning a known value, and expects the orchestrator to return that value?
The examples in the Azure Durable Functions documentation for unit testing [1] are all written in C#, and I've not been able to replicate them in JavaScript despite several attempts, because I don't know how to construct an orchestrator with a fake context.
const df = require('durable-functions');
module.exports = df.orchestrator(function* orchestratorFunctionGenerator(context) {
    const input = context.df.getInput();
    const apimApiName = input.apimApiName;
    const indexNames = yield context.df.callActivity('GetIndexNames', apimApiName);
    const indexerName = indexNames.idle;
    const indexerStatus = yield context.df.callActivity('GetIndexerStatus', indexerName);
    return indexerStatus;
});
[1] https://learn.microsoft.com/en-us/azure/azure-functions/durable/durable-functions-unit-testing
The approach we went with was to extract the generator function out into its own module.
module.exports = function* orchestratorFunctionGenerator(context) {
    const input = context.df.getInput();
    const apimApiName = input.apimApiName;
    const indexNames = yield context.df.callActivity('GetIndexNames', apimApiName);
    const indexerName = indexNames.idle;
    const indexerStatus = yield context.df.callActivity('GetIndexerStatus', indexerName);
    return indexerStatus;
};
then require it
const df = require('durable-functions');
const generatorFunction = require('./generator-function');
module.exports = df.orchestrator(generatorFunction);
and then test the function in isolation
const chai = require('chai');
const sinon = require('sinon');
const getIndexerStatusOrchestratorGenerator = require('../../GetIndexerStatusOrchestrator/generator-function');
const expect = chai.expect;
function iterateGenerator(generator) {
    let result = generator.next();
    while (!result.done) {
        result = generator.next(result.value);
    }
    return result;
}
describe('getIndexerStatusOrchestrator', () => {
    it('happy path should return \'inProgress\'', () => {
        const indexNames = { active: 'index-1', idle: 'index-2' };
        const apimApiName = 'api';
        const input = { apimApiName };
        const stubCallActivity = sinon.stub();
        stubCallActivity.withArgs('GetIndexNames', apimApiName).returns(indexNames);
        stubCallActivity.withArgs('GetIndexerStatus', indexNames.idle).returns('inProgress');
        const context = {
            df: {
                callActivity: stubCallActivity,
                getInput: sinon.fake.returns(input),
            },
        };
        const generator = getIndexerStatusOrchestratorGenerator(context);
        const result = iterateGenerator(generator);
        expect(result.value).to.equal('inProgress');
    });
    it('indexer status should be for the idle index', () => {
        const indexNames = { active: 'index-1', idle: 'index-2' };
        const apimIndexName = 'api';
        const input = { apimApiName: apimIndexName };
        const stubCallActivity = sinon.stub();
        stubCallActivity.withArgs('GetIndexNames', apimIndexName).returns(indexNames);
        stubCallActivity.withArgs('GetIndexerStatus', indexNames.idle);
        // use the stub as a mock since we need both stub and mock behaviour
        // for 'callActivity' and this was the easier option
        stubCallActivity.withArgs('GetIndexerStatus').callsFake((method, indexerName) => {
            expect.fail(`Unexpected indexer name ${indexerName}`);
        });
        const context = {
            df: {
                callActivity: stubCallActivity,
                getInput: sinon.fake.returns(input),
            },
        };
        const generator = getIndexerStatusOrchestratorGenerator(context);
        iterateGenerator(generator);
        // expectations set above
    });
});
As might be expected, this is a trivial example of an orchestrator. We have orchestrators with considerably more logic in them, where the tests will have more value.
In addition, I personally would not use the mocking approach in the second test; I would just rely on testing the outputs, using stubs to fake the dependency interactions, as in the sketch below.
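A sketch of what that stub-only variant might look like, asserting on the recorded call arguments instead of failing inside a fake:
it('indexer status should be for the idle index (stub-only variant)', () => {
    const indexNames = { active: 'index-1', idle: 'index-2' };
    const input = { apimApiName: 'api' };
    const stubCallActivity = sinon.stub();
    stubCallActivity.withArgs('GetIndexNames', 'api').returns(indexNames);
    const context = {
        df: {
            callActivity: stubCallActivity,
            getInput: sinon.fake.returns(input),
        },
    };
    iterateGenerator(getIndexerStatusOrchestratorGenerator(context));
    // the second activity call should target the idle index
    expect(stubCallActivity.secondCall.args).to.deep.equal(['GetIndexerStatus', indexNames.idle]);
});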

Javascript TypeError: var is not a function

Working with vanilla Node.js (i.e. no framework).
I have a function in index.js, pushToData() (which I import as toData()), that I want to use inside an event handler.
It is exported as the default export from index.js.
The relevant portions of index.js look like this:
function pushToData(input, title) {
    data[title] = input;
    console.log(data);
    if (isDataFull()) {
        mainEvents.emit("format-export");
    }
}
module.exports = pushToData;
The file where I'm importing and attempting to use it looks like this:
const mssql = require("mssql");
const events = new require("events");
const tagList = require("./tag");
const toData = require("./index");
const tagEvents = new events.EventEmitter();
const sqlAuth = {
/*....SQL Auth Details....*/
};
tagEvents.on("b2bloaded", function(data) {
data = tagList(data, "ESB", "CustomerID");
toData(data, "B2B");
});
/*....Additional event handlers omitted due to similarity...*/
function generateSqlData(item) {
const pool = new mssql.ConnectionPool(sqlAuth, err => {
switch (item) {
case "B2B":
pool
.request()
.query(
`select top (100) * from [AspDotNetStoreFrontB2B].[dbo].[Customer]`
)
.then(res => {
tagEvents.emit("b2bLoaded", res.recordset);
})
.catch(err => console.error(err));
break;
/*....Other Cases Omitted as they operate similar....*/
default:
break;
}
});
}
module.exports = {
generateSqlData
};
The big issue is that I get a:
TypeError: toData is not a function
    at EventEmitter.<anonymous> (c:\Users\rutherfordc\Documents\GitHub\Migratron\sql-actions.js:36:3)
    at EventEmitter.emit (events.js:160:13)
    at pool.request.query.then.res (c:\Users\rutherfordc\Documents\GitHub\Migratron\sql-actions.js:87:23)
    at <anonymous>
    at process._tickCallback (internal/process/next_tick.js:160:7)
I'm not sure why it's not running as expected. Why is the function not defined properly, or not running when called?
I suggest exporting your function as a named export, like this:
exports.pushToData = function(input, title) {
    data[title] = input;
    console.log(data);
    if (isDataFull()) {
        mainEvents.emit("format-export");
    }
};
and using it this way:
const mssql = require("mssql");
const events = require("events");
const tagList = require("./tag");
const toData = require("./index");
const tagEvents = new events.EventEmitter();
const sqlAuth = {
    /*....SQL Auth Details....*/
};
tagEvents.on("b2bloaded", function(data) {
    data = tagList(data, "ESB", "CustomerID");
    toData.pushToData(data, "B2B");
});
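For completeness: the original default export should also have worked, so one thing worth checking (a guess, since only fragments of index.js are shown) is a circular require between index.js and sql-actions.js. When two modules require each other, the one loaded second sees whatever module.exports was at that moment, which is still the initial empty object:
// index.js (hypothetical reduction of the circular case)
const { generateSqlData } = require("./sql-actions"); // starts loading sql-actions.js
function pushToData(input, title) { /* ... */ }
module.exports = pushToData; // assigned only after sql-actions.js already ran

// sql-actions.js
const toData = require("./index"); // index.js is still mid-load here...
console.log(typeof toData); // ...so this logs "object" ({}), not "function"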
