Converting a node.js Buffer into a ref-struct instance - JavaScript

I am writing a program in both C and JavaScript (on node.js), using ffi, ref, and a few other ref- packages.
I have the following code, which I compile into a shared library, libfun.so (e.g. gcc -shared -fPIC fun.c -o libfun.so):
fun.c
#include "fun.h"
#include <stdio.h>
#include <stdlib.h>
void fill_array(void **data_array, int length)
{
    int i;
    for (i = 0; i < length; i++) {
        data_array[i] = malloc(sizeof(data));
        ((data *)data_array[i])->id = 256;
        ((data *)data_array[i])->message = 512;
    }
}

void print_array(void **data_array, int length)
{
    int i = 0;
    for (i = 0; i < length; i++) {
        printf("(%d, %d)\n", ((data *)data_array[i])->id,
               ((data *)data_array[i])->message);
    }
}
fun.h
#ifndef fun_h__
#define fun_h__
typedef struct {
    int id;
    int message;
} data;

void fill_array(void **, int);
void print_array(void **, int);
#endif
fun.js
var ffi = require('ffi');
var Struct = require('ref-struct');
var ref = require('ref');
var ArrayType = require('ref-array');
// js analog of the data struct from fun.h
var Data = Struct({
    id: ref.types.int,
    message: ref.types.int,
});
// js analog of the type data *
var DataPointer = ref.refType(Data);
// pvoid is the js analog of void * in C
var pvoid = ref.refType(ref.types.void);
var PVoidArray = ArrayType(pvoid);
// set up our foreign functions from libfun
var libfun = ffi.Library('./libfun', {
    'fill_array': ['void', [PVoidArray, ref.types.int]],
    'print_array': ['void', [PVoidArray, ref.types.int]]
});
var myArray = new PVoidArray(10);
libfun.fill_array(myArray, 10);
libfun.print_array(myArray, 10); // this prints the array of structs correctly, from the C side
My question is: how can I print the array of structs from the JavaScript side? I want to pass myArray in as a PVoidArray. I do not want to create an array of struct pointers (i.e. create var DataArray = ArrayType(DataPointer), and use that instead of PVoidArray everywhere).
Let's start with myArray[0]. Can we use our Data type to take myArray[0] and, in a flexible way, make a struct out of it? Something like a function bufferToArray(myArray[0], Data) == a Data instance containing the data behind myArray[0].

Looking at the documentation for ref.get(), you can use exactly that:
ref.get(myArray.buffer, byteOffset, DataPointer).deref()
will return an instance of Data for the element at that byte offset into myArray's backing buffer (for element i, the offset is i * ref.sizeof.pointer).
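To print the whole array from the JavaScript side, the same call works in a loop. A minimal sketch under that assumption, reusing DataPointer and myArray as defined above:

for (var i = 0; i < 10; i++) {
    // decode the i-th pointer, then dereference it into a Data instance
    var d = ref.get(myArray.buffer, i * ref.sizeof.pointer, DataPointer).deref();
    console.log('(' + d.id + ', ' + d.message + ')'); // should mirror print_array's output
}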

Related

Why does AES encryption in .NET yield a different result than JavaScript?

I'm going crazy over this one. I spent the whole day on it and still can't understand what is going on. I'm using AES-256-CBC encryption in both .NET and JavaScript. For some reason I get different results, despite using the same key and IV. My code is:
JavaScript:
function convertStringToArrayBuffer(str) {
    var length = str.length;
    var bytes = new Uint8Array(length);
    for (var i = 0; i < length; i++) {
        bytes[i] = str.charCodeAt(i);
    }
    return bytes;
}
var keyB64 = "sy/d1Ddy/9K3p8x6pWMq2P8Qw2ftUjkkrAA7xFC7aK8=";
var viB64 = "t8eI2F+QmlUBWZJVIlTX6Q==";
var dataToEnc = "Test123!";
let dataInBytes = convertStringToArrayBuffer(dataToEnc);
let key = window.atob(keyB64);
let iv = window.atob(viB64);
console.log(key);
console.log(iv);
window.crypto.subtle.importKey("raw", convertStringToArrayBuffer(key).buffer, {name: "AES-CBC", length: 256}, false, ["encrypt"]).then(function(key) {
    console.log(key);
    window.crypto.subtle.encrypt({name: "AES-CBC", iv: convertStringToArrayBuffer(iv).buffer}, key, dataInBytes.buffer).then(function(encrypted) {
        console.log(encrypted);
    });
});
This one produces one ciphertext.
.Net:
public static void Test()
{
    var dataToEnc = "Test123!";
    var keyB64 = "sy/d1Ddy/9K3p8x6pWMq2P8Qw2ftUjkkrAA7xFC7aK8=";
    var viB64 = "t8eI2F+QmlUBWZJVIlTX6Q==";
    var key = Convert.FromBase64String(keyB64);
    var iv = Convert.FromBase64String(viB64);
    var data = Encoding.UTF8.GetBytes(dataToEnc);
    byte[] encrypted = null;
    using (Aes aesAlg = Aes.Create())
    {
        aesAlg.Key = key;
        aesAlg.IV = iv;
        ICryptoTransform encryptor = aesAlg.CreateEncryptor(aesAlg.Key, aesAlg.IV);
        using (MemoryStream msEncrypt = new MemoryStream())
        {
            using (CryptoStream csEncrypt = new CryptoStream(msEncrypt, encryptor, CryptoStreamMode.Write))
            {
                using (StreamWriter swEncrypt = new StreamWriter(csEncrypt))
                {
                    swEncrypt.Write(data);
                }
                encrypted = msEncrypt.ToArray();
            }
        }
    }
}
This one produces a different ciphertext.
I believe it is something trivial, yet I can't find it. I'd appreciate any hint.
If anyone else is having this issue, @Topaco's comment is the right way to go; I'm pasting it below:
The bug is in the C# code. You have to use swEncrypt.Write(dataToEnc) instead of swEncrypt.Write(data). The overload you are currently using implicitly executes data.ToString(), so you end up encrypting the string "System.Byte[]" rather than your plaintext.
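For anyone who wants to cross-check both sides from Node.js, here is a minimal sketch using Node's built-in crypto module (the key and IV are the values from the question; aes-256-cbc with its default PKCS#7 padding matches both WebCrypto's AES-CBC and .NET's Aes defaults):

var crypto = require('crypto');
var key = Buffer.from('sy/d1Ddy/9K3p8x6pWMq2P8Qw2ftUjkkrAA7xFC7aK8=', 'base64');
var iv = Buffer.from('t8eI2F+QmlUBWZJVIlTX6Q==', 'base64');
var cipher = crypto.createCipheriv('aes-256-cbc', key, iv);
// encrypt the same plaintext as the question; both the browser code and the
// fixed C# code should produce this ciphertext
var encrypted = Buffer.concat([cipher.update('Test123!', 'utf8'), cipher.final()]);
console.log(encrypted.toString('base64'));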

Similar logic in Java and JavaScript, but different results for DFS

I am trying to solve a DFS problem in JavaScript: determine whether a given graph has a path between a given source and destination.
Here is the solution in Java:
import java.io.*;
import java.util.*;

public class Main {
    static class Edge {
        int src;
        int nbr;
        int wt;

        Edge(int src, int nbr, int wt) {
            this.src = src;
            this.nbr = nbr;
            this.wt = wt;
        }
    }

    public static void main(String[] args) throws Exception {
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        int vtces = Integer.parseInt(br.readLine());
        ArrayList<Edge>[] graph = new ArrayList[vtces];
        for (int i = 0; i < vtces; i++) {
            graph[i] = new ArrayList<>();
        }
        int edges = Integer.parseInt(br.readLine());
        for (int i = 0; i < edges; i++) {
            String[] parts = br.readLine().split(" ");
            int v1 = Integer.parseInt(parts[0]);
            int v2 = Integer.parseInt(parts[1]);
            int wt = Integer.parseInt(parts[2]);
            graph[v1].add(new Edge(v1, v2, wt));
            graph[v2].add(new Edge(v2, v1, wt));
        }
        int src = Integer.parseInt(br.readLine());
        int dest = Integer.parseInt(br.readLine());
        boolean[] visited = new boolean[vtces];
        boolean ans = hasPath(graph, src, dest, visited);
        System.out.println(ans);
    }

    static boolean hasPath(ArrayList<Edge>[] graph, int src, int dest, boolean[] visited) {
        if (src == dest) {
            return true;
        }
        visited[src] = true;
        for (Edge edge : graph[src]) {
            if (visited[edge.nbr]) {
                continue;
            }
            boolean nbrHasPath = hasPath(graph, edge.nbr, dest, visited);
            if (nbrHasPath) {
                return true;
            }
        }
        return false;
    }
}
And here is the JavaScript solution
'use strict';

process.stdin.resume();
process.stdin.setEncoding('utf-8');

let inputString = '';
let currentLine = 0;

process.stdin.on('data', (inputStdin) => {
    inputString += inputStdin;
});

process.stdin.on('end', (_) => {
    inputString = inputString
        .trim()
        .split('\n')
        .map((string) => {
            return string.trim();
        });
    main();
});

function readline() {
    return inputString[currentLine++];
}

function readIntArray() {
    return readline()
        .split(' ')
        .map((num) => parseInt(num));
}

function readFloatArray() {
    return readline()
        .split(' ')
        .map((num) => parseFloat(num));
}

/*=====================START CODING HERE=====================*/

class Edge {
    constructor(source, neighbour, weight) {
        this.source = source;
        this.neighbour = neighbour;
        this.weight = weight;
    }
}

function main() {
    const vertices = parseInt(readline());
    const edges = parseInt(readline());
    const graph = new Array(vertices).fill([]);
    for (let i = 0; i < edges; i++) {
        let [s, d, w] = readIntArray();
        graph[s].push(new Edge(s, d, w));
        graph[d].push(new Edge(d, s, w));
    }
    const source = parseInt(readline());
    const destination = parseInt(readline());
    let visited = new Array(vertices).fill(false);
    console.log(hasPath(graph, source, destination, visited));
}

function hasPath(graph, source, dest, visited) {
    if (source === dest) {
        return true;
    }
    visited[source] = true;
    for (let i = 0; i < graph[source].length; i++) {
        let edge = graph[source][i];
        if (visited[edge.neighbour]) {
            continue;
        }
        let nbrHasPath = hasPath(graph, edge.neighbour, dest, visited);
        if (nbrHasPath) {
            return true;
        }
    }
    return false;
}
The function hasPath is the point of interest here. The Java solution passes all the test cases; the JavaScript solution, however, fails on one test case:
7
7
0 1 10
1 2 10
2 3 10
0 3 10
4 5 10
5 6 10
4 6 10
0
6
The function is required to return a boolean value. For the above test case, the Java solution returns false, whereas the JS solution returns true.
I am not able to figure out what I am doing wrong in JavaScript; any help is appreciated.
There is a subtle bug on this line:
const graph = new Array(vertices).fill([]);
This creates only two arrays. new Array(vertices) creates the first new array, and then .fill([]) creates the second and fills the first with references to the second.
So every array in graph is actually the same array. Try running this code to see:
const graph = new Array(5).fill([]);
graph[0].push('hello');
console.log(graph[0][0]); // prints "hello";
console.log(graph[1][0]); // also prints "hello";
console.log(graph[2][0]); // also prints "hello";
console.log(graph[3][0]); // also prints "hello";
console.log(graph[4][0]); // also prints "hello";
You can do this to fix that:
const graph = new Array(vertices).fill().map(() => []);
There may be other problems as well but that's what jumped out at me.
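If you prefer to avoid the empty fill(), an equivalent fix (a stylistic alternative, not from the original answer) is Array.from with a mapping function:

const graph = Array.from({ length: vertices }, () => []);
// the mapper runs once per slot, so every element is a distinct array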

Emscripten OOB Exception within C from Javascript

I'm trying to pass an array of strings from JS to C using Emscripten. I pass my array to C, but when I try to dereference it, it does not work; it raises "index out of bounds". I've tried many different things, so I'll post the minimal amount of code that reproduces the issue (at least on my machine!). I'm sure I missed something...
Basically I'm trying to put a string into an array (of strings) and hand that to C for printing (I'm doing other stuff as well, of course). Feel free to swap the gmp_/mpz_ calls for your own (they're not the problem).
Javascript:
var nbData = 1;
var nbColPerData = 1;
var data = new Uint32Array(nbColPerData);
data[0] = this.i2wasm(this.mod);
var nBytes = data.length * data.BYTES_PER_ELEMENT;
var dataPtr = Module._malloc(nBytes);
var dataHeap = new Uint8Array(Module.HEAP8.buffer, dataPtr, nBytes);
dataHeap.set(new Uint8Array(data.buffer));
// create array of pointers
var pointers = new Uint32Array(nbData);
for (var i = 0; i < pointers.length; i++) {
    pointers[i] = dataPtr + i * data.BYTES_PER_ELEMENT;
    console.log("pointers[" + i + "] = " + pointers[i].toString(16));
}
// create pointer array on the heap
var nPointerBytes = pointers.length * pointers.BYTES_PER_ELEMENT;
var pointerPtr = Module._malloc(nPointerBytes);
var pointerHeap = new Uint8Array(Module.HEAP8.buffer, pointerPtr, nPointerBytes);
pointerHeap.set(new Uint8Array(pointers.buffer));
printIntMatrix(pointerHeap, nbData);
Module._free(pointerHeap.byteOffset);
Module._free(dataHeap.byteOffset);
i2wasm (hexadecimal string to int array for wasm, works properly):
return allocate(intArrayFromString(integer),'i8',ALLOC_NORMAL);
C code:
void EMSCRIPTEN_KEEPALIVE printInt(char *hex) {
    mpz_t n;
    mpz_init(n);
    printf("printing INT at %p\n", hex);
    /////////////////////////////////////
    // HERE IT PANICS !
    /////////////////////////////////////
    printf("printing INT value: %c\n", *hex);
    if (mpz_set_str(n, hex, 16) != 0) {
        printf("hexadecimal invalid");
        return;
    }
    gmp_printf("printInt: %Zd\n", n);
    mpz_clear(n);
}

void EMSCRIPTEN_KEEPALIVE printIntMatrix(char **mat, int n) {
    printf("printIntMatrix !!\n");
    for (int i = 0; i < n; i++) {
        printf("matrix[%d] => \n", i);
        printf("matrix[%d] => %p\n", i, mat[i]);
        printInt(mat[i]);
        printf("matrix[%d] => %p DONE\n", i, mat[i]);
    }
}
i2wasm() returns a char *, so dataPtr is a char **.
And _printIntMatrix(pointerHeap, nbData) coerces its first argument as pointerHeap | 0; since pointerHeap is a Uint8Array rather than a number, that yields 0, so the call is effectively _printIntMatrix(NULL, nbData).
So, as long as printIntMatrix doesn't need a char ***mat, _printIntMatrix(dataPtr, nbData); would work.
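Putting that together, a sketch of the corrected call site (reusing the variables from the question's snippet, and assuming _printIntMatrix is exported on Module):

Module._printIntMatrix(dataPtr, nbData); // dataPtr is already a char ** on the wasm heap
Module._free(pointerPtr); // same address as pointerHeap.byteOffset
Module._free(dataPtr);    // same address as dataHeap.byteOffset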

SHA512HMAC in node.js does not match C#

I have a simple node.js script
var text = "Hello!";
var serverSeed = "d8818b38a14e7461e87301ad4b9809b558bcbca816b650cd470452e018ada255";
var crypto = require('crypto');
var hash = crypto.createHmac('sha512', serverSeed).update(text).digest('hex');
console.log(hash);
I also have the C# program
using System;
using System.Text;
using System.Security.Cryptography;

public class Program
{
    public static byte[] StringToByteArray(String hex)
    {
        int NumberChars = hex.Length;
        byte[] bytes = new byte[NumberChars / 2];
        for (int i = 0; i < NumberChars; i += 2)
            bytes[i / 2] = Convert.ToByte(hex.Substring(i, 2), 16);
        return bytes;
    }

    public static string ByteArrayToString(byte[] ba)
    {
        StringBuilder hex = new StringBuilder(ba.Length * 2);
        foreach (byte b in ba)
            hex.AppendFormat("{0:x2}", b);
        return hex.ToString();
    }

    public static void Main()
    {
        var serverSeed = StringToByteArray("d8818b38a14e7461e87301ad4b9809b558bcbca816b650cd470452e018ada255");
        using (var sha = new HMACSHA512(serverSeed))
        {
            var hash = sha.ComputeHash(Encoding.ASCII.GetBytes("Hello!"));
            Console.WriteLine(ByteArrayToString(hash));
        }
    }
}
I get from the node.js program
99e3b20acaa9c7674f074da950945ee897876b0afc02121d5a89fa581081465f3e01a084e9b05bed729b7fbdc1d485fb38af7d6f501cbc258b6c66add54410ba
And from the C# program
73250817a927f394b0912afcece47b8c12aeaed31892c64116ae9dd0d407f6e31d5c062d65f68a3cae09a8acb14a7cef1f6afd99f5a22f2b73e46a991fcd079a
What am I doing wrong to cause this difference?
Your C# code converts the hex characters in your seed to a byte array based on what the characters represent in hexadecimal.
But your node code passes the seed as a plain string, which converts each character to a byte by its character code.
So, for example, your C# code converts a to a byte with the value 10, but your node code converts a to a byte with the value 97.
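To see the mismatch concretely, here is a quick illustration (a sketch for clarity, not from the original answer):

Buffer.from('d8', 'hex');  // <Buffer d8>    - one byte with the value 0xd8
Buffer.from('d8', 'utf8'); // <Buffer 64 38> - the character codes of 'd' and '8'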
Your node code either needs to convert the hex in your string to a Buffer based on the hex values, as you are doing in C#:
var text = "Hello!";
var serverSeed = "d8818b38a14e7461e87301ad4b9809b558bcbca816b650cd470452e018ada255";
var crypto = require('crypto');
var buff = Buffer.from(serverSeed, "hex");
var hash = crypto.createHmac('sha512', buff).update(text).digest('hex');
console.log(hash);
Or in C#, instead of converting hex to bytes, you can get a byte array representing the actual characters of the seed using GetBytes from a System.Text.Encoding instance:
var serverSeed = Encoding.ASCII.GetBytes("d8818b38a14e7461e87301ad4b9809b558bcbca816b650cd470452e018ada255");
using (var sha = new HMACSHA512(serverSeed))
{
var hash = sha.ComputeHash(Encoding.ASCII.GetBytes("Hello!"));
Console.WriteLine(ByteArrayToString(hash));
}
Most likely you intended to pass the serverSeed as a buffer in the node.js code.

Assign a buffer location to imageData.data instead of assigning values with loop

I work with Emscripten, where I create a bitmap in C++ that I pass to my JavaScript code:
#include <emscripten.h>
#include <cstdint>
#include <cstdlib>
#include <ctime>

const int COMP = 4;

long createBitmap(int DIM)
{
    srand(time(NULL));
    // create buffer on the Emscripten heap
    long buffer = EM_ASM_INT(
        {
            var buffer = Module._malloc($0 * 1);
            return buffer;
        }, DIM * DIM * COMP);
    uint8_t *bitmap = (uint8_t *)buffer;
    // just randomly fill the buffer (RGBA, with alpha fixed at 128)
    for (int i = 0; i < DIM * DIM * COMP; i++)
    {
        if (i % 4 == 3)
            bitmap[i] = 128;
        else
            bitmap[i] = rand() % 256;
    }
    return buffer;
}
and in javascript I have the following code:
var dim = 1080;
var buffer = Module.createBitmap(dim);
var c = document.getElementById("bitmap");
var ctx = c.getContext("2d");
var imgData = ctx.createImageData(dim, dim);
for (var i = 0; i < dim * dim * 4; i++) {
    imgData.data[i] = Module.HEAPU8[buffer + i];
}
ctx.putImageData(imgData, 0, 0);
This works well, but I don't like looping to assign each element of the buffer to the imgData.data array. I know that the uint8_t type I use in C++ corresponds to the Uint8ClampedArray backing imgData.data. This seems like a good opportunity to simply point imgData.data at the beginning of the buffer so that I don't have to copy anything - is this possible?
I got this answer from the Emscripten Google group. The answer is actually quite simple:
var mappedBuffer = new Uint8ClampedArray(Module.HEAPU8.buffer, buffer, dim * dim * 4);
So HEAPU8.buffer accesses the underlying ArrayBuffer, and with that I can create a Uint8ClampedArray typed-array view over the bitmap.
Then you can simply write:
imgData.data.set(mappedBuffer);
ctx.putImageData(imgData, 0, 0);
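One caveat worth adding (an assumption about the build settings, not part of the original answer): if the module is compiled with ALLOW_MEMORY_GROWTH, Module.HEAPU8.buffer is replaced whenever the heap grows, and a view created over the old buffer goes stale. A minimal sketch that recreates the view before each draw:

function draw() {
    // recreate the view in case the wasm heap grew since the last frame
    var view = new Uint8ClampedArray(Module.HEAPU8.buffer, buffer, dim * dim * 4);
    imgData.data.set(view); // one bulk copy instead of a per-byte JS loop
    ctx.putImageData(imgData, 0, 0);
}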
