Trying to query an AWS S3 object using S3 Select - JavaScript

Below is the code I have written to get the result, but I get null in the response.
I am using the selectObjectContent API to get the results with a simple SQL query:
const bucket = 'myBucketname'
const key = 'file.json.gz'
const query = "SELECT * FROM s3object "
const params = {
  Bucket: bucket,
  Key: key,
  ExpressionType: "SQL",
  Expression: query,
  InputSerialization: {
    CompressionType: "GZIP",
    JSON: {
      Type: "LINES"
    },
  },
  OutputSerialization: {
    JSON: {
      RecordDelimiter: ","
    }
  }
}
s3.selectObjectContent(params, (err, data) => {
  if (err) {
    console.log(data)
  } else {
    console.log(err)
  }
})

I have found the solution: I was logging the error when getting a successful result, so I corrected it below. I also found a way to read the stream buffer data.
s3.selectObjectContent(params, (err, data) => {
  if (err) {
    console.log(err)
  } else {
    const eventStream = data.Payload;
    // Read events as they are available
    eventStream.on('data', (event) => {
      if (event.Records) {
        // event.Records.Payload is a buffer containing
        // a single record, partial records, or multiple records
        const records = event.Records.Payload.toString();
        console.log(records)
      } else if (event.Stats) {
        console.log(`Processed ${event.Stats.Details.BytesProcessed} bytes`);
      } else if (event.End) {
        console.log('SelectObjectContent completed');
      }
    });
  }
})
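If the full result is needed as parsed JSON rather than raw chunks, one approach (a sketch, assuming the RecordDelimiter of "," configured above) is to buffer the Records payloads and parse them once the End event arrives:

s3.selectObjectContent(params, (err, data) => {
  if (err) return console.log(err)
  let payload = ''
  data.Payload.on('data', (event) => {
    if (event.Records) {
      // accumulate raw chunks; record boundaries may not align with chunk boundaries
      payload += event.Records.Payload.toString()
    } else if (event.End) {
      // records are comma-delimited (per OutputSerialization above), so strip
      // any trailing comma and wrap in brackets to get a valid JSON array
      const records = JSON.parse(`[${payload.replace(/,\s*$/, '')}]`)
      console.log(`parsed ${records.length} records`)
    }
  })
})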


How to use SQL RETURNING id in front-end JavaScript?

I have this request handler in my server.js file:
app.post("/insertRowtoMain", (req, res) => {
  const { nodeid, maintenancetype, personnel, process, date } = req.body;
  //console.log("description", description)
  let insertQuery = `insert into maintenance(nodeid,maintenancetype, personnel, process, date)
    values(${nodeid},'${maintenancetype}',${personnel},'${process}', '${date}') returning id`
  pool.query(insertQuery, (err, result) => {
    if (!err) {
      console.log("insertRowtoMain", result.rows);
      res.status(200).send(result.rows);
    } else {
      res.status(404).json(err.message)
      console.log("insertRowtoMain error", err.message)
    }
  })
})
And I am calling this endpoint from the front-end with this code:
const addNewMainTypes = async () => {
  try {
    await axios.post(`${serverBaseUrl}/insertRowtoMain`, {
      nodeid: newMaintenance.nodeid,
      maintenancetype: newMaintenance.maintenancetype,
      personnel: newMaintenance.personnel,
      process: newMaintenance.process,
      date: newMaintenance.date,
    });
  } catch (err) {
    throw err;
  }
  const maintenance = await getMain();
  // console.log("main list", maintenanceList);
  setMaintenance(maintenance);
  const maintenanceList = await getMainTypes();
  // console.log("main list", maintenanceList);
  setMaintenanceList(maintenanceList);
};
When I insert a new row with this function, I get the returned id in the server.js terminal.
How can I use that id in the front-end?
Save the response of the POST request in a variable and access its data property:
// Here, "data" will be a variable with the response data
const { data } = await axios.post(`${serverBaseUrl}/insertRowtoMain`, {
  nodeid: newMaintenance.nodeid,
  maintenancetype: newMaintenance.maintenancetype,
  personnel: newMaintenance.personnel,
  process: newMaintenance.process,
  date: newMaintenance.date,
});
/* Seems like your API is returning an array of objects with an "id" property, so, for example... */
// The following should console.log the id of the first element of the array
console.log(data[0]?.id);
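Putting it together in the addNewMainTypes function above (a sketch that assumes getMain, getMainTypes, and the two setters stay as they are), the returned id can be captured right after the POST:

const addNewMainTypes = async () => {
  const { data } = await axios.post(`${serverBaseUrl}/insertRowtoMain`, {
    nodeid: newMaintenance.nodeid,
    maintenancetype: newMaintenance.maintenancetype,
    personnel: newMaintenance.personnel,
    process: newMaintenance.process,
    date: newMaintenance.date,
  });
  // id produced by the "returning id" clause on the server
  const insertedId = data[0]?.id;
  console.log("new maintenance row id:", insertedId);
  setMaintenance(await getMain());
  setMaintenanceList(await getMainTypes());
};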

Opening a WebSocket connection from app.get (Express)

I am running the Bitfinex WebSocket connection from a Node.js server using Express.
I have an API endpoint so that I can ask for a specific book via the URL (e.g. http://localhost:4000/BTC-USD/buy/100).
The problem is that when I try to make the connection from inside app.get, the websocket doesn't respond; it only responds when the connection is made outside the handler. But that way I can't pass the parameter needed to establish the proper connection. The code that doesn't work:
app.get("/:pair/:type/:amount", async (req, res) => {
  let { pair, type, amount } = req.params;
  try {
    wsConnection(pair);
    wsMessageHandler()
    const result = await simulateEffectivePrice({ pair, type, amount })
    res.send({ "effectivePrice": result })
  } catch (error) {
    res.send({ "error": error.message })
  }
})
The code that works:
wsConnection();
wsMessageHandler()
app.get("/:pair/:type/:amount", async (req, res) => {
  let { pair, type, amount } = req.params
  try {
    // if (type !== "buy" || type !== "sell") throw new Error("wrong type input")
    const result = await simulateEffectivePrice({ pair, type, amount })
    res.send({ "effectivePrice": result })
  } catch (error) {
    res.send({ "error": error.message })
  }
})
The wsConnection function is this (it requires the book you want to receive information from):
const wsConnection = async () => {
  let msg = JSON.stringify({
    event: 'subscribe',
    channel: 'book',
    symbol: 'tBTCUSD',
    // len: "25"
  })
  try {
    w.on('open', () => {
      w.send(JSON.stringify({ event: 'conf', flags: 65536 + 131072 }))
      w.send(msg)
    })
  } catch (error) {
    throw new Error("BUILD CUSTOM ERROR")
  }
}
"symbol" would need to be the parameter specified on the endpoint by the user
Thank you very much
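A minimal sketch of one way this could be wired up, assuming w is the ws WebSocket instance used above and has not opened yet: let wsConnection accept the pair, build the Bitfinex symbol from it, and resolve only once the socket is open, so the route handler can await it before simulating the price.

const wsConnection = (pair) =>
  new Promise((resolve, reject) => {
    // derive the Bitfinex symbol (e.g. "tBTCUSD") from the ":pair" route
    // parameter (e.g. "BTC-USD")
    const symbol = 't' + pair.replace('-', '')
    const msg = JSON.stringify({ event: 'subscribe', channel: 'book', symbol })
    w.on('open', () => {
      w.send(JSON.stringify({ event: 'conf', flags: 65536 + 131072 }))
      w.send(msg)
      resolve()
    })
    w.on('error', reject)
  })

app.get("/:pair/:type/:amount", async (req, res) => {
  const { pair, type, amount } = req.params
  try {
    await wsConnection(pair)   // subscription is in place before simulating
    wsMessageHandler()
    const result = await simulateEffectivePrice({ pair, type, amount })
    res.send({ "effectivePrice": result })
  } catch (error) {
    res.send({ "error": error.message })
  }
})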

Electron await sqlite3 response

I have a sqlite3 database request in my main.js that is triggered by a button click in renderer.js.
The request reaches my main.js. However, I cannot manage to await the results from the database. The issue occurs already in main.js, so I'm stuck even before anything is passed back to the renderer.js.
I hope someone can tell me what I am missing.
Here is my code:
renderer.js
$(document).on('click', '#mybtn', function (e) {
  let query = "SELECT id, name FROM table1"
  // send (here is the issue)
  window.api.send("db-query", query)
  // (next step: receive, might be wrong but not yet my problem)
  window.api.receive(channel="receive-db-data", (data) => {
    console.log(data);
  });
});
main.js
ipcMain.on(channel='db-query', async (e, query) => {
  console.log('query received: ' + query);
  let data = await db_request(query).then(
    function (value) {
      console.log('value: ' + value);
      return value;
    },
    function (error) {
      console.log('error fetching data from db on query:' + query);
    }
  )
  console.log("response ready: " + data); // returns undefined if 'return value' is used (otherwise nothing)
  // to send back to renderer.js later
  e.sender.send("db-data", data)
})
let db_request = async (query) => {
  let data = []
  var sqlite3 = require('sqlite3').verbose();
  var dbPath = require('path').resolve(__dirname, '../../Fin.db')
  var db = new sqlite3.Database(dbPath)
  db.serialize(function () {
    db.each(query, function (err, row) {
      console.log(row)
      data.push({ "id": row.id, "name": row.name })
    });
  });
  db.close();
  console.log('db_request:' + data)
  return data
}
And this is what my terminal output looks like:
query received: SELECT id, type, name FROM table1
db_request:
value:
response ready: undefined
{ id: 1, name: 'a' }
{ id: 2, name: 'b' }
{ id: 3, name: 'c' }
You have to convert the db_request result to a Promise that resolves once all rows have been pushed to data. When you use the await keyword, there is no need to handle the promise with a .then chain.
main.js will look like this:
const sqlite3 = require('sqlite3').verbose();
const dbPath = require('path').resolve(__dirname, '../../Fin.db')

ipcMain.on(channel='db-query', async (e, query) => {
  console.log('query received: ' + query)
  try {
    const data = await db_request(query); // remove .then
    console.log('value: ' + data)
    // to send back to renderer.js later
    e.sender.send("db-data", data)
  } catch (error) {
    console.log('error fetching data from db on query:' + query);
    e.sender.send("db-data", []) // send empty data or error ???
  }
})

let db_request = (query) => {
  const db = new sqlite3.Database(dbPath)
  return new Promise((resolve, reject) => { // return a promise
    // I think you don't need serialize for this case
    const data = []
    db.each(query, (err, row) => {
      console.log(err, row)
      if (!err) {
        data.push({ "id": row.id, "name": row.name })
      }
    }, (error) => {
      if (error) {
        reject(error)
      } else {
        resolve(data)
      }
    });
  })
}
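As a side note, sqlite3 also offers db.all, which collects every row and invokes a single callback, so db_request could arguably be written more compactly (a sketch assuming the same dbPath):

let db_request = (query) => {
  const db = new sqlite3.Database(dbPath)
  return new Promise((resolve, reject) => {
    // db.all gathers all rows before calling back exactly once
    db.all(query, (err, rows) => {
      db.close()
      if (err) {
        reject(err)
      } else {
        resolve(rows.map(({ id, name }) => ({ id, name })))
      }
    })
  })
}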

Save responses to multiple GET requests in a single local json file - node.js

The problem:
I have a function that maps over countries and regions and creates an array of urls, then makes a GET request to each one. I want to save the responses in a single json file, and I want this function to handle that as well.
Expected results:
I expected to be able to run the function as needed (like when source data is updated), and get a new or updated local json file with all the data objects in one array.
Actual results:
A file with only one record, an array with the last response object.
What I've tried:
I tried using fs.writeFile and fs.readFile. I did not get any errors, but the resulting file had only one record, even though the console showed all the requests being made. It seemed that each response was being written over the previous one.
Minimal reproducible (Node.js) example:
const fs = require('fs')
const axios = require('axios')

// subset of countries and codes for demo purposes
const countryDirList = [
  'antarctica',
  'central-asia',
]

const fbCountryCodes = [
  { "region": "antarctica", "codes": ["ay", "bv"] },
  { "region": "central-asia", "codes": ["kg", "kz"] },
]

const callingUrlsInSequence = async () => {
  fs.writeFile('./test.json', '[]', function (err) {
    if (err) throw err
    console.log('File - test.json - was created successfully.')
  })
  try {
    const urlConstructor = countryDirList.map(async (country) => {
      console.log('countries mapped', country)
      return fbCountryCodes.filter(async (f) => {
        if (country === f.region) {
          const urls = f.codes.map(async (c) => {
            const response = await axios({
              method: 'get',
              url: `https://raw.githubusercontent.com/factbook/factbook.json/master/${country}/${c}.json`,
              responseType: 'json',
              headers: {
                'Content-Type': 'application/json',
              },
            })
            fs.readFile('./test.json', function (err, data) {
              let json = JSON.parse(data)
              json.push(response.data)
              setTimeout(() => {
                fs.writeFile('./test.json', JSON.stringify(json), function (err) {
                  if (err) throw err
                  console.log('The "data to append" was appended to file!')
                })
              }, 1000)
            })
            return response.data
          })
          const dataArr = await Promise.all(urls)
          dataArr.map((item) =>
            console.log(
              'dataArr',
              item.Government['Country name']['conventional short form']
            )
          )
        }
      })
    })
  } catch (err) {
    console.log('axios error: ', err)
  }
}

callingUrlsInSequence()
I'm re-writing this question now because it kept getting downvoted, and I could see that it was not very concise.
I can also see now that, obviously, the fs.readFile inside the fs.writeFile is not going to work in the code I provided, but I'm leaving it there in case it might help someone else, combined with the solution I provided in response to my own question.
I ended up learning how to solve this problem with both node-fetch and axios. They are not exactly the same.
For both:
First, check for the existence of the destination file, and create one if it's not already there.
const { existsSync, writeFile } = require('fs')

const createNew = () => {
  try {
    if (existsSync('./data.json')) {
      console.log('file exists')
      return
    } else {
      writeFile('./data.json', '[]', (error) => {
        if (error) {
          console.log('fs.writeFile - create new - error: ', error)
          return
        }
      })
    }
  } catch (err) {
    console.log('fs.existsSync error: ', err)
  }
}

createNew()
Then make the array of urls:
const countryDirList = [...countries]
const fbCountryCodes = [...codes]
const urls = []

// maybe a reducer function would be better, but my map + filter game is much stronger X-D
const makeUrls = (countriesArr, codesArr) =>
  countriesArr.map((country) => {
    return codesArr.filter((f) => {
      if (country === f.region) {
        return f.codes.map((code) => {
          return urls.push(
            `https://raw.githubusercontent.com/factbook/factbook.json/master/${country}/${code}.json`
          )
        })
      }
    })
  })

makeUrls(countryDirList, fbCountryCodes)
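For what it's worth, the same list can be built without the outer urls array and push side effect by using flatMap (a sketch over the same countryDirList and fbCountryCodes):

const makeUrls = (countriesArr, codesArr) =>
  countriesArr.flatMap((country) =>
    codesArr
      .filter((f) => f.region === country)
      .flatMap((f) =>
        f.codes.map(
          (code) =>
            `https://raw.githubusercontent.com/factbook/factbook.json/master/${country}/${code}.json`
        )
      )
  )

const urls = makeUrls(countryDirList, fbCountryCodes)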
Next, make the requests.
Axios:
fs.readFile('./data.json', (error, data) => {
  if (error) {
    console.log(error)
    return
  }
  Promise.all(
    urls.map(async (url) => {
      let response
      try {
        response = await axios.get(url)
      } catch (err) {
        console.log('axios error: ', err)
        return err
      }
      return response
    })
  )
    .then((res) => {
      const responses = res.map((r) => r.data)
      fs.writeFile('./data.json', JSON.stringify(responses, null, 2), (err) => {
        if (err) {
          console.log('Failed to write data')
          return
        }
        console.log('Updated data file successfully')
      })
    })
    .catch((err) => {
      console.log('axios error: ', err)
    })
})
Node-fetch:
// same basic structure: readFile, with fetch and writeFile inside
const fetch = require('node-fetch')

fs.readFile('./data2.json', (error, data) => {
  if (error) {
    console.log(error)
    return
  }
  async function fetchAll() {
    const results = await Promise.all(
      urls.map((url) => fetch(url).then((r) => r.json()))
    )
    fs.writeFile('./data2.json', JSON.stringify(results, null, 2), (err) => {
      if (err) {
        console.log('Failed to write data')
        return
      }
      console.log('Updated data file successfully')
    })
  }
  fetchAll()
})
Both methods produce exactly the same output: a JSON file containing a single array with all of the response objects in it.
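Since the data argument from fs.readFile is never actually used and fs.writeFile overwrites the whole file anyway, the read step can arguably be dropped altogether; here is a sketch of a simplified axios variant, reusing the fs, axios, and urls from above:

const fetchAndSave = async () => {
  // fetch everything in parallel, then write the file once
  const responses = await Promise.all(urls.map((url) => axios.get(url)))
  const results = responses.map((r) => r.data)
  fs.writeFile('./data.json', JSON.stringify(results, null, 2), (err) => {
    if (err) {
      console.log('Failed to write data')
      return
    }
    console.log('Updated data file successfully')
  })
}

fetchAndSave()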

nextPageToken doesn't change

I am trying to use the Node.js Google API client to iterate over my email messages:
const google = require('googleapis');
The problem is that I keep getting the same email messages, and every response contains the exact same nextPageToken.
I also noticed that trying to control the maxResults parameter has no effect. I tried adding it to the options object and to the query object, and it had no effect either way.
function listThreads(auth, nextPageToken) {
  const gmail = google.gmail('v1');
  const query = {
    auth: auth,
    userId: 'me',
    q: ""
  };
  const options = { maxResults: 20 }
  if (nextPageToken) {
    query.pageToken = nextPageToken;
  }
  gmail.users.messages.list(query, options, function (err, response) {
    if (err) {
      console.log('The API returned an error: ' + err);
      return;
    }
    const messages = response.messages;
    if (messages.length === 0) {
      console.log('No threads found.');
    } else {
      for (let i = 0; i < messages.length; i++) {
        const message = messages[i];
        gmail.users.messages.get({
          auth: auth,
          userId: 'me',
          id: message.id
        }, function (err, response) {
          if (err) {
            logger.error("Failed to get email", err);
          } else {
            const parser = new Parser();
            parser.parse(response.payload, response.labelIds);
          }
        });
      }
    }
    if (response.nextPageToken) {
      listThreads(auth, response.nextPageToken);
    }
  });
}
What am I doing wrong?
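One thing worth checking (a sketch, assuming the callback-style googleapis client used above): maxResults and pageToken are request parameters of users.messages.list, so they belong in the same object as auth and userId; passed in a separate options argument they are not applied to the request, which would explain why changing them appears to have no effect.

function listThreads(auth, nextPageToken) {
  const gmail = google.gmail('v1');
  const params = {
    auth: auth,
    userId: 'me',
    maxResults: 20,   // request parameter, alongside auth/userId
    q: ''
  };
  if (nextPageToken) {
    params.pageToken = nextPageToken;   // same object as the other parameters
  }
  gmail.users.messages.list(params, function (err, response) {
    if (err) {
      console.log('The API returned an error: ' + err);
      return;
    }
    // ...process response.messages as above...
    if (response.nextPageToken) {
      listThreads(auth, response.nextPageToken);
    }
  });
}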
