I have the following Lambda function to delete an item from my DynamoDB table.
const AWS = require("aws-sdk");
AWS.config.update({ region: "us-west-2" });

var docClient = new AWS.DynamoDB.DocumentClient();

var deleteContact = function (event, callback) {
  var params = {
    TableName: "Contacts",
    Key: {
      id: event.id
    },
    // A ConditionExpression takes a condition; "set ..." is UpdateExpression syntax
    ConditionExpression: "id = :id",
    ExpressionAttributeValues: {
      ":id": event.id
    }
  };

  console.log("Attempting a conditional delete...");
  docClient.delete(params, function (err, data) {
    if (err) {
      console.error("Unable to delete item. Error JSON:", JSON.stringify(err, null, 2));
    } else {
      console.log("DeleteItem succeeded:", JSON.stringify(data, null, 2));
    }
  });
};

exports.handler = deleteContact;
and here is the code in my React app that makes the request:
export const removeContact = createAsyncThunk(
  'contactsApp/contacts/removeContact',
  async (contactId, { dispatch, getState }) => {
    await axios.post('https://API.amazonaws.com/prod', {
      key1: `${contactId}`
    });
    console.log(contactId);
    return contactId;
  }
);
Currently, the code works and deletes the attributes in the row. But the problem is that it will not remove the id from the DynamoDB table, so everything is deleted EXCEPT the id.
As a result, I end up with ghost items in my DynamoDB table:
[DynamoDB screenshot]
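One mismatch visible in the two snippets: the React thunk posts the id under the field name key1, while the Lambda reads event.id. Unless something in between (for example an API Gateway mapping template) renames key1 to id, the delete receives an undefined key. A minimal client-side sketch that matches what the Lambda reads:

await axios.post('https://API.amazonaws.com/prod', {
  id: `${contactId}`
});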
I have a DynamoDB table with columns id (partition key) and date. I am trying to update the date where id=2, but I am getting the error below as the response:
message: "The provided key element does not match the schema"
__type: "com.amazon.coral.validate#ValidationException"
Below is my code:
import * as AWS from 'aws-sdk'

AWS.config.update({
  region: 'us-east-1',
  accessKeyId: 'MY_ACCESS_KEY',
  secretAccessKey: 'MY_SECRET_KEY'
});

const docClient = new AWS.DynamoDB.DocumentClient()

export const updateData = (tableName: any, id: any, date: any) => {
  let params = {
    TableName: tableName,
    Key: {
      "id": id
    },
    UpdateExpression: `set date = :date`,
    ExpressionAttributeValues: { ":date": date },
  };
  docClient.update(params, function (err, data) {
    if (!err) {
      console.log(data);
    } else {
      console.log(err)
    }
  })
}
I am calling the function like this:
updateData("mytable", "2", "2023-01-10")
Can anyone confirm what I am doing wrong here?
date is a reserved keyword in DynamoDB.
You need to use the ExpressionAttributeNames param:
let params = {
  TableName: tableName,
  Key: {
    "id": id
  },
  UpdateExpression: `set #date = :date`,
  ExpressionAttributeValues: { ":date": date },
  ExpressionAttributeNames: { "#date": "date" }
};
As for the exception, ensure that your table's partition key is id and of type String, and that the table does not have a sort key; if it does, you need to include the sort key in the Key map as well.
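For example, if the table also had a sort key (say a hypothetical createdAt attribute), both key elements would have to be supplied:

let params = {
  TableName: tableName,
  Key: {
    "id": id,              // partition key
    "createdAt": createdAt // hypothetical sort key; must match the table schema exactly
  },
  UpdateExpression: `set #date = :date`,
  ExpressionAttributeValues: { ":date": date },
  ExpressionAttributeNames: { "#date": "date" }
};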
I have a sqlite3 database request in my main.js that is triggered by a button click in renderer.js.
The request reaches my main.js. However, I cannot manage to await the results from the database. The issue already occurs in main.js, so I'm stuck even before anything is passed back to renderer.js.
I hope someone can tell me what I am missing.
Here is my code:
renderer.js
$(document).on('click', '#mybtn', function (e) {
  let query = "SELECT id, name FROM table1"

  // send (here is the issue)
  window.api.send("db-query", query)

  // (next step: receive, might be wrong but not yet my problem)
  window.api.receive(channel = "receive-db-data", (data) => {
    console.log(data);
  });
});
main.js
ipcMain.on(channel = 'db-query', async (e, query) => {
  console.log('query received: ' + query);
  let data = await db_request(query).then(
    function (value) {
      console.log('value: ' + value);
      return value;
    },
    function (error) {
      console.log('error fetching data from db on query:' + query);
    }
  )
  console.log("response ready: " + data); // returns undefined if 'return value' is used (otherwise nothing)
  // to send back to renderer.js later
  e.sender.send("db-data", data)
})
let db_request = async (query) => {
  let data = []
  var sqlite3 = require('sqlite3').verbose();
  var dbPath = require('path').resolve(__dirname, '../../Fin.db')
  var db = new sqlite3.Database(dbPath)
  db.serialize(function () {
    db.each(query, function (err, row) {
      console.log(row)
      data.push({ "id": row.id, "name": row.name })
    });
  });
  db.close();
  console.log('db_request:' + data)
  return data
}
And this is what my terminal output looks like:
query received: SELECT id, type, name FROM table1
db_request:
value:
response ready: undefined
{ id: 1, name: 'a' }
{ id: 2, name: 'b' }
{ id: 3, name: 'c' }
You have to convert the db_request result to a Promise that resolves once all rows have been pushed to data. When you use the await keyword, there is no need to handle the promise with a .then chain.
main.js will look like this:
const sqlite3 = require('sqlite3').verbose();
const dbPath = require('path').resolve(__dirname, '../../Fin.db')

ipcMain.on('db-query', async (e, query) => {
  console.log('query received: ' + query)
  try {
    const data = await db_request(query); // remove .then
    console.log('value: ' + data)
    // to send back to renderer.js later
    e.sender.send("db-data", data)
  } catch (error) {
    console.log('error fetching data from db on query:' + query);
    e.sender.send("db-data", []) // send empty data or error ???
  }
})

let db_request = (query) => {
  const db = new sqlite3.Database(dbPath)
  return new Promise((resolve, reject) => { // return a promise
    // I don't think you need serialize for this case
    const data = []
    db.each(query, (err, row) => {
      console.log(err, row)
      if (!err) {
        data.push({ "id": row.id, "name": row.name })
      }
    }, (error) => { // completion callback runs after the last row
      if (error) {
        reject(error)
      } else {
        resolve(data)
      }
    });
  })
}
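One more detail for the receive side: main.js sends the result on the "db-data" channel, while the renderer above listens on "receive-db-data". The channel names must match, so the listener would become (a sketch against the same window.api preload bridge):

window.api.receive("db-data", (data) => {
  console.log(data);
});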
I am trying to query a DynamoDB table and want to iterate over the resulting items in a function in my AWS Lambda. I am not able to extract the result from the DynamoDB query: it is inside the closure, and I am able to console.log it there, but I am not able to assign it to any variable in the scope of the outer function.
What should I do to get it outside?
function check(id) {
  // build params
  let params = {
    TableName: 'demo_table',
    KeyConditionExpression: '#key = :id',
    Limit: 5,
    ScanIndexForward: false,
    ExpressionAttributeNames: {
      '#key': process.env.PRIMARYKEY
    },
    ExpressionAttributeValues: {
      ':id': id
    }
  };

  // query ddb
  let result = {};
  ddb.query(params, function (err, data) {
    if (err) {
      console.log("AN ERROR OCCURRED\n");
      console.log(err);
    } else {
      // How to copy the data from here to outside??
      // I can console log and see the data
      result = data;
    }
  });
  console.log(result); // returns {}
}
const check = async (id) => {
  // build params
  let params = {
    TableName: 'demo_table',
    KeyConditionExpression: '#key = :id',
    Limit: 5,
    ScanIndexForward: false,
    ExpressionAttributeNames: {
      '#key': process.env.PRIMARYKEY
    },
    ExpressionAttributeValues: {
      ':id': id
    }
  };

  // wrap the callback-style query in a promise so it can be awaited
  let result = await new Promise((resolve, reject) => {
    ddb.query(params, function (err, data) {
      if (err) reject(err)
      else resolve(data)
    });
  })
  console.log(result); // now contains the query response
}
By using promises you can get the data; a database read is an asynchronous operation.
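Note that check still only logs the result; nothing is returned to the caller. If you add return result; at the end of check, a caller can await it, e.g. in a hypothetical async Lambda handler:

exports.handler = async (event) => {
  const result = await check(event.id); // requires 'return result;' inside check
  console.log(result.Items);            // query matches live under Items
  return result.Items;
};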
I have a Lambda function that's supposed to be writing to a database. When I run it on my local machine it works, but when I upload it to Lambda and test it, it doesn't put anything in the database. The role the function uses has full access to DynamoDB, and it's the exact same code that works fine when I run it from my laptop. Any idea why that would be the case?
Here's my Lambda. The DAO class contains the code that actually accesses Dynamo. I'm just trying to upload some constant strings right now.
const DAO = require('./PostStatusDAO.js');

exports.handler = async (event, context, callback) => {
  var dao = new DAO();
  dao.post("this is a test", "#jordan", "#matt", "none");

  const response = {
    statusCode: 200,
    body: {
      result: "good"
    }
  };
  return response;
};
const AWS = require('aws-sdk');
const ddb = new AWS.DynamoDB.DocumentClient({ region: 'us-west-2' });

class PostStatusDAO {
  post(in_text, in_user, in_author, in_attachment) {
    var params = {
      Item: {
        user: String(in_user),
        timestamp: Date.now(),
        author: String(in_author),
        text: String(in_text),
        attachment: String(in_attachment),
      },
      TableName: 'Feed',
    };
    console.log(params);

    var result = ddb.put(params, (err, data) => {
      console.log("callback");
      if (err) {
        console.log("Error: ", err);
      } else {
        console.log("Data: ", data);
      }
    });
    // console.log(result);
  }
}

module.exports = PostStatusDAO;
To see why your function is failing, you have to either wait for the async write to complete or return the promise back to the caller/runtime, like this:
const DAO = require('./PostStatusDAO.js');

exports.handler = async (event, context, callback) => {
  var dao = new DAO();
  // Return new promise
  return new Promise(function (resolve, reject) {
    // Do async job
    dao.post("this is a test", "#jordan", "#matt", "none", function (err, data) {
      if (err) {
        console.log("Error: ", err);
        reject(err);
      } else {
        console.log("Data: ", data);
        resolve(data);
      }
    })
  })
};
const AWS = require('aws-sdk');
const ddb = new AWS.DynamoDB.DocumentClient({ region: 'us-west-2' });

class PostStatusDAO {
  async post(in_text, in_user, in_author, in_attachment, callback) {
    var params = {
      Item: {
        user: String(in_user),
        timestamp: Date.now(),
        author: String(in_author),
        text: String(in_text),
        attachment: String(in_attachment),
      },
      TableName: 'Feed',
    };
    console.log(params);
    return ddb.put(params, callback).promise();
  }
}

module.exports = PostStatusDAO;
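Since the handler is already async, an equivalent and slightly simpler sketch is to skip the explicit Promise wrapper and await the promise that post now returns (same DAO as above; the callback argument can simply be omitted):

const DAO = require('./PostStatusDAO.js');

exports.handler = async (event) => {
  const dao = new DAO();
  // Wait for the put to finish before returning, so Lambda doesn't freeze the process early
  const data = await dao.post("this is a test", "#jordan", "#matt", "none");
  console.log("Data: ", data);
  return {
    statusCode: 200,
    body: { result: "good" }
  };
};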
I have the following code, which I am trying to use to upload data to DynamoDB local with Node.js.
Is there a possible workaround for the following error?
Unable to add event undefined . Error JSON: {
  "message": "One of the required keys was not given a value",
  "code": "ValidationException",
  "time": "2016-06-28T04:02:26.250Z",
  "requestId": "970984e4-3546-41f0-95f9-6f1b7167c510",
  "statusCode": 400,
  "retryable": false,
  "retryDelay": 0
}
Here is the code. I would like the Item: {} map to accept whatever values are present and add them to the table.
var AWS = require("aws-sdk");
var fs = require('fs');

AWS.config.update({
  region: "us-west-2",
  endpoint: "http://localhost:8000"
});

var docClient = new AWS.DynamoDB.DocumentClient();

console.log("Importing movies into DynamoDB. Please wait.");

var allMovies = JSON.parse(fs.readFileSync('moviedata.json', 'utf8'));

allMovies.forEach(function (movie) {
  var params = {
    TableName: "Movies",
    Item: {
      "year": movie.year,
      "title": movie.title,
      "info": movie.info,
      "twitter": movie.twitter
    }
  };

  docClient.put(params, function (err, data) {
    if (err) {
      console.error("Unable to add movie", movie.title, ". Error JSON:", JSON.stringify(err, null, 2));
    } else {
      console.log("PutItem succeeded:", movie.title);
    }
  });
});
As you are looping over an asynchronous call, you need a safeguard that the current request has resolved before you begin the next.
var AWS = require("aws-sdk");
var fs = require('fs');

const tableName = 'Movies';

AWS.config.update({
  region: "local",
  endpoint: "http://localhost:8000"
});

var docClient = new AWS.DynamoDB.DocumentClient();

console.log("Importing movies into DynamoDB. Please wait.");

var allMovies = JSON.parse(fs.readFileSync('moviedata.json', 'utf8'));

for (let i = 0, p = Promise.resolve(); i < allMovies.length; i++) {
  p = p.then(_ => new Promise(resolve =>
    setTimeout(function () {
      var params = {
        TableName: tableName,
        Item: {
          "year": allMovies[i].year,
          "title": allMovies[i].title,
          "info": allMovies[i].info
        }
      };
      docClient.put(params, function (err, data) {
        if (err) {
          console.error("Unable to add movie", allMovies[i].title, ". Error JSON:", JSON.stringify(err, null, 2));
        } else {
          console.log("PutItem succeeded:", allMovies[i].title);
        }
        // Resolve only after this put has completed, so the writes stay sequential
        resolve();
      });
    }, 10)
  ));
}
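As for the original ValidationException, "One of the required keys was not given a value" usually means that some record in moviedata.json is missing one of the table's key attributes. Assuming the Movies table uses year as its partition key and title as its sort key (as in the AWS sample data), a guard at the top of the forEach callback in the original code would skip such records:

if (movie.year === undefined || movie.title === undefined) {
  console.log("Skipping record with missing key attributes:", JSON.stringify(movie));
  return; // both key attributes are required for PutItem
}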