I have come across the code below on the internet.
const { Octokit } = require("@octokit/rest");
const { Base64 } = require("js-base64");
const fs = require("fs");
require("dotenv").config();
const octokit = new Octokit({
auth: process.env.GITHUB_ACCESS_TOKEN,
});
const main = async () => {
try {
const content = fs.readFileSync("./input.txt", "utf-8");
const contentEncoded = Base64.encode(content);
const { data } = await octokit.repos.createOrUpdateFileContents({
// replace the owner and email with your own details
owner: "your-github-account",
repo: "octokit-create-file-example",
path: "OUTPUT.md",
message: "feat: Added OUTPUT.md programatically",
content: contentEncoded,
committer: {
name: `Octokit Bot`,
email: "your-email",
},
author: {
name: "Octokit Bot",
email: "your-email",
},
});
console.log(data);
} catch (err) {
console.error(err);
}
};
main();
I was able to successfully create a file on GitHub. Is there a way to update an existing file (like adding some data to the file) with Octokit.js?
I believe you do the same thing, but you have to provide a sha property in your call to createOrUpdateFileContents containing the sha of the blob you are replacing:
https://docs.github.com/en/rest/reference/repos#create-or-update-file-contents
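For instance, here is a minimal sketch of the update path, reusing the placeholder owner/repo/path values from the question (getContent returns the existing file's metadata, including its blob sha):
// Fetch the existing file so its blob sha can be passed along with the update
const { data: existing } = await octokit.repos.getContent({
  owner: "your-github-account",
  repo: "octokit-create-file-example",
  path: "OUTPUT.md",
});
const updatedContent = Base64.encode(content + "\nsome appended data");
const { data } = await octokit.repos.createOrUpdateFileContents({
  owner: "your-github-account",
  repo: "octokit-create-file-example",
  path: "OUTPUT.md",
  message: "feat: Updated OUTPUT.md programmatically",
  content: updatedContent,
  sha: existing.sha, // required when replacing an existing file
});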
I have some code that lists folders; I can navigate through them and into files too, but it only works in the console:
const fs = require('fs/promises');
const fsToRead = require('fs')
const path = require('path');
const inquirer = require('inquirer');
const yargs = require('yargs');
const { lstatSync, readdirSync } = require('fs');
const http = require('http');
const url = require('url');
const options = yargs
.options('p', {
describe: 'Pattern',
default: ''
})
.options('d', {
describe: 'Path to directory',
default: process.cwd()
}).argv;
console.log(options);
class itemsList {
constructor(path, fileName) {
this.path = path;
this.fileName = fileName;
}
get folders() {
return lstatSync(this.path).isDirectory();
}
}
let executionDir = process.cwd();
const start = async () => {
const list = await fs.readdir(executionDir);
const items = list.map(fileName =>
new itemsList(path.join(executionDir, fileName), fileName));
const item = await inquirer.prompt([
{
name: 'fileName',
type: 'list',
message: `Choose: ${executionDir}`,
choices: items.map(item => ({name: item.fileName, value: item})),
}
]).then(answer => answer.fileName);
if (item.folders) {
executionDir = item.path;
return await start();
} else {
const data = await fs.readFile(item.path, 'utf-8');
if (!options.p) {
console.log(data)
}
else {
const regExp = new RegExp(options.p, 'igm');
console.log(data.match(regExp));
}
}
}
start();
How can I make this show up on a page in the browser, so that in the web version I could navigate into the folders, and if the item is a file it would show its contents? I can't manage to do it.
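A minimal sketch of one way to do that with the http module already required above (the port, the bare-bones HTML, and the lack of any path sanitisation are illustrative assumptions, not a complete answer):
const http = require('http');
const fsp = require('fs/promises');
const path = require('path');

const root = process.cwd();

http.createServer(async (req, res) => {
  try {
    // Map the requested URL onto a path under the starting directory
    const urlPath = decodeURIComponent(req.url.split('?')[0]);
    const target = path.join(root, urlPath);
    const stats = await fsp.lstat(target);
    if (stats.isDirectory()) {
      // Directory: render every entry as a link so it can be clicked into
      const entries = await fsp.readdir(target);
      const links = entries
        .map(name => `<a href="${path.posix.join(urlPath, name)}">${name}</a>`)
        .join('<br>');
      res.writeHead(200, { 'Content-Type': 'text/html; charset=utf-8' });
      res.end(links);
    } else {
      // File: return its contents as plain text
      const data = await fsp.readFile(target, 'utf-8');
      res.writeHead(200, { 'Content-Type': 'text/plain; charset=utf-8' });
      res.end(data);
    }
  } catch (err) {
    res.writeHead(404);
    res.end('Not found');
  }
}).listen(3000);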
I'm trying to stream JSON from MongoDB to S3 with the new version of @aws-sdk/lib-storage:
"@aws-sdk/client-s3": "^3.17.0"
"@aws-sdk/lib-storage": "^3.34.0"
"JSONStream": "^1.3.5",
Try #1: It seems that I'm not using JSONStream.stringify() correctly:
import { MongoClient } from 'mongodb';
import { S3Client } from '@aws-sdk/client-s3';
import { Upload } from '@aws-sdk/lib-storage';
import JSONStream from 'JSONStream';
import { env } from '../../../env';
const s3Client = new S3Client({ region: env.AWS_REGION });
export const uploadMongoStreamToS3 = async (connectionString, collectionName) => {
let client;
try {
client = await MongoClient.connect(connectionString);
const db = client.db();
const readStream = db.collection(collectionName).find('{}').limit(5).stream();
readStream.pipe(JSONStream.stringify());
const upload = new Upload({
client: s3Client,
params: {
Bucket: 'test-bucket',
Key: 'extracted-data/benda_mongo.json',
Body: readStream,
},
});
await upload.done();
}
catch (err) {
log.error(err);
throw err.name;
}
finally {
if (client) {
client.close();
}
}
};
Error #1:
TypeError [ERR_INVALID_ARG_TYPE]: The first argument must be one of
type string, Buffer, ArrayBuffer, Array, or Array-like Object.
Received type object
at Function.from (buffer.js:305:9)
at getDataReadable (/.../node_modules/@aws-sdk/lib-storage/src/chunks/getDataReadable.ts:6:18)
at processTicksAndRejections (internal/process/task_queues.js:94:5)
at Object.getChunkStream (/.../node_modules/@aws-sdk/lib-storage/src/chunks/getChunkStream.ts:17:20)
at Upload.__doConcurrentUpload (/.../node_modules/@aws-sdk/lib-storage/src/Upload.ts:121:22)
at async Promise.all (index 0)
at Upload.__doMultipartUpload (/.../node_modules/@aws-sdk/lib-storage/src/Upload.ts:196:5)
at Upload.done (/.../node_modules/@aws-sdk/lib-storage/src/Upload.ts:88:12)
Try #2, using the variable jsonStream:
const readStream = db.collection(collectionName).find('{}').limit(5).stream();
const jsonStream = readStream.pipe(JSONStream.stringify());
const upload = new Upload({
client: s3Client,
params: {
Bucket: 'test-bucket',
Key: 'extracted-data/benda_mongo.json',
Body: jsonStream,
},
});
Error #2:
ReferenceError: ReadableStream is not defined
at Object.getChunk (/.../node_modules/@aws-sdk/lib-storage/src/chunker.ts:22:30)
at Upload.__doMultipartUpload (/.../node_modules/@aws-sdk/lib-storage/src/Upload.ts:187:24)
at Upload.done (/.../node_modules/@aws-sdk/lib-storage/src/Upload.ts:88:37)
Try #3: use stream.PassThrough:
client = await MongoClient.connect(connectionString);
const db = client.db();
const readStream = db.collection(collectionName).find('{}').limit(5).stream();
readStream.pipe(JSONStream.stringify()).pipe(uploadStreamFile('benda_mongo.json'));
...
const stream = require('stream');
export const uploadStreamFile = async(fileName) => {
try{
const pass = new stream.PassThrough();
const upload = new Upload({
client: s3Client,
params: {
Bucket: 'test-bucket',
Key: 'extracted-data/benda_mongo.json',
Body: pass,
},
});
const res = await upload.done();
log.info('finished uploading file', fileName);
return res;
}
catch(err){
return;
}
};
Error #3:
dest.on is not a function at Stream.pipe (internal/streams/legacy.js:30:8)
Try #4: mongodb.stream({transform: doc => JSON.stringify...}) instead of JSONStream:
import { MongoClient } from 'mongodb';
import { S3Client } from '@aws-sdk/client-s3';
import { Upload } from '@aws-sdk/lib-storage';
import { env } from '../../../env';
const s3Client = new S3Client({ region: env.AWS_REGION });
export const uploadMongoStreamToS3 = async (connectionString, collectionName) => {
let client;
try {
client = await MongoClient.connect(connectionString);
const db = client.db();
const readStream = db.collection(collectionName)
.find('{}')
.limit(5)
.stream({ transform: doc => JSON.stringify(doc) + '\n' });
const upload = new Upload({
client: s3Client,
params: {
Bucket: 'test-bucket',
Key: 'extracted-data/benda_mongo.json',
Body: readStream,
},
});
await upload.done();
}
catch (err) {
log.error('waaaaa', err);
throw err.name;
}
finally {
if (client) {
client.close();
}
}
};
Error #4:
TypeError [ERR_INVALID_ARG_TYPE]: The first argument must be one of
type string, Buffer, ArrayBuffer, Array, or Array-like Object.
Received type object
at Function.from (buffer.js:305:9)
at getDataReadable (/.../node_modules/@aws-sdk/lib-storage/src/chunks/getDataReadable.ts:6:18)
at processTicksAndRejections (internal/process/task_queues.js:94:5)
at Object.getChunkStream (/.../node_modules/@aws-sdk/lib-storage/src/chunks/getChunkStream.ts:17:20)
at Upload.__doConcurrentUpload (/.../node_modules/@aws-sdk/lib-storage/src/Upload.ts:121:22)
at async Promise.all (index 0)
at Upload.__doMultipartUpload (/.../node_modules/@aws-sdk/lib-storage/src/Upload.ts:196:5)
at Upload.done (/.../node_modules/@aws-sdk/lib-storage/src/Upload.ts:88:12)
Try #5: using stream.PassThrough() and returning pass to pipe into:
export const uploadMongoStreamToS3 = async (connectionString, collectionName) => {
let client;
try {
client = await MongoClient.connect(connectionString);
const db = client.db();
const readStream = db.collection(collectionName).find('{}').limit(5).stream({ transform: doc => JSON.stringify(doc) + '\n' });
readStream.pipe(uploadStreamFile());
}
catch (err) {
log.error('waaaaa', err);
throw err.name;
}
finally {
if (client) {
client.close();
}
}
};
const stream = require('stream');
export const uploadStreamFile = async() => {
try{
const pass = new stream.PassThrough();
const upload = new Upload({
client: s3Client,
params: {
Bucket: 'test-bucket',
Key: 'extracted-data/benda_mongo.json',
Body: pass,
},
});
await upload.done();
return pass;
}
catch(err){
log.error('pawoooooo', err);
return;
}
};
Error #5:
TypeError: dest.on is not a function
at Cursor.pipe (_stream_readable.js:680:8)
After reviewing your error stack traces, the problem probably has to do with the fact that the MongoDB driver provides a cursor in object mode, whereas the Body parameter of Upload requires a traditional stream, suitable to be processed by Buffer in this case.
Taking your original code as a reference, you can try providing a Transform stream to deal with both requirements.
Please consider, for instance, the following code:
import { Transform } from 'stream';
import { MongoClient } from 'mongodb';
import { S3Client } from '@aws-sdk/client-s3';
import { Upload } from '@aws-sdk/lib-storage';
const s3Client = new S3Client({ region: env.AWS_REGION });
export const uploadMongoStreamToS3 = async (connectionString, collectionName) => {
let client;
try {
client = await MongoClient.connect(connectionString);
const db = client.db();
const readStream = db.collection(collectionName).find('{}').limit(5).stream();
// We are creating here a Transform to adapt both sides
const toJSONTransform = new Transform({
writableObjectMode: true,
transform(chunk, encoding, callback) {
this.push(JSON.stringify(chunk) + '\n');
callback();
}
});
readStream.pipe(toJSONTransform);
const upload = new Upload({
client: s3Client,
params: {
Bucket: 'test-bucket',
Key: 'extracted-data/benda_mongo.json',
Body: toJSONTransform,
},
});
await upload.done();
}
catch (err) {
log.error(err);
throw err.name;
}
finally {
if (client) {
client.close();
}
}
};
In the code, toJSONTransform defines the writable side of the stream as object mode; in contrast, the readable side will be suitable for being read by the S3 Upload method... at least, I hope so.
Regarding the second error you reported, the one related to dest.on: I initially thought, and wrote to you about the possibility, that the error was caused by uploadStreamFile returning a Promise rather than a stream, and by that Promise being passed to the pipe method, which requires a stream; basically, that you returned the wrong variable. But I hadn't realized that you are passing the PassThrough stream as a param to the Upload method: please be aware that this stream doesn't contain any information, because you never write anything to it; the contents of the readable stream obtained from the MongoDB query are never passed to it, nor to the Upload itself.
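As a minimal sketch of that idea, reusing the stringifying readStream and the Upload params from your attempt, the readable has to be piped into the PassThrough before the upload is awaited:
const pass = new stream.PassThrough();
readStream.pipe(pass); // the PassThrough now actually receives the stringified documents
const upload = new Upload({
  client: s3Client,
  params: {
    Bucket: 'test-bucket',
    Key: 'extracted-data/benda_mongo.json',
    Body: pass,
  },
});
await upload.done();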
I found an additional solution using stream.PassThrough; note that using JSONStream will stream an array of objects instead of one object after the other:
const stream = require('stream');
const JSONStream = require('JSONStream');
export const uploadMongoStreamToS3 = async (connectionString, collectionName) => {
let client;
try {
client = await MongoClient.connect(connectionString);
const db = client.db();
const passThroughStream = new stream.PassThrough();
const readStream = db.collection(collectionName)
.find('{}')
.stream();
readStream.on('end', () => passThroughStream.end());
readStream.pipe(JSONStream.stringify()).pipe(passThroughStream);
await uploadStreamFile('benda_mongo.json', passThroughStream);
}
catch (err) {
log.error(err);
throw err.name;
}
finally {
if (client) {
client.close();
}
}
};
export const uploadStreamFile = async(fileName, stream) => {
try{
log.info('start uploading file', fileName);
const upload = new Upload({
client: s3Client,
params: {
Bucket: 'test-bucket',
Key: `${fileName}`,
Body: stream,
},
});
const res = await upload.done();
log.info('finished uploading file', fileName);
return res;
}
catch(err){
log.error(err);
return;
}
};
I am fairly new to Node.js and I am wondering how (or even if) I can read from and write to a JSON file. I am trying to create an accessible punishment history.
Ideally I would want to be able to create something along the lines of this:
{
"punishments": {
"users": {
"<example user who has a punishment history>": {
"punishment-1567346": {
"punishment-id": "1567346",
"punishment-type": "mute",
"punishment-reason": "<reason>"
},
"punishment-1567347": {
"punishment-id": "1567347",
"punishment-type": "ban",
"punishment-reason": "<reason>"
}
}
}
}
}
Then I would have a way to access the formatted punishment history. I genuinely have no clue where to start.
You can use the Node.js built-in module fs to do read/write operations.
Step #1 - Import fs
const fs = require('fs');
Step #2 - Read the file
let rawdata = fs.readFileSync('punishmenthistory.json');
let punishments = JSON.parse(rawdata);
console.log(punishments);
Now you can use the punishments variable to check the data inside the JSON file. You can also change the data, but for now it only resides inside the variable.
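For example, a new record could be added in memory before the write step below (the user key and the punishment id here are just placeholders taken from the question's structure):
// Add a new punishment entry for a user (placeholder key and id)
const userId = "<example user who has a punishment history>";
punishments.punishments.users[userId]["punishment-1567348"] = {
  "punishment-id": "1567348",
  "punishment-type": "kick",
  "punishment-reason": "<reason>"
};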
Step #3 - Write to the File
let data = JSON.stringify(punishments);
fs.writeFileSync('punishmenthistory.json', data);
Full code:
const fs = require('fs');
let rawdata = fs.readFileSync('punishmenthistory.json');
let punishments = JSON.parse(rawdata);
console.log(punishments);
let data = JSON.stringify(punishments);
fs.writeFileSync('punishmenthistory.json', data);
References:
https://stackabuse.com/reading-and-writing-json-files-with-node-js/
Use the Node.js File System module: https://nodejs.org/dist/latest-v14.x/docs/api/fs.html.
Here I have used the writeFileSync API to write to the file and readFileSync to read from it. Also, when writing, don't forget to JSON.stringify(data) because you are writing the data to a JSON file.
const fs = require("fs");
const path = require("path");
// Write Data
const data = {
"punishments": {
"users": {
"<example user who has a punishment history>": {
"punishment-1567346": {
"punishment-id": "1567346",
"punishment-type": "mute",
"punishment-reason": "<reason>"
},
"punishment-1567347": {
"punishment-id": "1567347",
"punishment-type": "ban",
"punishment-reason": "<reason>"
}
}
}
}
};
fs.writeFileSync(path.join(__dirname, "outputfilepath", "outputfile.json"), JSON.stringify(data), "utf8");
// Read data
const rData = fs.readFileSync(path.join(__dirname, "outputfilepath", "outputfile.json"), "utf8");
const jsonData = JSON.parse(rData);
Here is a working example:
https://repl.it/repls/OutrageousInbornBruteforceprogramming#index.js
You can do something like this for reading:
const fs = require('fs')
function jsonReader(filePath, cb) {
fs.readFile(filePath, (err, fileData) => {
if (err) {
return cb && cb(err)
}
try {
const object = JSON.parse(fileData)
return cb && cb(null, object)
} catch(err) {
return cb && cb(err)
}
})
}
jsonReader('./customer.json', (err, customer) => {
if (err) {
console.log(err)
return
}
console.log(customer.address) // => "Infinity Loop Drive"
})
and like this for writing:
const fs = require('fs')
const customer = {
name: "Newbie Co.",
order_count: 0,
address: "Po Box City",
}
const jsonString = JSON.stringify(customer)
fs.writeFile('./newCustomer.json', jsonString, err => {
if (err) {
console.log('Error writing file', err)
} else {
console.log('Successfully wrote file')
}
})
I've done this previously in Python using:
with open(path.join(path.dirname(__file__), "job.yaml")) as f:
    body = yaml.safe_load(f)
try:
    api_response = api_instance.create_namespaced_job(namespace, body)
Looking at the source of the Node.js API client:
public createNamespacedJob (namespace: string, body: V1Job, includeUninitialized?: boolean, pretty?: string, dryRun?: string, options: any = {}) : Promise<{ response: http.IncomingMessage; body: V1Job; }> {
How can I generate the V1Job?
I've tried the below but get back a very verbose error message / response:
const k8s = require('@kubernetes/client-node');
const yaml = require('js-yaml');
const fs = require('fs');
const kc = new k8s.KubeConfig();
kc.loadFromDefault();
const k8sApi = kc.makeApiClient(k8s.BatchV1Api);
var namespace = {
metadata: {
name: 'test123',
},
};
try {
var job = yaml.safeLoad(fs.readFileSync('job.yaml', 'utf8'));
k8sApi.createNamespacedJob(namespace, job).then(
(response) => {
console.log('Created namespace');
console.log("Success!")
},
(err) => {
console.log(err);
console.log(job);
console.log("Err")
},
);
} catch (e) {
console.log(e);
}
V1Job seems to be an ordinary object, so the below worked.
The namespace had to be a string rather than an object...
const k8s = require('@kubernetes/client-node');
const yaml = require('js-yaml');
const fs = require('fs');
const kc = new k8s.KubeConfig();
kc.loadFromDefault();
const k8sApi = kc.makeApiClient(k8s.BatchV1Api);
try {
var job = yaml.safeLoad(fs.readFileSync('job.yaml', 'utf8'));
k8sApi.createNamespacedJob("default", job).then(
(response) => {
console.log("Success")
},
(err) => {
console.log(e);
process.exit(1);
},
);
} catch (e) {
console.log(e);
process.exit(1);
}
This is the same as chris-stryczynski's example with two slight modifications. Also, please note that chris-stryczynski's example with Node.js 8 results in (at least on my side):
(upon execution of k8sApi.createNamespacedJob)
TypeError [ERR_INVALID_ARG_TYPE]: The "original" argument must be of type function at promisify
This error does not occur with Node.js 12.
Here is the modified version:
const k8s = require('@kubernetes/client-node');
const yaml = require('js-yaml');
const fs = require('fs');
const kc = new k8s.KubeConfig();
kc.loadFromDefault(); //You might consider using kc.loadFromFile(...) here
const k8sApi = kc.makeApiClient(k8s.BatchV1Api);
try {
var job = yaml.load(fs.readFileSync('job.yaml', 'utf8')); // Change#1 safeLoad->load
k8sApi.createNamespacedJob("default", job).then(
(response) => {
console.log("Success")
},
(err) => {
console.log(err); // Change#2 e->err
process.exit(1);
},
);
} catch (e) {
console.log(e);
process.exit(1);
}
I am trying to implement separation of concerns by using module exports. All of the code works without the separation of concerns, but as soon as I try to import generateUrlArray() via const db = require('../db'), nothing works. Node.js is not giving me any error on the back end. The error I am getting on the front end is Error: SyntaxError: Unexpected end of JSON input. I am positive that the error is coming from the back end. Let me know if you have any ideas.
controller.js
const db = require('../db')
exports.getWebApiList = (req, res) => {
(async function fetchDataList() {
try {
const urlArray = await db.generateUrlArray({}, { _id: 0 })
return res.send(urlArray)
} catch (ex) {
console.log(`fetchDataList error: ${ex}`)
}
})()
}
db/index.js
const { List } = require('./models/List')
const generateUrlArray = (query, projection) => {
const dataFromDB = List.find(query, projection).select('symbol')
return linkArray = dataFromDB.map(item => {
return link = `https://www.alphavantage.co/query?function=GLOBAL_QUOTE&symbol=${item.symbol}&apikey=6BUYSS9QR8Y9HH15`
})
}
module.exports = { generateUrlArray }
models/List.js
const mongoose = require('mongoose')
mongoose.Promise = global.Promise
const ParentSchemaSymbolList = new mongoose.Schema({
symbol: String
})
module.exports.List = mongoose.model('List', ParentSchemaSymbolList)
The problem is likely that generateUrlArray never awaits the List.find query before mapping over it, so the controller does not send back the resolved array. Declaring the function async and awaiting the query fixes it:
const generateUrlArray = async (query, projection) => {
const dataFromDB = await List.find(query, projection).select('symbol')
const linkArray = dataFromDB.map(item => {
return link = `https://www.alphavantage.co/query?function=GLOBAL_QUOTE&symbol=${item.symbol}&apikey=6BUYSS9QR8Y9HH15`
})
return linkArray
}
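With that change in db/index.js, the existing export can stay as it is, and the controller's awaited call should now resolve to the array of URLs:
module.exports = { generateUrlArray }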