I am trying to use capture-website, which saves screenshots of web pages to a file.
It used to work perfectly until I restarted the server.
Now the code runs without errors, but it does NOT save a screenshot to disk.
Here is my code:
import http from 'http';
import url from 'url';
import querystring from 'querystring';
var mainURL;
const hostname = 'localhost';
const port = 8080;
import captureWebsite from 'capture-website';
const server = http.createServer((req, res) => {
  res.statusCode = 200;
  res.setHeader('Content-Type', 'text/plain');
  res.end('Hello World!\n');
  var url_parts = url.parse(req.url, true);
  var query = url_parts.query;
  mainURL = query.url;
  console.log(mainURL);
  (async () => {
    try {
      await captureWebsite.file('https://' + mainURL, mainURL + ".jpg", {
        overwrite: true,
        type: 'jpeg',
        quality: 0.5,
        width: 1480,
        height: 800,
        delay: 1
      });
    } catch (err) {
      console.log(err);
    }
  })();
});
There are no errors. I have also tried running pm2 logs - no errors there either.
Here is the file-writing code that belongs to the capture-website package:
captureWebsite.file = async (url, filePath, options = {}) => {
  const screenshot = await internalCaptureWebsite(url, options);
  await fs.writeFile(filePath, screenshot, {
    flag: options.overwrite ? 'w' : 'wx',
  });
};
Your problem is most likely that you cannot use slashes (/) in file names, because folders are separated by slashes. Since the file name here is built directly from the URL (mainURL + ".jpg"), any URL that contains a path turns the file name into a path through folders that may not exist.
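If that is the case, the fix is to make the URL filename-safe before handing it to captureWebsite.file. A minimal sketch of the handler body, assuming everything else stays the same (the toSafeFilename helper is a hypothetical name, not part of capture-website):

// Hypothetical helper: replace anything that is not filename-safe,
// e.g. "example.com/some/page" becomes "example.com_some_page"
const toSafeFilename = (value) => value.replace(/[^a-zA-Z0-9._-]/g, '_');

const fileName = toSafeFilename(mainURL) + '.jpg';

await captureWebsite.file('https://' + mainURL, fileName, {
  overwrite: true,
  type: 'jpeg',
  quality: 0.5,
  width: 1480,
  height: 800,
  delay: 1
});

That way a query like ?url=example.com/some/page still produces a single file in the working directory instead of a path into folders that don't exist.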
Related
Currently it's working with hash routing, but I want to remove the hash for SEO reasons. Is that possible?
It creates routes like:
https://mainDomain/program ← program build run
https://mainDomain/program/#detail ← program routing
I want https://mainDomain/program/#detail to become https://mainDomain/program/detail.
If I use BrowserRouter it shows "no such directory".
This is my build and deploy structure in AWS (screenshot not included here).
This might be helpful for you: you can create one Node server that serves your project builds.
let path = require("path");
let fsp = require("fs/promises");
let express = require("express");
let isProduction = process.env.NODE_ENV === "production";
async function createServer() {
  let app = express();

  /**
   * @type {import("vite").ViteDevServer}
   */
  let vite;

  if (!isProduction) {
    vite = await require("vite").createServer({
      root: process.cwd(),
      server: { middlewareMode: "ssr" },
    });
    app.use(vite.middlewares);
  } else {
    app.use(require("compression")());
    app.use(express.static(path.join(__dirname, "dist")));
  }

  app.use("*", async (req, res) => {
    let url = req.originalUrl;

    // Use a separate HTML file for the "Inbox" app.
    let appDirectory = url.startsWith("/inbox") ? "inbox" : "";

    let htmlFileToLoad;
    if (isProduction) {
      htmlFileToLoad = path.join("dist", appDirectory, "index.html");
    } else {
      htmlFileToLoad = path.join(appDirectory, "index.html");
    }

    try {
      let html = await fsp.readFile(
        path.join(__dirname, htmlFileToLoad),
        "utf8"
      );

      if (!isProduction) {
        html = await vite.transformIndexHtml(req.url, html);
      }

      res.setHeader("Content-Type", "text/html");
      return res.status(200).end(html);
    } catch (error) {
      if (!isProduction) vite.ssrFixStacktrace(error);
      console.log(error.stack);
      return res.status(500).end(error.stack);
    }
  });

  return app;
}

createServer().then((app) => {
  app.listen(3000, () => {
    console.log("HTTP server is running at http://localhost:3000");
  });
});
For extra information, you can refer to this link.
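On the client side, the routing change itself is just swapping HashRouter for BrowserRouter. Here is a rough sketch assuming react-router-dom v6, with placeholder Program and Detail components standing in for your real pages:

import React from "react";
import { BrowserRouter, Routes, Route } from "react-router-dom";

// Placeholder pages; in your app these would be your real components.
const Program = () => <h1>Program</h1>;
const Detail = () => <h1>Detail</h1>;

export default function App() {
  return (
    // basename matches the /program path the build is deployed under
    <BrowserRouter basename="/program">
      <Routes>
        <Route path="/" element={<Program />} />
        <Route path="detail" element={<Detail />} />
      </Routes>
    </BrowserRouter>
  );
}

The Node server above (or an equivalent rewrite rule on the hosting side) is what prevents the "no such directory" error, because every /program/* request has to end up serving index.html instead of looking for a real folder.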
I have checked other similar posts but it's still not working: it's giving me undefined when I console.log req.file.path. I also defined the multer middleware according to other posts, so I don't know what happened. But when I upload an image through Postman it works and returns a 201 as expected. Any help is appreciated!
ReactJS function:
const UploadImageToBackend = async () => {
  console.log(UploadedImage) // UploadedImage is something like /Users/.../.jpg
  let formData = new FormData()
  formData.append('profile',
    { name: new Date() + "_profile", uri: UploadedImage, type: 'image/jpg' })
  try {
    const res = await client.post('/upload-avatar', formData, {
      headers: {
        Accept: 'application/json',
        authorization: 'JWT some JWT'
      },
    })
    console.log(res.data)
  } catch (error) {
    console.log(error.response.data) // gives "Error while uploading Image, try after some time" error
  }
}
Backend Routes:
const fileFilter = (req, file, callback) => {
  if (file.mimetype.startsWith('image')) {
    callback(null, true)
  } else {
    callback('invalid image file', false)
  }
}
const storage = multer.diskStorage({})
const uploads = multer({storage, fileFilter})
router.post('/upload-avatar',isAuth, uploads.single('profile'),uploadProfile)
Backend upload function (to Cloudinary):
exports.uploadProfile = async (req, res) => {
  const user = req.user
  if (!user) {
    return res.status(401).json({ success: false, message: "unauthorized access!" })
  } else {
    console.log(req.file.path) // undefined
    try {
      const upload_result = await cloudinary.uploader.upload(req.file.path, {
        public_id: `${user._id}_profile`,
        width: 500,
        height: 500,
        crop: 'fill'
      })
      await User.findByIdAndUpdate(user._id, { avatar: upload_result.url })
      res.status(201).json({ success: true, message: "Profile picture successfully uploaded" })
    } catch (error) {
      res.status(500).json({
        success: false,
        message: "Error while uploading Image, try after some time"
      })
    }
  }
}
Create this function (to upload to Cloudinary), e.g. in "lib/cloudinary.js", and add this code:
import cloudinary from "cloudinary";
cloudinary.config({
  cloud_name: "YOUR_CLOUD_NAME",
  api_key: "YOUR_API_KEY",
  api_secret: "YOUR_API_SECRET",
});
const upload = {};
upload.subir = async (file) => {
  try {
    const res = await cloudinary.uploader.upload(file);
    // return the secure url
    return res.secure_url;
  } catch (error) {
    return error;
  }
};
export default upload;
Now in your controller, add this code (do not forget to install express-fileupload):
import cloudinary from "../lib/cloudinary.js";
const upload = {};
upload.uploadProfile = async (req, res) => {
  const a_file = await cloudinary.subir(req.files.a_file.tempFilePath);
  // show the secure url, e.g.:
  // https://res.cloudinary.com/xx/image/upload/yy/winter.jpg
  console.log(a_file);
  // ... more code
};
export default upload;
Now in your main application, e.g. "app.js", add this code to use the Express middleware for uploading files:
import express from 'express';
import fileUpload from 'express-fileupload';
const app = express();
app.use(express.json());
app.use(express.urlencoded({extended: true}));
app.use(fileUpload({useTempFiles: true}));
// ... more code
Test the function using Postman and the file will be uploaded.
NOTE: Remember that this is only an alternative; there are many other ways to do it. I hope this works for you.
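For completeness, here is a sketch of how a route could be wired to this controller once multer is dropped. The isAuth middleware and the ../middlewares/auth.js path are assumptions carried over from the question, and the form field has to be named a_file to match the controller above:

import express from "express";
// default export of the controller shown above
import upload from "../controllers/upload.controller.js";
// assumption: the same auth middleware used in the original multer route
import { isAuth } from "../middlewares/auth.js";

const router = express.Router();

// No multer here: express-fileupload has already populated req.files
router.post("/upload-avatar", isAuth, upload.uploadProfile);

export default router;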
Here is how my API works. You can find SeaweedFS on GitHub, and here is the code:
// /drivers/seaweedfs.js Defines how API interacts with SeaweedFS
const { error } = require("console");
const http = require("http");
module.exports = class Weed {
  constructor(mserver) {
    this.mserver = new URL(mserver);
  }
  get(fileId) {
    return new Promise((resolve, reject) => {
      let options = {
        hostname: this.mserver.hostname,
        port: this.mserver.port,
        method: "GET",
        path: `/${fileId}`,
        timeout: 6000,
      };
      let data;
      const fileReq = http.request(options, (res) => {
        console.log(`Statuscode ${res.statusCode}`);
        res.on("data", (response) => {
          data += response;
        });
        res.on("end", () => {
          resolve(data);
        });
      });
      fileReq.on("error", () => {
        console.error(error);
        reject();
      });
      fileReq.end();
    });
  }
};
// /routes/file.js An Express router
const express = require("express");
const router = express.Router();
const Weed = require("../drivers/seaweedfs");
let weedClient = new Weed("http://localhost:60002");
router.get("/:fileId", (req, res) => {
  weedClient.get(req.params.fileId)
    .then(data => {
      res.write(data)
      res.end()
    })
})
module.exports = router;
MongoDB driver not yet implemented.
When I try to GET a file (using Firefox; Hoppscotch says "Could not send request: Unable to reach the API endpoint. Check your network connection and try again."), I get something whose MIME type is application/octet-stream for some reason, and it's bigger than the original file. I know there must be some problems with my code, but I don't know where they are or how to fix them.
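One likely culprit is inside the get method: data starts out undefined and the chunks are appended with +=, which converts each Buffer to a string (and prepends the literal text "undefined"), so binary files come out larger and corrupted. Here is a sketch of a replacement for that method which collects Buffers and also resolves the content type so the route can forward it; the shape of the resolved object is my own choice, not anything defined by SeaweedFS:

get(fileId) {
  return new Promise((resolve, reject) => {
    const options = {
      hostname: this.mserver.hostname,
      port: this.mserver.port,
      method: "GET",
      path: `/${fileId}`,
      timeout: 6000,
    };
    const fileReq = http.request(options, (res) => {
      const chunks = [];
      res.on("data", (chunk) => chunks.push(chunk));
      res.on("end", () => {
        // Keep the body as a Buffer so binary data survives untouched
        resolve({
          body: Buffer.concat(chunks),
          contentType: res.headers["content-type"],
        });
      });
    });
    // Reject with the actual error instead of the unused console import
    fileReq.on("error", (err) => reject(err));
    fileReq.end();
  });
}

The Express route would then call res.set('Content-Type', contentType) (or res.type(contentType)) before res.end(body), so the client sees the original MIME type instead of application/octet-stream.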
I can't upload files to the server. I've run npm install express-fileupload and also added var fileUpload = require('express-fileupload') and app.use(fileUpload()). But when calling
router.post('/add-products', (req, res) => {
  console.log(req.body);
  console.log(req.files.image);
it says "cannot read image of null", i.e. req.files is null.
I can give you a full working example.
Project structure:
- storage (empty folder)
- routes
-> upload.route.js
- controllers
-> upload.controller.js
index.js
index.js
const express = require('express');
const app = express();
const route = require('./routes/upload.route');
app.use('/', route);
let port = 8000;
app.listen(port);
console.log(`API listens localhost:${port}`);
This is your upload.route.js
const express = require('express');
const router = express.Router();
// The controller below exports the handler as "upload"
const { upload } = require('../controllers/upload.controller');
router.use('/media/upload', upload);
module.exports = router;
This is upload.controller.js
const formidable = require('formidable');
const path = require('path');
exports.upload = async (req, res) => {
  try {
    // Receive the media and store it
    let [uploadPath, filename] = await processUpload(req);
    return res
      .status(200)
      .send({
        success: 1,
        message: "File uploaded",
        filename,
        uploadPath
      });
  } catch (error) {
    return res
      .status(400)
      .send({
        success: 0,
        message: "Oops! Something went wrong",
        errorObject: error.message
      });
  }
}

function processUpload(req) {
  return new Promise((resolve, reject) => {
    try {
      let uploadDir = __dirname + `/../storage`;
      // We use the formidable helper package
      let form = new formidable.IncomingForm()
      form.multiples = true;
      form.keepExtensions = true;
      // Upload path
      form.uploadDir = uploadDir;
      let result;
      form.on('fileBegin', function (name, file) {
        if (!file.type) reject(new Error("No media specified!"));
        const fileExt = path.extname(file.name);
        let filename = "test" + fileExt;
        file.path = path.join(uploadDir, filename);
        // Return the path where the file was uploaded, plus its name
        result = [file.path, filename];
      });
      form.parse(req, (err, fields, files) => {
        if (err) return reject("Upload failed.");
        resolve(result);
      });
    } catch (error) {
      reject("Upload failed.");
    }
  });
}
When you call localhost:8000/media/upload with a POST or PUT request using Postman form-data, you can see the uploaded file under the storage folder in the project.
Let me know if something goes wrong with the code.
Note: you need to install the formidable package (used for the upload) to run the example.
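If you would rather stay with express-fileupload as in your original code, a common reason for req.files being null is that app.use(fileUpload()) is registered after the router that reads req.files, or that the request is not sent as multipart/form-data. A minimal sketch of the ordering, reusing the /add-products route and image field from the question:

const express = require('express');
const fileUpload = require('express-fileupload');

const app = express();

// The middleware must be registered before any route that reads req.files
app.use(fileUpload());

app.post('/add-products', (req, res) => {
  // req.files is only populated for multipart/form-data requests
  if (!req.files || !req.files.image) {
    return res.status(400).send('No "image" field in the form data');
  }
  console.log(req.body);
  console.log(req.files.image.name);
  res.send('ok');
});

app.listen(8000);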
I have an http-proxy to proxy any website and inject a custom JS file before serving the HTML back to the client. Whenever I try to access the proxied website, it hangs up or the browser seems to load indefinitely. But when I check the HTML source, I can see that my custom JavaScript file was injected successfully. Here is the code:
const cheerio = require('cheerio');
const http = require('http');
const httpProxy = require('http-proxy');
const { ungzip } = require('node-gzip');
_initProxy(host: string) {
  let proxy = httpProxy.createProxyServer({});

  let option = {
    target: host,
    selfHandleResponse: true
  };

  proxy.on('proxyRes', function (proxyRes, req, res) {
    let body = [];
    proxyRes.on('data', function (chunk) {
      body.push(chunk);
    });
    proxyRes.on('end', async function () {
      let buffer = Buffer.concat(body);
      if (proxyRes.headers['content-encoding'] === 'gzip') {
        try {
          let $ = null;
          const decompressed = await ungzip(buffer);
          const scriptTag = '<script src="my-customjs.js"></script>';
          $ = await cheerio.load(decompressed.toString());
          await $('body').append(scriptTag);
          res.end($.html());
        } catch (e) {
          console.log(e);
        }
      }
    });
  });

  let server = http.createServer(function (req, res) {
    proxy.web(req, res, option, function (e) {
      console.log(e);
    });
  });

  console.log("listening on port 5051");
  server.listen(5051);
}
Can someone please tell me if I am doing anything wrong? It looks like node-http-proxy dies a lot and I can't rely much on it, since the proxy works sometimes and dies on the next run, depending on how many times I have run the server.
Your code looked fine so I was curious and tried it.
Although you do log a few errors, you don't handle several cases:
The server returns a response with no body (cheerio will generate an empty HTML body when this happens)
The server returns a response that is not gzipped (your code will silently discard the response)
I made a few modifications to your code.
Change initial options
let proxy = httpProxy.createProxyServer({
  secure: false,
  changeOrigin: true
});
Don't verify TLS certificates (secure: false)
Send the correct Host header (changeOrigin: true)
Remove the if statement and replace it with a ternary
const isCompressed = proxyRes.headers['content-encoding'] === 'gzip';
const decompressed = isCompressed ? await ungzip(buffer) : buffer;
You can also remove the two awaits on the cheerio calls; cheerio is not async and doesn't return an awaitable.
Final code
Here's the final code, which works. You mentioned that "it looks like node-http-proxy is dying a lot [...] depending on how many times I ran the server." I experienced no such stability issues, so your problems may lie elsewhere if that is happening (bad RAM?).
const cheerio = require('cheerio');
const http = require('http');
const httpProxy = require('http-proxy');
const { ungzip } = require('node-gzip');
const host = 'https://github.com';
let proxy = httpProxy.createProxyServer({
  secure: false,
  changeOrigin: true
});

let option = {
  target: host,
  selfHandleResponse: true
};

proxy.on('proxyRes', function (proxyRes, req, res) {
  console.log(`Proxy response with status code: ${proxyRes.statusCode} to url ${req.url}`);
  if (proxyRes.statusCode == 301) {
    throw new Error('You should probably do something here, I think there may be an httpProxy option to handle redirects');
  }
  let body = [];
  proxyRes.on('data', function (chunk) {
    body.push(chunk);
  });
  proxyRes.on('end', async function () {
    let buffer = Buffer.concat(body);
    try {
      let $ = null;
      const isCompressed = proxyRes.headers['content-encoding'] === 'gzip';
      const decompressed = isCompressed ? await ungzip(buffer) : buffer;
      const scriptTag = '<script src="my-customjs.js"></script>';
      $ = cheerio.load(decompressed.toString());
      $('body').append(scriptTag);
      res.end($.html());
    } catch (e) {
      console.log(e);
    }
  });
});

let server = http.createServer(function (req, res) {
  proxy.web(req, res, option, function (e) {
    console.log(e);
  });
});

console.log("listening on port 5051");
server.listen(5051);
I ended up writing a small Python server using CherryPy and proxying the web app with mitmproxy. Everything is now working smoothly. Maybe I was doing it wrong with node-http-proxy, but I have also become sceptical about using it in a production environment.