I am using gTTS module to convert text to .mp3 saving it temporarily. After saving I am try to stream the file but when I look at response object returned by the endpoint the arraybuffer looks empty.
const express = require('express')
, router = express.Router()
, bodyParser = require('body-parser')
, gtts = require('node-gtts')('en')
, path = require('path')
, filePath = path.join(__dirname, 'temp', 'temp.mp3')
, fs = require('fs')
, ms = require('mediaserver')
router.use(bodyParser.urlencoded({
extended: true
}));
router.use(bodyParser.json());
router.get('/speech', function(req, res) {
console.log("query", req.query.text);
saveFile(req.query.text,req.query.lang)
.then(response => {
console.log('looking for file', filePath)
fs.exists(filePath, (exists) => {
if (exists) {
// console.log('going to stream');
// ms.pipe(req, res, filePath);
// console.log("findigh");
const stat = fs.statSync(filePath)
const fileSize = stat.size
const range = req.headers.range
console.log('size ', fileSize);
if (range) {
const parts = range.replace(/bytes=/, "").split("-")
const start = parseInt(parts[0], 10)
const end = parts[1] ? parseInt(parts[1], 10) : fileSize-1
const chunksize = (end-start)+1
const file = fs.createReadStream(path, {start, end})
const head = {
'Content-Range': `bytes ${start}-${end}/${fileSize}`,
'Accept-Ranges': 'bytes',
'Content-Length': chunksize,
'Content-Type': 'audio/mp3',
}
res.writeHead(206, head);
file.pipe(res);
} else {
const head = {
'Content-Length': fileSize,
'Content-Type': 'audio/mp3',
}
res.writeHead(200, head)
fs.createReadStream(filePath).pipe(res)
}
} else {
console.log('file not found');
res.send('Error - 404');
res.end();
}
})
})
.catch(err => {
console.log('error in saving file' ,err);
});
});
const saveFile = (text, language) => {
return new Promise((resolve, reject) => {
gtts.save(filePath, text, function() {
console.log('create file')
resolve("done");
})
});
}
module.exports = router`
The fetch call looks like this:
fetch(`/speech?lang=en&text=${translationBody.value}`, {
method:'GET',
headers: new Headers({'content-type': 'application/json'})
})
.then(res => res)
.then(res => console.log(res))
.catch(err => console.log('err', err))
Is there something wrong in the endpoint or should I change my fetch call?
Yes, you do need a bit of extra footwork here, setting a couple of headers. Sample code would look like this:
const http = require('http');
const fileSystem = require('fs');
const path = require('path');
http.createServer(function(request, response) {
const filePath = path.join(__dirname, 'file.mp3');
const stat = fileSystem.statSync(filePath);
response.writeHead(200, {
'Content-Type': 'audio/mpeg',
'Content-Length': stat.size
});
const readStream = fileSystem.createReadStream(filePath);
readStream.pipe(response);
})
.listen(3000);
Related
I have an app which calls a cloud function endpoint:
import './App.css';
import React from 'react';
import axios from 'axios';
function App() {
const [file, setFile] = React.useState(null);
function fileSelected(e)
{
setFile(()=> e.target.files[0]);
}
function uploadFile()
{
console.log(file)
const fd = new FormData();
fd.append('image', file, file.name);
console.log(file);
console.log(file.name);
axios.post('https://us-central1-athelasapp.cloudfunctions.net/uploadFile', fd)
.then(res => {
console.log(res)
});
}
return (
<div className="App">
<input type="file" onChange={fileSelected}/>
<input type="submit" onClick={uploadFile}/>
</div>
);
}
export default App;
and the endpoint tries to parse it with Busboy however, it throws a 500 error. I have Busboy imported but it throws:
xhr.js:220 POST https://us-central1-athelasapp.cloudfunctions.net/uploadFile 500`
const functions = require("firebase-functions");
const express = require("express");
const cors = require("cors");
const app = express();
const Busboy = require("busboy");
const os = require("os");
const path = require("path");
const fs = require("fs");
const gcconfig = {
projectId: "athelasapp",
keyFilename: "athelasapp-firebase-adminsdk-yojnp-1e9141a009.json",
};
const {Storage} = require("#google-cloud/storage");
const gcs = new Storage(gcconfig);
app.use(cors({origin: "http://localhost:3000"}));
// // Create and Deploy Your First Cloud Functions
// https://firebase.google.com/docs/functions/write-firebase-functions
exports.uploadFile = functions.https.onRequest(app);
app.post("/", (req, res) =>{
if (req.method !== "POST") {
return res.status(500).json({
message: "Method Does Not Work",
});
}
const busboy = new Busboy({headers: req.headers});
let uploadData = null;
busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
const filepath = path.join(os.tmpdir(), filename);
uploadData = {file: filepath, type: mimetype};
file.pipe(fs.createWriteStream(filepath));
res.status(200).json({
imageDetails: uploadData,
});
});
busboy.on("finish", ()=>{
const bucket = gcs.bucket("athelasapp.appspot.com");
bucket.upload(uploadData.file, {
uploadType: "media",
metadata: {
metadata: {
contentType: uploadData.type,
},
},
});
}).then(() => {
res.status(200).json({
message: "Method Works!",
});
}).catch((err) =>{
res.status(500).json({
message: "Method Failed!",
});
});
busboy.end(req.rawBody);
res.status(200).json({
message: "Method Works",
});
});
I cant find any errors in my code or how it's implemented? Could it be I'm passing in the wrong things in the request? I think it might have to do with the nomenclature of Busboy
Here's a working gist streaming directly instead of creating the temporary file: https://gist.github.com/PatrickHeneise/8f2c72c16c4e68e829e58ade64aba553#file-gcp-function-storage-file-stream-js
function asyncBusboy(req, res) {
return new Promise((resolve, reject) => {
const storage = new Storage()
const bucket = storage.bucket(process.env.BUCKET)
const fields = []
const busboy = Busboy({
headers: req.headers,
limits: {
fileSize: 10 * 1024 * 1024
}
})
busboy.on('field', (key, value) => {
fields[key] = value
})
busboy.on('file', (name, file, fileInfo) => {
const { mimeType } = fileInfo
const destFile = bucket.file(fileName)
const writeStream = destFile.createWriteStream({
metadata: {
contentType: fileInfo.mimeType,
metadata: {
originalFileName: fileInfo.filename
}
}
})
file.pipe(writeStream)
})
busboy.on('close', function () {
return resolve({ fields })
})
if (req.rawBody) {
busboy.end(req.rawBody)
} else {
req.pipe(busboy)
}
})
}
Everything worked for me until I added csrf. I use in public/js/editor.js fetch to send the image file to the server:
fetch('/upload', {
method: 'post',
body: formdata
}).then(res => res.json())
.then(data => {
if (uploadType == 'image')
{
addImage(data, file.name);
}
else if (uploadType == 'banner')
{
bannerPath = `${location.origin}/${data}`;
banner.style.backgroundImage = `url("${bannerPath}")`
}
else
{
console.error('Данный тип файла не поддерживается');
}
})
In the server.js I accept the file:
app.post('/upload', (req, res) => {
console.log(req.files);
let file = req.files.image;
let date = new Date();
console.log('test post');
// image name
let imagename = date.getDate() + date.getTime() + file.name;
// image upload path
let path = 'public/uploads/' + imagename;
// create upload
file.mv(path, (err, result) => {
if (err) {
throw err;
} else {
// our image upload path
res.json(`uploads/${imagename}`)
}
})
})
After adding csrf files began to look like this:
Become:
editor.js FULL
const csrfToken = getCookie('XSRF-TOKEN');
console.log(csrfToken);
const headers = new Headers({
'Content-Type': 'x-www-form-urlencoded',
'X-CSRF-Token': csrfToken
});
fetch('/upload', {
method: 'post',
headers: headers,
credentials: 'include',
body: formdata
}).then(res => res.json())
.then(data => {
if (uploadType == 'image')
{
addImage(data, file.name);
}
else if (uploadType == 'banner')
{
bannerPath = `${location.origin}/${data}`;
banner.style.backgroundImage = `url("${bannerPath}")`
}
else
{
console.error('Данный тип файла не поддерживается');
}
})
function getCookie(name) {
if (!document.cookie) {
return null;
}
const xsrfCookies = document.cookie.split(';')
.map(c => c.trim())
.filter(c => c.startsWith(name + '='));
if (xsrfCookies.length === 0) {
return null;
}
return decodeURIComponent(xsrfCookies[0].split('=')[1]);
}
and server.js FULL
const cookieParser = require("cookie-parser");
const csrf = require("csurf");
const csrfMiddleware = csrf({ cookie: true });
app.use(cookieParser());
app.use(csrfMiddleware);
app.all("*", (req, res, next) => {
var token = req.csrfToken();
res.cookie("XSRF-TOKEN", token);
res.locals.csrfToken = token;
next();
});
app.use(function (req, res, next) {
var token = req.csrfToken();
res.cookie('XSRF-TOKEN', token);
res.locals.csrfToken = token;
next();
});
//upload link
app.post('/upload', (req, res) => {
console.log(req.files);
let file = req.files.image;
let date = new Date();
console.log('test post');
// image name
let imagename = date.getDate() + date.getTime() + file.name;
// image upload path
let path = 'public/uploads/' + imagename;
// create upload
file.mv(path, (err, result) => {
if (err) {
throw err;
} else {
// our image upload path
res.json(`uploads/${imagename}`)
}
})
})
Problem
But now after uploading the image to editor.js , an error occurs in server.js:
TypeError: Cannot read properties of undefined (reading 'image')
The variable req.files has become undefined
What is the problem?
'Content-Type': 'x-www-form-urlencoded',
You're overriding the Content-Type header the browser was sending with the request.
Since the new value is wrong, the server doesn't know how to parse the body.
Don't do that.
It works fine when I use localhost. I am also using the correct URl in the frontend. It works for getting data and editing data but donot work when i try to post data using formdata using heroku URL.
Order.controller.js
const { cloudinaryUpload } = require('../middleware/cloudinary');
const Order = require('../Model/Order');
const addData= (req, res, next) => {
let data = JSON.parse(req.body.order);
data.files = req.files
console.log(data);
const OrderData = new Order({})
let mappedData = mapOrder(OrderData, data);
mappedData.save((err, saved) => {
if (err) {
console.log(err)
return next({
msg: err,
status: 400
})
}
res.json({
msg: "Submitted successfully"
})
})
// cloudinaryUpload(mappedData.referenceImage1.image)
// .then((response) => {
// mappedData.referenceImage1.image = response.public_id
// })
// .catch((error) => {
// console.log(err)
// return next({
// msg: error,
// status: 400
// })
// })
}
const GetData=(req, res, next) => {
Order.find({ orderStatus: "processing" }, (err, orders) => {
if (err) {
return next({
msg: err,
status: 404
})
}
res.json({
orders: orders
})
})
}
const EditData=(req, res, next) => {
let ID = req.query.id
Order.find({ OrderId: ID }, (err, order) => {
if (err) {
return next({
msg: err,
status: 404
})
}
order[0].orderStatus = "ReadyToPickUp"
order[0].save((err, updated) => {
if (err) {
return next({
msg: err
})
}
res.json({
order: updated
})
})
})
}
function mapOrder(OrderData, data) {
OrderData.orderType = data.OrderType
OrderData.customer.fullname = data.name;
OrderData.customer.phone = data.phone;
OrderData.customer.optionalPhone = data.optionalPhone || "";
OrderData.customer.address = data.address;
OrderData.cakeDescription.weight = data.Weight;
OrderData.cakeDescription.flavor = data.Flavor;
OrderData.cakeDescription.shape = data.Shape;
OrderData.cakeDescription.eggless = data.eggless;
OrderData.cakeDescription.messageoncake = data.messageoncake || "";
OrderData.orderStatus = "processing";
if (data.files[0]) {
OrderData.referenceImage1.image = data.files[0].filename
}
if (data.files[1]) {
OrderData.referenceImage2.image = data.files[1].filename
}
OrderData.referenceImage1.note = data.ImageNote1
OrderData.referenceImage2.note = data.ImageNote2
OrderData.deliveryDate = data.date;
OrderData.deliveryTime = data.time;
OrderData.amount = data.Amount;
OrderData.advancePaid = data.advance;
return OrderData;
}
module.exports = {
addData,GetData,EditData
};
Imagehandler.js
const multer = require('multer');
const path = require('path');
const storage= multer.diskStorage({
destination: function(req,file,cb){
cb(null,path.join(process.cwd(),'/uploads'))
},
filename : function(req,file,cb){
cb(null,Date.now()+'-'+file.originalname)
}
})
const fileFilter=(req,file,cb)=>{
let image = file.mimetype // mimetype is type of image/jpeg
let fileType = image.split('/')
if(fileType[0] === "image" ){
cb(null,true)
}
else{
req.fileError = true;
cb(null,false)
}
}
var upload = multer({ storage: storage, fileFilter: fileFilter})
module.exports = upload
app.route.js
const router = require('express').Router();
const upload = require('./middleware/imagehandler');
const {addData,EditData,GetData} = require('./api/order.controller');
router.post('/addorder',upload.array('images',3),addData)
router.put('/editorder',EditData)
router.get('/getorder',GetData)
module.exports = router
App.js
const express = require('express');
const app = express();
const path = require('path')
const cors = require('cors')
const orderRouter = require('./api/order.controller');
const dotenv = require('dotenv').config(); // it checks defalut .env file if present and cofigure it.
require('./db.initialize');
app.use(cors());
app.use('/api',orderRouter);
app.use('/files',express.static(path.join(process.cwd(),'/uploads')))
app.use((req,res,next)=>{
res.json({
msg: "Error occured",
satus: 400
})
})
// it recieves error from next and we can use the error information
app.use((error,req,res,next)=>{ // error handler
res.status(error.status || 400)
res.json({
msg : error.msg || "Error",
status: error.status || 400
})
})
app.listen(process.env.PORT || 8080,(err,done)=>{
if(err){
console.log("Server failed")
}
else{
console.log("Server Started at",process.env.PORT);
}
})
db.initialize.js
const mongoose = require('mongoose');
const {DB_NAME} = require('./configs/index.config')
let DBURL=`mongodb+srv://bibekdb:password#webapp.iqxjb.mongodb.net/${DB_NAME}?retryWrites=true&w=majority`
mongoose.connect(process.env.MONGODB_URI || DBURL,{
useNewUrlParser: true,
useUnifiedTopology: true
})
.then((done)=>{
console.log("Heroku");
console.log("DB connected successfully..",process.env.DB_NAME);
})
.catch((error)=>{
console.log("Db connection failed")
})
httpClient.js
I am hitting the correct API using xhr and sending data using formdata. I dont know what i am missing
import axios from 'axios';
const base_URL = process.env.REACT_APP_BASE_URL;
const https = axios.create({
baseURL: process.env.REACT_APP_BASE_URL,
timeout: 10000,
timeoutErrorMessage: "Server timeout",
responseType: "json",
headers: {
'Content-Type': "application/json"
}
})
const GET =(url)=>{
return https.get(url)
}
const EDIT =(url,params)=>{
return https.put(url,null,{
params
})
}
const Upload = (method, url, data = {}, files = []) => {
return new Promise((resolve, reject) => {
const xhr = new XMLHttpRequest();
const formdata = new FormData()
const orderData = JSON.stringify(data);
formdata.append('order',orderData)
files.forEach((file, index) => {
formdata.append('images', file, file.name)
})
xhr.onreadystatechange=()=> {
if (xhr.readyState === 4) {
if (xhr.status === 200) {
resolve(xhr.response)
}
else {
reject(xhr.response)
}
}
}
xhr.open(method, `${base_URL}${url}`)
xhr.send(formdata);
})
}
I created a server without express and I'm trying to server a simple static webpage on localhost
here is my code :
const fs = require('fs')
const url = require('url');
const hostname = 'localhost'
const port = 3000;
const path = require ('path');
const http = require ('http')
const server = http.createServer((req, res) => {
if (req.url.match (/.css$/)) {
let cssPath = path.join (__dirname, req.url)
let cssReadStream = fs.createReadStream (cssPath, 'UTF-8')
res.statusCode = 200;
res.setHeader ("Content-Type", "text/css");
cssReadStream.pipe (res)
}
if (req.url === "/") {
fs.readFile("./index.html", 'UTF-8', (err, data) => {
res.statusCode = 200;
res.setHeader("Content-Type", "text/html");
res.end(data);
})
}
if (req.url.match (/.jpg$/)) {
let jpgPath = path.join (req.url)
console.log (jpgPath)
let jpgReadStream = fs.createReadStream (jpgPath, 'UTF-8')
res.statusCode = 200;
res.setHeader ('Content-Type', 'image/jpg')
jpgReadStream.pipe (res)
}
})
server.listen (port, hostname, () => {
console.log ('server start')
})
first of all, it can display the HTML and CSS, however, localhost just keeps on loading after HTML and CSS displayed. Second of All, the image cannot be display (a instagram icon name icon.jpg).
the end result should look something like this:
I guess it has something to do with the favicon thing, but how do i fix it?
you need to use response.send() function which will be used to send the response back
const server = http.createServer((req, res) => {
if (req.url.match (/.css$/)) {
let cssPath = path.join (__dirname, req.url)
let cssReadStream = fs.createReadStream (cssPath, 'UTF-8')
res.statusCode = 200;
res.setHeader ("Content-Type", "text/css");
cssReadStream.pipe (res)
res.send("your data")
}
if (req.url === "/") {
fs.readFile("./index.html", 'UTF-8', (err, data) => {
res.statusCode = 200;
res.setHeader("Content-Type", "text/html");
res.end(data);
res.send("your data")
})
}
if (req.url.match (/.jpg$/)) {
let jpgPath = path.join (req.url)
console.log (jpgPath)
let jpgReadStream = fs.createReadStream (jpgPath, 'UTF-8')
res.statusCode = 200;
res.setHeader ('Content-Type', 'image/jpg')
jpgReadStream.pipe (res)
res.send("your data")
}
})
I'm attempting to handle file uploads using a Google Cloud Function. This function uses Busboy to parse the multipart form data and then upload to Google Cloud Storage.
I keep receiving a ERROR: { Error: ENOENT: no such file or directory, open '/tmp/xxx.png' error when triggering the function.
The error seems to occur within the finish callback function when storage.bucket.upload(file) attempts to open the file path /tmp/xxx.png.
Example code
const path = require('path');
const os = require('os');
const fs = require('fs');
const Busboy = require('busboy');
const Storage = require('#google-cloud/storage');
const moment = require('moment');
const _ = require('lodash');
const projectId = 'xxx';
const bucketName = 'xxx';
const storage = new Storage({
projectId: projectId,
});
exports.uploadFile = (req, res) => {
if (req.method === 'POST') {
const busboy = new Busboy({
headers: req.headers
});
const uploads = []
const tmpdir = os.tmpdir();
busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
const filepath = path.join(tmpdir, filename)
var obj = {
path: filepath,
name: filename
}
uploads.push(obj);
var writeStream = fs.createWriteStream(obj.path);
file.pipe(writeStream);
});
busboy.on('finish', () => {
_.forEach(uploads, function (file) {
storage
.bucket(bucketName)
.upload(file.path, {
name: moment().format('/YYYY/MM/DD/x') + '-' + file.name
})
.then(() => {
console.log(`${file.name} uploaded to ${bucketName}.`);
})
.catch(err => {
console.error('ERROR:', err);
});
fs.unlinkSync(file.path);
})
res.end()
});
busboy.end(req.rawBody);
} else {
res.status(405).end();
}
}
Solved this with a stream instead of a temporary file. Only handles a single file at the moment though.
https://gist.github.com/PatrickHeneise/8f2c72c16c4e68e829e58ade64aba553#file-gcp-function-storage-file-stream-js
function asyncBusboy(req, res) {
return new Promise((resolve, reject) => {
const storage = new Storage()
const bucket = storage.bucket(process.env.BUCKET)
const fields = []
const busboy = Busboy({
headers: req.headers,
limits: {
fileSize: 10 * 1024 * 1024
}
})
busboy.on('field', (key, value) => {
fields[key] = value
})
busboy.on('file', (name, file, fileInfo) => {
const { mimeType } = fileInfo
const destFile = bucket.file(fileName)
const writeStream = destFile.createWriteStream({
metadata: {
contentType: fileInfo.mimeType,
metadata: {
originalFileName: fileInfo.filename
}
}
})
file.pipe(writeStream)
})
busboy.on('close', function () {
return resolve({ fields })
})
if (req.rawBody) {
busboy.end(req.rawBody)
} else {
req.pipe(busboy)
}
})
}