NodeJS Email Alias Server - javascript

The scenario is simple:
I own several domains
I want to run email on those domains
I wantthem all to deliver to the same address regardless of who they where
sent to or on which domain
I want them to retain the original headers
specifically the to field
For this project i need use NodeJS
I have tried several things to no avail, I'm pretty proficient in node, but have no idea what I'm doing when it comes to emails.
I have all the domains pointed to the correct place, including the MX records, and using simplesmtp, i am able to receive the emails, i just can't figure out how to deliver them without trashing the headers or forwarding them in which case they show up at their destination as two emails.
Any suggestions or guidance is much appreciated
Regards,
David
Since we all love code here is the relevant stuff
var simplesmtp = require("simplesmtp"),
MailParser = require("mailparser").MailParser,
mailparser = new MailParser(),
nodemailer = require("nodemailer"),
gmail = nodemailer.createTransport("SMTP",{
service: "Gmail",
auth: {
user: "***",
pass: "***"
}
}),
fs = require("fs"),
smtp = simplesmtp.createServer();
// Setup the listener
smtp.listen(25);
// runs when the email is initially recieved
smtp.on("startData", function(connection){
// log out some basic stuff
console.log("Message from:", connection.from);
console.log("Message to:", connection.to);
// start the write stream
connection.saveStream = fs.createWriteStream("/path/message.txt");
});
// each chunk of data that is received
smtp.on("data", function(connection, chunk){
// continue the write stream
connection.saveStream.write(chunk);
});
// email completly received
smtp.on("dataReady", function(connection, callback){
// end the write stream
connection.saveStream.end();
// log some more stuff
console.log("Incoming message saved to /path/message.txt");
// start a readstream to forward out the message
fs.createReadStream("/path/message.txt").pipe(mailparser);
//
callback(null, "ABC1"); // ABC1 is the queue id to be advertised to the client
// callback(new Error("Rejected as spam!")); // reported back to the client
});
// Parse the email
mailparser.on("end", function(email){
// now lets forward the mail
// for now everything goes back to *****,
// eventually ill be setting up some configs to handle other addresses
// console.log(email.headers); */
// email.headers['X-Forwarded-To'] = "*****";
// email.to = "*****";
delete email.headers.received;
delete email.text;
// delete email.headers.X-Received;
email.to = email.to + ', ' + "*****";
email.headers.to = email.to + ', ' + "*****";
console.log(email.headers);
gmail.sendMail(email, function(err, response){
if(err)
console.log(err);
// now clean up that message file
fs.rename('/path/message.txt', 'emails/'+new Date().toJSON()+email.subject+'.eml', function(err){
if(err) console.log(err);
fs.unlink('/path/message.txt', function(){console.log('clenaed up');});
})
// final logging
console.log('sent');
});
});

You can set the envelope sender for the email using nodemailer so that it contains the forwarding address:
email.envelope = {
from: email.from
to: "user#example.com"
}
Some SMTP services will not let you set the envelope sender (MAIL FROM). You may have trouble with Gmail.
See also:
Email forwarding - Wikipedia, the free encyclopedia
email - What's the difference between Sender, From and Return-Path? - Stack Overflow

Related

How to send a POST message from MQL4 to NodeJS?

webrequest.mq4
#property copyright "Copyright 2013, apla"
#property link "-"
//+------------------------------------------------------------------+
//| expert start function |
//+------------------------------------------------------------------+
int start()
{
//----
// WebRequest
string cookie = NULL;
string headers = "Content-Type: application/x-www-form-urlencoded";
int res;
string url = "localhost:8080"; // url = localhost:8080
char post[], result[];
string signal = "account=" + AccountNumber() + "&balance=" + AccountBalance() + "&equity=" + AccountEquity();
StringToCharArray( signal, post );
Print( signal );
int timeout = 5000; // 5 sec
res = WebRequest( "POST",
url,
cookie,
NULL,
timeout,
post,
ArraySize( post ),
result,
headers
);
Print( "Status code: " , res, ", error: ", GetLastError() );
//----
return(0);
}
//+------------------------------------------------------------------+
I want to send a file from MetaTrader Terminal 4 webrequest.mq4 to a Node this Site section that can be given up, however.
MT4 >> Nodejs
??? POST[] ??? (JavaScript nodes)
account, balance, equity
how to convert file.php to nodejs
<?php
$myfile = fopen("newfile.txt", "w") or die("Unable to open file!");
$txt = "account ".$_POST['account']."\n";
fwrite($myfile, $txt);
$txt = "balance ".$_POST['balance']."\n";
fwrite($myfile, $txt);
$txt = "equity ".$_POST['equity']."\n";
fwrite($myfile, $txt);
fclose($myfile);
?>
For which I do not know how to get the POST.
writeFile.js
var http = require('http'); var fs = require('fs');
fs.writeFile("file.txt",??? POST[] ???, function(err,data) {
if (err) throw err;
console.log('The file was saved!');
http.createServer(function(req, res) {
res.writeHead(200, {'Content-Type': 'text/plain'});
res.end('OK');
}).listen(8080);
console.log('Server running at http://localhost:8080/'); });
kindly be attentive to details:
Step 0 :MQL4 partshould follow the recent New-MQL4.56789copy/paste code fails
As a first sign of this, MetaTrader Terminal 4 code-base statically present on web does not reflect creeping syntax changes of MQL4 language. Recently MQL4 has moved closer to MQL5 ( reasoning for which is outside of this post, if interested, check other posts about New-MQL4.56789 ).
int start(){...} // cannot be used anymore,
// neither for EXPERT_ADVISOR
// nor for SCRIPT
Recent #property strict compilation mode imposes use of:
void OnTick(){ ...} // for EXPERT_ADVISOR type of MQL4-code
void OnStart(){ ...} // for SCRIPT type of MQL4-code
int OnCalculate(...){ ...} // for CUSTOM_INDICATOR type of MQL4-code,
// while,
// CUSTOM_INDICATOR has explicitly
// FORBIDDEN any attempt
// call to a WebRequest( ... ) et al
This said, your MQL4-part of the code shall be modified in it's principal structure so as to reflect these facts.
For any further tasks, related to MQL4, rather use localhost installed Help-service from the IDE, searching for "help" on web will most of all become a misleading source for un-edited copy/paste attempts due to above presented reasons.
Step 1 :POST http-syntax constructionought be conformant to RFC 7231, Section 4.3.3
as a minimum, your constructed text, being stored into a string signal ought look something like this:
User-Agent: aplaHTTP/1.0
Content-Type: application/x-www-form-urlencoded
Content-Length: 54
account=123456789&balance=1234567.89&equity=1234567.89
Step 2 :Node.js partparse received parameters for whatever further post-processing
Similarly, the node.js part shall decipher the parameters delivered inside POST-url-encoded http-message.
And the job is done.
Welcome to the Wild Worlds of MQL4

How to stop CasperJS execution and let the user input some value and then continue to execute?

I'm using PhantomJS and CasperJS to automate some of my tasks. In one of the task, I need to manually provide captcha strings before I can actually work on the task. For this problem, what I can think of is to capture a screenshot of the web page, then manually check the captured image and save the captcha string into a text file. After that I can use the file system module in CasperJS to read that value and continue to do the process. I want to know what's the best way to do this kind of tasks.
Because of the stuctured manner/control flow of CasperJS compared to PhantomJS, such a task is not easy.
1. Pull approach (file polling)
Let's say there is a secondary program (type 1) which handles showing the CAPTCHA, receiving the input and writing a text file with the CAPTCHA input. All that CasperJS can handle is to write the CAPTCHA screenshot to disk and wait for the file with the "parsed" text.
var fs = require("fs"),
captchaFile = "cfile.png",
parsedFile = "pfile.txt";
casper.waitForCaptcha = function(captchaFile, parsedFile){
casper.then(function(){
this.captureSelector(captchaFile, "someSelectorOfTheCaptcha");
});
casper.waitFor(function check(){
return fs.exists(parsedFile);
}, function then(){
// do something on time
// check if correct...
if (!correct) {
fs.remove(captchaFile);
fs.remove(parsedFile);
this.waitForCaptcha(captchaFile, parsedFile);
// Problem: the secondary process needs to sense that a new CAPTCHA is presented
}
}, function onTimeout(){
// do something when failed
}, 60000); // 1min should suffice as a timeout
return this;
};
casper.start(url).waitForCaptcha(captchaFile, parsedFile).run();
This code assumes that you want to retry when the CAPTCHA was wrong, but not if the minute deliberately passed without the decoded file. This is a pull process by polling if files are already there.
2. Push approach
A push process is also possible where the secondary program (type 2) sends requests to the CasperJS process by utilizing the PhantomJS webserver module. Because there will be two concurrent control flows, the CasperJS part needs to wait a long time, but as soon as a request is received with the decoded words the waiting can be broken with unwait.
var server = require('webserver').create(),
fs = require("fs"),
captchaFile = "cfile.png";
function neverendingWait(){
this.wait(5000, neverendingWait);
}
casper.checkCaptcha = function(captchaFile, phantomPort, secondaryPort){
// here the CAPTCHA is saved to disk but it can also be set directly if captured through casper.captureBase64
this.captureSelector(captchaFile, "someSelectorOfTheCaptcha");
// send request to the secondary program from the page context
this.evaluate(function(file){
__utils__.sendAJAX("http://localhost:"+secondaryPort+"/", "POST", {file: file}, true);
}, captchaFile);
// start the server to receive solved CAPTCHAs
server.listen(phantomPort, {
'keepAlive': true
}, function (request, response) {
console.log('Request received at ' + new Date());
if (request.post) { // is there a response?
this.then(function(){
// check if it is correct by reading request.post ...
if (!correct){
response.statusCode = 404;
response.headers = {
'Cache': 'no-cache',
'Content-Type': 'text/plain;charset=utf-8'
};
response.close();
server.close();
this.checkCaptcha(captchaFile, phantomPort, secondaryPort);
} else {
response.statusCode = 200;
response.headers = {
'Cache': 'no-cache',
'Content-Type': 'text/plain;charset=utf-8'
};
response.close();
server.close();
this.unwait(); // abort the neverendingWait
}
});
} else {
response.statusCode = 404;
response.headers = {
'Cache': 'no-cache',
'Content-Type': 'text/plain;charset=utf-8'
};
response.close();
server.close();
this.checkCaptcha(captchaFile, phantomPort, secondaryPort);
}
});
return this;
};
casper.start(url).then(function(){
this.checkCaptcha(captchaFile, 8080, 8081);
}).then(neverendingWait).then(function(){
// Do something here when the captcha is successful
}).run();

Node JS taking long time in sending initial data

I am facing an issue with NodeJS. Initially it takes a long time about 4-5 seconds before I get any update. Node JS Server is publicly accessible and it is not patched through any proxy or anything. But once initial connection has been established - updates are instantaneous.
I dug deep using network tools from Chrome - it says it is waiting for data. see the attached image
I am also pasting the code for my app.js (node application) for your reference.
var http = require('http'),
url = require('url'),
fs = require('fs'),
amqp = require('amqp'),
sys = require(process.binding('natives').util ? 'util' : 'sys');
var exchangeName = 'conferenceTest';
send404 = function (res) {
res.writeHead(404);
res.write('404');
res.end();
};
server = http.createServer(function (req, res) {
var path = url.parse(req.url).pathname;
switch (path) {
case '/':
fs.readFile(__dirname + "/index.html", function (err, data) {
if (err) {
return send404(res);
} else {
res.writeHead(200, {
'Content-Type': 'application/zip',
'Connection': 'close',
'content-encoding': 'gzip'
});
res.write(data, 'utf8');
res.end();
}
});
break;
}
});
// listen to the http server for socket connections
var io = require('socket.io').listen(server);
var connection = amqp.createConnection({
host: 'localhost'
});
connection.on('ready', function () {
var exchange = connection.exchange(exchangeName, { // create exchange
type: 'direct',
durable: true
});
io.sockets.on('connection', function (client) {
console.log("client connected");
client.on('changeview', function (data) {
var queue = connection.queue(data.queueName, { //create queue
durable: true,
autoDelete: false
});
var plaintext = "Put any kind of meat on a stick and roast it over a flame and it immediately becomes food fit for gods. No country understands this sacred rule of seared meat like Turkey.Turkish kebabs are the incarnation of the meat lovers most exotic fantasies, with grilled lamb, beef and chicken as skewer MVPs.Most kebab restaurants also have a long list of Turkish starters called meze that are as delicious as the main dishes.Turkeys best alcoholic complement for all that meat is raki -- an aniseed-flavored drink that s often diluted with water and chilled with ice. Frothy, yogurt-based ayran is a great non-alcoholic complement to heavy dishes. But who are we kidding -- you just want the meat. Heres where to get it in Turkey.";
io.sockets.emit('changeview', plaintext);
});
});
});
process.on('uncaughtException', function (err) {
console.log('Uncaught Exception: ' + err.message);
});
server.listen(18080);
Thanks
Your client is requesting some long numerical URI, but your handler is only accepting requests for / (for which it sends back index.html). Other requests (like for the numerical URI's) are just not handled at all, which means your browser will wait for some time and eventually give up.
To solve, add a default case and return a 404 error:
switch (path) {
case '/':
// your current code
break;
default:
send404(res);
}
Also: why are you setting those specific headers on the response for the index.html file? application/zip would mean that your index.html should be regarded as a ZIP file, and setting the content encoding to gzip would mean that the response is gzip'ed. Neither of which seem to be the case here.
On the client side - default connect timeout is 10 seconds
I have reduced it to 500 ms - it connects almost now in 2.92 seconds - previously it was taking up to 12.3 seconds
var conn = io.connect("http://myserver.com:myport/", { transports: transports, 'reconnect': true, 'connect timeout':500});
Hope it helps anyone else struggling with this issue

Server authentication using Faye on Node.js

So I am extremely new to node.js, and faye - so I think this is pretty basic, but I'm not sure what I should be asking.
I have this setup for my faye server, running on node jitsu:
var http = require('http'),
faye = require('faye');
var bayeux = new faye.NodeAdapter({mount: '/faye', timeout: 45});
// Handle non-Bayeux requests
var server = http.createServer(function(request, response) {
response.writeHead(200, {'Content-Type': 'text/plain'});
response.write('Hello, non-Bayeux request');
response.end();
});
bayeux.attach(server);
server.listen(8000);
var fayeToken = "myToken";
var serverAuth = {
incoming: function(message, callback) {
// Add an error if the tokens don't match
if (fayeToken !== message.ext.auth_token)
message.error = 'Invalid auth token';
// Call the server back now we're done
callback(message);
}
};
bayeux.addExtension(serverAuth);
Then I have a rails app that connects to it.
Before I added the serverAuth stuff, it was working fine.
When I curl like so:
curl http://myapp.jit.su/faye -d 'message={"channel":"/alerts", "data":"hello", "ext":{"auth_token":"myToken"}}'
I get a success message.
In my JS file on the rails app I have:
$(function() {
//FAYE CLIENT
var faye = new Faye.Client('http://myapp.jit.su/faye');
faye.setHeader('ext.auth_token', 'myToken');
faye.subscribe("/alerts", function(data) {
alert(data);
});
});
Now I get a bad request 400 error when the rails app loads for the faye script.
The params being sent are specifically:
[{"channel":"/meta/handshake","version":"1.0","supportedConnectionTypes":["callback-polling"],"id":"1"}]
So it seems as though the header param that I'm setting isn't being sent in this "meta" handshake that faye does.
Before I had this server authentication in, it worked just fine, and I could curl messages into the app. No dice now.
Any ideas where I'm going wrong?
Thanks!
For future reference:
I had to make sure its not doing a meta call:
var serverAuth = {
incoming: function(message, callback) {
// Let non-subscribe messages through
if (message.channel.indexOf("/meta/") !== 0){
if (fayeToken !== message.ext.auth_token){
message.error = 'Invalid auth token';
}
}
callback(message);
}
};
There's two problems here that I can see here. First, your extension will fail if the message does not have an ext field. You should check for it before checking message.ext.auth_token:
var serverAuth = {
incoming: function(message, callback) {
// Let non-subscribe messages through
if (message.channel.indexOf("/meta/") !== 0){
if (!message.ext || fayeToken !== message.ext.auth_token){
message.error = 'Invalid auth token';
}
}
callback(message);
}
};
I'm also not sure what you're trying to authenticate. if (message.channel.indexOf("/meta/") !== 0) will match any non-meta message, i.e. all messages sent with publish() (meta messages are used for handshaking, polling, (un)subscribing, and disconnecting). This means anyone can publish messages, but only the server can subscribe to any channels.
Second, ext is part of the messages themselves, whereas setHeader() is used to set HTTP headers, i.e. it's part of the transport layer. You should use an extension to add ext to messages:
client.addExtension({
outgoing: function(message, callback) {
message.ext = message.ext || {};
message.ext.auth_token = THE_TOKEN;
callback(message);
}
});

Creating a map of ids to sockets and vice versa in Node.js

I'm trying to manage a bunch of socket connections. My app is basically an http server that receives posts and passes these along to a socket. When clients open a socket connection, they send a connect message with an id:
{"m":"connect","id":"1"}
The app then saves this id and socket in the id2socket and socket2id maps. On disconnect, the socket/id pair is deleted from the maps.
A post will also contain an id, which indicates the post data should be sent to the socket with that id.
That's great, and this works fine for a single open socket. However, when I have more than one socket open, and then I close a socket, that disconnect wipes everything from the map. I think my understanding of sockets in node is incomplete- is there only a single socket object that is used in the callback? Is there a better way to manage my open socket connections and ids?
start server:
>>node server.js
TCP server listening on 127.0.0.1:5280
HTTP server listening on 127.0.0.1:9002
telnet in:
>>telnet localhost 5280
Trying 127.0.0.1...
Connected to localhost.
Escape character is '^]'.
{"m":"connect","id":"123"}
{"m":"connect","id":"123","success":"true"}
server after connection:
>>Connection from 127.0.0.1:57572
received data: {"m":"connect","id":"123"}
id: 1
m: connect
associating uid 1 with socket [object Object]
do a post:
python post.py {"foo":"bar"}
So this works fine for several open sockets (as long as 1 device is id 123, server has this hardwired for now). However, as soon as you close one connection all the socket connections are removed from the map.
Here's my code:
python script to do post:
import sys
import json
import httplib, urllib, urllib2
values = json.loads('{"foo":"bar"}')
headers = {"Content-type": "application/json"}
conn = httplib.HTTPConnection('127.0.0.1', 9002)
headers = {"Content-type": "application/json"}
conn.request("POST", "", json.dumps(values), headers)
response = conn.getresponse()
print "response.status: "+response.status
print "response.reason: "+response.reason
print "response.read: "+response.read()
conn.close()
node server (http and tcp), hardwired to send data to device '123' on post:
var net = require('net'); // tcp-server
var http = require("http"); // http-server
var qs = require('querystring'); // http-post
// Map of sockets to devices
var id2socket = new Object;
var socket2id = new Object;
// Setup a tcp server
var server_plug = net.createServer(function(socket) {
// Event handlers
socket.addListener("connect", function(conn) {
console.log("Connection from " + socket.remoteAddress + ":" + socket.remotePort );
});
socket.addListener("data", function(data) {
console.log("received data: " + data);
try {
request = JSON.parse(data);
response = request;
if(request.m !== undefined && request['id'] !== undefined){ // hack on 'id', id is js obj property
console.log("id: "+request['id']);
console.log("m: "+request.m);
if(request.m == 'connect'){
console.log("associating uid " + request['id'] + " with socket " + socket);
id2socket[request['id']] = socket;
socket2id[socket] = request['id'];
response.success = 'true';
} else {
response.success = 'true';
}
}
socket.write(JSON.stringify(response));
} catch (SyntaxError) {
console.log('Invalid JSON:' + data);
socket.write('{"success":"false","response":"invalid JSON"}');
}
});
socket.on('end', function() {
id = socket2id[socket]
console.log("socket disconnect by id " + id);
// wipe out the stored info
console.log("removing from map socket:"+socket+" id:"+id);
delete id2socket[id];
delete socket2id[socket];
});
socket.on('timeout', function() {
console.log('socket timeout');
});
});
// Setup http server
var server_http = http.createServer(
// Function to handle http:post requests, need two parts to it
// http://jnjnjn.com/113/node-js-for-noobs-grabbing-post-content/
function onRequest(request, response) {
request.setEncoding("utf8");
request.addListener("data", function(chunk) {
request.content += chunk;
});
request.addListener("end", function() {
console.log("post received!");
//console.log("Request received: "+request.content);
if (request.method == 'POST') {
//var json = qs.parse(request.content);
//console.log("Post: "+json);
// HACK TO TEST STUFF:
// send a message to one of the open sockets
try {
var socket = id2socket['123']; //hardwired
socket.write('{"m":"post"}');
} catch (Error) {
console.log("Cannot find socket with id "+'123');
}
}
});
}
);
// Fire up the servers
var HOST = '127.0.0.1';
var PORT = 5280;
var PORT2 = 9002;
server_plug.listen(PORT, HOST);
console.log("TCP server listening on "+HOST+":"+PORT);
server_http.listen(PORT2);
console.log("HTTP server listening on "+HOST+":"+PORT2);
Objects only take strings as keys for their properties. As your log shows, a socket object is converted into the string "[object Object]". As a result, socket #2 overwrites the id from socket #1 in the object, because all sockets are converted into the same string key. So, there is only one property in the object at all times, because all sockets come down to the same key. When you try to remove the id for socket #2, the single property is deleted and the object is empty.
You seem to want a custom property for each separate socket when used as a key. You can use WeakMaps for this. WeakMaps do allow objects as keys (as opposed to string-only keys), but as they're relatively new they may contain bugs at the moment.
(Note that the id2socket map can just be a plain object, because numbers are converted into strings just fine, and each number has its own, distinct string representation*.)
Using WeakMaps is as follows:
var socket2id = new WeakMap; // as if you were doing: var socket2id = {};
socket2id.set(socket, id); // as if you were doing: socket2id[socket] = id;
socket2id.get(socket); // as if you were doing: socket2id[socket];
socket2id.delete(socket); // as if you were doing: delete socket2id[socket];
Make sure to run with node --harmony (>= 0.7) or node --harmony_weakmaps (<= 0.6).
* 0 and -0 are exceptions, but you shouldn't be using -0 anyway because 0 === -0, so it's difficult to differ between them.

Categories

Resources