Is There Some Kind of AWS Setting That Limits How Many Concurrent Long-Polling Requests Node.js Can Handle?
I'm setting up a new application that long-polls messages from AWS SQS with a 10-second wait. When I load-tested it, latency started growing once about 80 users were waiting on their requests, reaching 15 seconds, and hit 30 seconds with 300 users. Is something wrong with my code, or does AWS have some setting that limits this?
const port = process.env.PORT || 3001;
const express = require('express');
const app = express();
const AWS = require('aws-sdk');
AWS.config.update({region: 'eu-west-1'});

const MD5 = function (d) {<md5function>};
const sleep = (waitTimeInMs) => new Promise(resolve => setTimeout(resolve, waitTimeInMs));

const SQS = new AWS.SQS({
  region: 'eu-west-1'
});

const LONG_POLL_TIMEOUT = 10; // seconds to long-poll SQS (and to hold an unauthenticated request)

// Entry point for a long-poll request: hold invalid tokens for the full interval,
// otherwise wait for a message on the client's queue.
async function checkQueue(req, res) {
  const {version, token} = req.params;
  const auth = req.query.auth;
  if (!isTokenValid(token, auth)) {
    await sleep(LONG_POLL_TIMEOUT * 1000);
    res.send();
  } else {
    getUpdateMessage(version, token, res);
  }
}

// Resolve the per-client queue URL, long-poll it for one message,
// send the message body to the client, then delete the message from the queue.
function getUpdateMessage(version, token, res) {
  const urlParams = {
    QueueName: `_version-queue-${version}-${token}`
  };
  SQS.getQueueUrl(urlParams, (urlErr, urlData) => {
    if (urlErr) {
      res.status(204).send();
    } else {
      const messageParams = {
        QueueUrl: urlData.QueueUrl,
        WaitTimeSeconds: LONG_POLL_TIMEOUT, // SQS long polling: wait up to 10 s for a message
      };
      SQS.receiveMessage(messageParams, (err, data) => {
        if (err) {
          res.status(204).send();
        } else if (data.Messages) {
          res.send(data.Messages[0].Body);
          SQS.deleteMessage({
            QueueUrl: urlData.QueueUrl,
            ReceiptHandle: data.Messages[0].ReceiptHandle
          }, (deleteErr) => {
            if (deleteErr) {
              console.error('Failed to delete message', deleteErr);
            }
          });
        } else {
          // Long poll timed out with no message.
          res.send();
        }
      });
    }
  });
}

// Accept a request if its auth hash matches a token generated in the last 14 days.
function isTokenValid(token, auth) {
  let dayNumber = Math.ceil(Date.now() / (24 * 3600 * 1000));
  for (let i = 0; i < 14; i++) {
    const stringToHash = `<string>`;
    if (MD5(stringToHash) == auth) {
      return true;
    }
    dayNumber--;
  }
  return false;
}

app.use(function (req, res, next) {
  res.header("Access-Control-Allow-Origin", "*");
  next();
});

app.get('/versions/:version/long_poll_updates/:token', function (req, res) {
  checkQueue(req, res);
});

app.get('/check', function (req, res) {
  res.send('I\'m ok!');
});

app.use((req, res) => {
  res.status(404).send("Sorry, that route doesn't exist. Have a nice day :)");
});

app.listen(port, () => {
  console.log('Server running at http://127.0.0.1:' + port + '/');
});
CPU utilisation was less than 10 percent.
Answer
Here's the detailed documentation:
https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/node-configuring-maxsockets.html
When using the default of https, the SDK takes the maxSockets value from the globalAgent. If the maxSockets value is not defined or is Infinity, the SDK assumes a maxSockets value of 50.
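In other words, with the default https agent the SDK caps outgoing connections at 50 sockets, and each in-flight receiveMessage long poll holds one socket for up to 10 seconds, so once roughly 50+ polls are pending, additional requests queue up and latency grows. Raising maxSockets on the agent the SDK uses should relieve this. A minimal sketch (the value of 500 is just an illustrative number; size it to the number of concurrent long polls you expect):

const https = require('https');
const AWS = require('aws-sdk');

// Agent with a raised socket limit; keepAlive avoids re-handshaking for every poll.
const agent = new https.Agent({
  keepAlive: true,
  maxSockets: 500
});

AWS.config.update({
  region: 'eu-west-1',
  httpOptions: { agent }
});

const SQS = new AWS.SQS(); // picks up the global agent with the higher socket limit

Alternatively, the agent can be passed only to the SQS client via new AWS.SQS({ httpOptions: { agent } }) if you don't want to change the global configuration.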