Merge branch 'lighthouse-runner'

This commit is contained in:
Nawaz Dhandala
2021-07-06 09:40:12 +01:00
21 changed files with 5934 additions and 87 deletions

View File

@@ -0,0 +1,103 @@
/**
*
* Copyright HackerBay, Inc.
*
*/
const express = require('express');
const ProbeService = require('../services/ProbeService');
const MonitorService = require('../services/monitorService');
const LighthouseLogService = require('../services/lighthouseLogService');
const router = express.Router();
const sendErrorResponse = require('../middlewares/response').sendErrorResponse;
const sendItemResponse = require('../middlewares/response').sendItemResponse;
const sendListResponse = require('../middlewares/response').sendListResponse;
const { isAuthorizedLighthouse } = require('../middlewares/lighthouseAuthorization');
// Route
// Description: Returns all URL monitors that are due for a lighthouse scan.
// Params: none (cluster key is taken from headers/query/body by isAuthorizedLighthouse).
// Returns: 200: Success, 400: Error; 500: Server Error.
// GET /monitors — list every URL monitor that is eligible for a lighthouse
// scan. The list is stringified so the runner can parse it on its side.
router.get('/monitors', isAuthorizedLighthouse, async (req, res) => {
    try {
        const urlMonitors = await MonitorService.getUrlMonitors();
        const payload = JSON.stringify(urlMonitors);
        return sendListResponse(req, res, payload, urlMonitors.length);
    } catch (error) {
        return sendErrorResponse(req, res, error);
    }
});
// POST /ping/:monitorId — receive a lighthouse scan status/result from the
// lighthouse runner and persist it against the monitor.
// Params:
// Param 1: req.params -> {monitorId}; req.body -> {monitor, resp}
// Returns: 200: Success, 400: Error; 500: Server Error.
router.post('/ping/:monitorId', isAuthorizedLighthouse, async function(
    req,
    response
) {
    try {
        const { monitor, resp } = req.body;
        let log;

        // Flatten the runner's response onto the request body so it can be
        // stored as a single lighthouse log document.
        const data = req.body;
        data.lighthouseScanStatus =
            resp && resp.lighthouseScanStatus ? resp.lighthouseScanStatus : null;
        data.performance = resp && resp.performance ? resp.performance : null;
        data.accessibility =
            resp && resp.accessibility ? resp.accessibility : null;
        data.bestPractices =
            resp && resp.bestPractices ? resp.bestPractices : null;
        data.seo = resp && resp.seo ? resp.seo : null;
        data.pwa = resp && resp.pwa ? resp.pwa : null;
        data.lighthouseData =
            resp && resp.lighthouseData ? resp.lighthouseData : null;
        data.monitorId = req.params.monitorId || monitor._id;

        // Guard against an empty probe list: the previous code indexed
        // probeId[0]._id, which throws when findBy() returns [].
        const probes = await ProbeService.findBy();
        data.probeId = probes && probes.length > 0 ? probes[0]._id : null;

        if (data.lighthouseScanStatus === 'scanning') {
            // Mark the monitor as being scanned and flag its existing logs so
            // the UI can show a "scanning" state.
            await MonitorService.updateOneBy(
                { _id: data.monitorId },
                {
                    lighthouseScanStatus: data.lighthouseScanStatus,
                },
                { fetchLightHouse: true }
            );
            await LighthouseLogService.updateAllLighthouseLogs(
                data.monitor.projectId,
                data.monitorId,
                { scanning: true }
            );
        } else {
            // Scan finished: stamp the monitor with the final status.
            await MonitorService.updateOneBy(
                { _id: data.monitorId },
                {
                    lighthouseScannedAt: Date.now(),
                    lighthouseScanStatus: data.lighthouseScanStatus, // scanned || failed
                    lighthouseScannedBy: data.probeId,
                }
            );
            if (data.lighthouseData) {
                // The scanned results are published
                data.scanning = false;
                log = await ProbeService.saveLighthouseLog(data);
            }
        }
        return sendItemResponse(req, response, log);
    } catch (error) {
        return sendErrorResponse(req, response, error);
    }
});
module.exports = router;

View File

@@ -566,6 +566,7 @@ router.post('/ping/:monitorId', isAuthorizedProbe, async function(
)
: data.reason;
}
if (data.lighthouseScanStatus) {
if (data.lighthouseScanStatus === 'scanning') {
await Promise.all([

View File

@@ -0,0 +1,39 @@
/**
*
* Copyright HackerBay, Inc.
*
*/
const sendErrorResponse = require('../middlewares/response').sendErrorResponse;
const ErrorService = require('../services/errorService');
module.exports = {
isAuthorizedLighthouse: async function(req, res, next) {
try {
let clusterKey;
if (req.params.clusterKey) {
clusterKey = req.params.clusterKey;
} else if (req.query.clusterKey) {
clusterKey = req.query.clusterKey;
} else if (req.headers['clusterKey']) {
clusterKey = req.headers['clusterKey'];
} else if (req.headers['clusterkey']) {
clusterKey = req.headers['clusterkey'];
} else if (req.body.clusterKey) {
clusterKey = req.body.clusterKey;
}
if (!clusterKey ) {
return sendErrorResponse(req, res, {
code: 400,
message: 'Authorization Rejected.',
});
}
next();
} catch (error) {
ErrorService.log('lighthouseAuthorization.isAuthorizedLighthouse', error);
throw error;
}
},
};

View File

@@ -731,6 +731,53 @@ module.exports = {
}
},
async getUrlMonitors() {
try {
const oneDay = moment()
.subtract(1, 'days')
.toDate();
const monitors = await MonitorModel.find({
$and: [
{
deleted: false,
disabled: false,
},
{
$or: [
{
lighthouseScanStatus: {
$exists: false, // Lighthouse scan status does not exist
}
},
{
lighthouseScanStatus: {
$exists: true,
$nin: ['scanning', 'scanned'] // Lighthouse scan status exist but 'failed' or the 'scan' button is clicked from UI
}
},
{ lighthouseScannedAt: { $lt: oneDay } }
]
},
{
type: {
$in: [
'url',
],
},
},
],
});
return monitors;
} catch (error) {
ErrorService.log('monitorService.getUrlMonitors', error);
throw error;
}
},
async updateMonitorPingTime(id) {
try {
const newdate = new Date();

View File

@@ -207,10 +207,8 @@ app.use(
require('./backend/api/scheduledEvent')
);
app.use(['/probe', '/api/probe'], require('./backend/api/probe'));
app.use(
['/application', '/api/application'],
require('./backend/api/applicationScanner')
);
app.use(['/application', '/api/application'], require('./backend/api/applicationScanner'));
app.use(['/lighthouse', '/api/lighthouse'], require('./backend/api/lighthouse'));
app.use(['/version', '/api/version'], require('./backend/api/version'));
app.use(['/tutorial', '/api/tutorial'], require('./backend/api/tutorial'));
app.use(['/audit-logs', '/api/audit-logs'], require('./backend/api/auditLogs'));

View File

@@ -285,5 +285,23 @@ services:
# Use node modules of the container and not host system.
# https://stackoverflow.com/questions/29181032/add-a-volume-to-docker-but-exclude-a-sub-folder
- /usr/src/app/node_modules/
lighthouse-runner:
ports:
- '3015:3015'
- '9241:9229' # Debugging port.
build:
context: ./lighthouse-runner
dockerfile: ./Dockerfile.dev
env_file:
- ./lighthouse-runner/.env
environment:
- PORT=3015
- SERVER_URL=http://backend:3002
- IS_SAAS_SERVICE=${IS_SAAS_SERVICE}
volumes:
- ./lighthouse-runner:/usr/src/app
# Use node modules of the container and not host system.
# https://stackoverflow.com/questions/29181032/add-a-volume-to-docker-but-exclude-a-sub-folder
- /usr/src/app/node_modules/
volumes:
mongodata:

View File

@@ -195,6 +195,18 @@ services:
- IS_SAAS_SERVICE=${IS_SAAS_SERVICE}
depends_on:
- backend
lighthouse-runner:
ports:
- '3015:3015'
build: ./lighthouse-runner
env_file:
- ./lighthouse-runner/.env
environment:
- PORT=3015
- SERVER_URL=http://backend:3002
- IS_SAAS_SERVICE=${IS_SAAS_SERVICE}
depends_on:
- backend
volumes:
mongodata:

2
lighthouse-runner/.env Normal file
View File

@@ -0,0 +1,2 @@
CLUSTER_KEY=f414c23b4cdf4e84a6a66ecfd528eff2
SERVER_URL=http://localhost:3002

36
lighthouse-runner/Dockerfile Executable file
View File

@@ -0,0 +1,36 @@
#
# Fyipe Lighthouse Runner Dockerfile (production image)
#
# Pull base image nodejs image.
FROM node:16
# SET ENV Variables
ENV PRODUCTION=true
# Lighthouse needs a Chrome binary; point it at the one installed below.
ENV CHROME_PATH=/usr/bin/google-chrome
# Install Chrome.
RUN \
wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add - && \
echo "deb http://dl.google.com/linux/chrome/deb/ stable main" > /etc/apt/sources.list.d/google.list && \
apt-get update && \
apt-get install -y google-chrome-stable && \
rm -rf /var/lib/apt/lists/*
RUN mkdir -p /usr/src/app
WORKDIR /usr/src/app
# Install app dependencies (production dependencies only).
COPY package*.json /usr/src/app/
RUN npm ci --only=production
# Bundle app source
COPY . /usr/src/app
# Expose ports.
# - 3015: Lighthouse Runner
EXPOSE 3015
# Run the app
CMD [ "npm", "start"]

View File

@@ -0,0 +1,38 @@
#
# Fyipe Lighthouse Runner Dockerfile (development image)
#
# Pull base image nodejs image.
FROM node:16
# SET ENV Variables
# Lighthouse needs a Chrome binary; point it at the one installed below.
ENV CHROME_PATH=/usr/bin/google-chrome
WORKDIR /usr/src/app
# Install Chrome.
RUN \
wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add - && \
echo "deb http://dl.google.com/linux/chrome/deb/ stable main" > /etc/apt/sources.list.d/google.list && \
apt-get update && \
apt-get install -y google-chrome-stable && \
rm -rf /var/lib/apt/lists/*
# Copy package.json files
COPY ./package.json /usr/src/app/package.json
COPY ./package-lock.json /usr/src/app/package-lock.json
# Install app dependencies (dev dependencies included; the source tree is
# bind-mounted by docker-compose). The previous `RUN cd /usr/src/app` was a
# no-op layer: each RUN spawns a fresh shell and WORKDIR is already set.
RUN npm ci
# Expose ports.
# - 3015: Lighthouse Runner
EXPOSE 3015
# Run the app
CMD [ "npm", "run", "dev"]

View File

@@ -0,0 +1,73 @@
const { NODE_ENV } = process.env;

if (!NODE_ENV || NODE_ENV === 'development') {
    // Load env vars from /lighthouse-runner/.env
    require('custom-env').env();
}

process.on('exit', () => {
    /* eslint-disable no-console */
    console.log('Lighthouse Shutting Down');
});

process.on('unhandledRejection', err => {
    /* eslint-disable no-console */
    console.error('Unhandled rejection in Lighthouse process occurred');
    /* eslint-disable no-console */
    console.error(err);
});

process.on('uncaughtException', err => {
    /* eslint-disable no-console */
    console.error('Uncaught exception in Lighthouse process occurred');
    /* eslint-disable no-console */
    console.error(err);
});

const express = require('express');
const app = express();
const http = require('http').createServer(app);
const cors = require('cors');
const Main = require('./workers/main');
const cron = require('node-cron');
const config = require('./utils/config');

// Random 0-49s jitter so multiple runner instances don't all hit the API at
// the same moment when the cron fires.
const cronMinuteStartTime = Math.floor(Math.random() * 50);

app.use(cors());
app.set('port', process.env.PORT || 3015);

http.listen(app.get('port'), function() {
    // eslint-disable-next-line
    console.log(
        `Lighthouse Started on port ${app.get('port')}. Fyipe API URL: ${
            config.serverUrl
        }`
    );
});

// Health-check endpoint.
app.get('/', function(req, res) {
    res.setHeader('Content-Type', 'application/json');
    res.send(
        JSON.stringify({
            status: 200,
            message: 'Service Status - OK',
            serviceType: 'fyipe-lighthouse',
        })
    );
});

// App Version
app.get(['/lighthouse/version', '/version'], function(req, res) {
    res.setHeader('Content-Type', 'application/json');
    res.send({ lighthouseVersion: process.env.npm_package_version });
});

// This cron runs every 10 minutes, offset by the start-time jitter above.
cron.schedule('*/10 * * * *', () => {
    setTimeout(() => {
        Main.runJob();
    }, cronMinuteStartTime * 1000);
});

module.exports = app;

5194
lighthouse-runner/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,31 @@
{
"name": "lighthouse-runner",
"version": "3.0.0",
"description": "Lighthouse Runner for URL monitors",
"main": "index.js",
"scripts": {
"start": "node --max-http-header-size=80000 index.js",
"dev": "nodemon --inspect=0.0.0.0 --max-http-header-size=80000 index.js",
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "David Adewole",
"license": "MIT",
"dependencies": {
"axios": "^0.21.1",
"chrome-launcher": "^0.14.0",
"cors": "^2.8.5",
"custom-env": "^2.0.1",
"express": "^4.17.1",
"get-ssl-certificate": "^2.3.3",
"lighthouse": "^8.0.0",
"moment": "^2.29.1",
"node-cron": "^3.0.0",
"node-fetch": "^2.6.1",
"ping": "^0.4.1",
"winston": "^3.3.3",
"winston-slack-transport": "^2.0.0"
},
"devDependencies": {
"nodemon": "^2.0.7"
}
}

106
lighthouse-runner/utils/api.js Executable file
View File

@@ -0,0 +1,106 @@
const axios = require('axios');
const config = require('./config');

/**
 * Normalize an axios failure to the payload callers expect:
 * prefer error.response.data, then its nested .data, else the raw error.
 */
const normalizeError = error => {
    let normalized = error;
    if (normalized && normalized.response && normalized.response.data) {
        normalized = normalized.response.data;
    }
    if (normalized && normalized.data) {
        normalized = normalized.data;
    }
    return normalized;
};

/**
 * Common headers for every request to the fyipe API, including the cluster
 * key used by the backend's lighthouse authorization middleware.
 */
const getHeaders = () => {
    return {
        'Access-Control-Allow-Origin': '*',
        Accept: 'application/json',
        'Content-Type': 'application/json;charset=UTF-8',
        clusterKey: config.clusterKey,
    };
};

/**
 * Perform a request against the fyipe API.
 * axios already returns a promise, so there is no need to wrap it in
 * `new Promise(...)` (the previous code repeated that anti-pattern four
 * times); resolve with the response body and reject with a normalized error.
 * @param {string} method - HTTP verb.
 * @param {string} url - path relative to config.serverUrl.
 * @param {object} [data] - optional request body.
 */
const request = (method, url, data) =>
    axios({
        method,
        url: `${config.serverUrl}/${url}`,
        headers: getHeaders(),
        data,
    }).then(
        response => response.data,
        error => Promise.reject(normalizeError(error))
    );

const _this = {
    getHeaders,
    postApi: (url, data) => request('POST', url, data),
    getApi: url => request('GET', url),
    putApi: (url, data) => request('PUT', url, data),
    deleteApi: (url, data) => request('DELETE', url, data),
};

module.exports = _this;

View File

@@ -0,0 +1,44 @@
const packageJson = require('../package.json');
const COMMAND = {
linux: {
load: "top -b -n 2 | egrep --color 'load average|%Cpu'",
cpu: "egrep --color 'processor|cores' /proc/cpuinfo",
mem: "egrep --color 'Mem|Swap' /proc/meminfo",
disk: "df -h | egrep --color '/dev/xvda1|/dev/sda7|/dev/nvme0n1p1'",
temp: "sensors | egrep --color 'CPU'",
},
darwin: {
load: "top -l 1 | egrep --color 'Load Avg|CPU usage'",
cpu: 'sysctl -n machdep.cpu.core_count',
mem: {
used: "top -l 1 | egrep --color 'PhysMem'",
total: 'sysctl -n hw.memsize',
swap: 'sysctl -n vm.swapusage',
},
disk: "df -h | egrep --color '/dev/disk1s2'",
temp: 'sysctl -n machdep.xcpm.cpu_thermal_level',
},
win: {
load: 'wmic cpu get loadpercentage',
cpu: 'wmic cpu get numberofcores',
mem: {
free: 'wmic os get freephysicalmemory',
total: 'wmic computersystem get totalphysicalmemory',
totalSwap: 'wmic os get totalvirtualmemorySize',
freeSwap: 'wmic os get freevirtualmemory',
},
disk: {
total: 'wmic logicaldisk get size',
free: 'wmic logicaldisk get freespace',
},
temp: 'wmic computersystem get thermalstate',
},
};
module.exports = {
COMMAND,
serverUrl: process.env['SERVER_URL'],
clusterKey: process.env['CLUSTER_KEY'],
lighthouseVersion: packageJson.version,
};

View File

@@ -0,0 +1,32 @@
const winston = require('winston');
const Slack = require('winston-slack-transport');
if (
process.env.PORT &&
process.env.SLACK_ERROR_LOG_WEBHOOK &&
process.env.SLACK_ERROR_LOG_CHANNEL
) {
winston.add(Slack, {
webhook_url: process.env.SLACK_ERROR_LOG_WEBHOOK,
channel: '#' + process.env.SLACK_ERROR_LOG_CHANNEL,
username: 'Error Bot',
handleExceptions: true,
});
}
module.exports = {
log: (functionName, error) => {
error = error && error.message ? error.message : error;
winston.error(
JSON.stringify(
{
error: String(error),
functionName: String(functionName),
stack: new Error().stack,
},
0,
2
)
);
},
};

View File

@@ -0,0 +1,33 @@
const postApi = require('./api').postApi;
module.exports = {
headers: async (val, type) => {
const header = {};
if (type && type.length) {
header['Content-Type'] = type;
}
if (val && val.length) {
val.forEach(head => {
header[head.key] = head.value;
});
}
return header;
},
body: async (val, type) => {
let bodyContent = {};
if (type && type === 'formData' && val && val[0] && val[0].key) {
val.forEach(bod => {
bodyContent[bod.key] = bod.value;
});
bodyContent = JSON.stringify(bodyContent);
} else if (type && type === 'text' && val && val.length) {
bodyContent = val;
}
return bodyContent;
},
ping: async function(monitorId, data) {
return await postApi(`lighthouse/ping/${monitorId}`, data);
},
};

View File

@@ -0,0 +1,27 @@
const getApi = require('../utils/api').getApi;
const UrlMonitors = require('./urlMonitors');
const ErrorService = require('../utils/errorService');
module.exports = {
runJob: async function() {
try {
let monitors = await getApi('lighthouse/monitors');
monitors = JSON.parse(monitors.data); // parse the stringified data
await Promise.all(
monitors.map(monitor => {
if(monitor.type === 'url'){
const probe = monitor.pollTime.filter(probe => probe.probeId);
if(probe.length > 0){ // This checks that the ssl result has already been published i.e probe is runnning.
return UrlMonitors.ping(monitor);
}else{
ErrorService.log('getApi',"Please Make Sure Probe Server is Online.")
}
}
return null;
})
);
} catch (error) {
ErrorService.log('getApi', error);
}
},
}

View File

@@ -0,0 +1,95 @@
/* eslint-disable no-console */
const UrlService = require('../utils/urlService');
const ErrorService = require('../utils/errorService');
const { fork } = require('child_process');
const moment = require('moment');
// This runs the lighthouse of URL Monitors
module.exports = {
ping: async monitor => {
try {
if (monitor && monitor.type) {
if (monitor.data.url) {
const now = new Date().getTime();
const scanIntervalInDays = monitor.lighthouseScannedAt
? moment(now).diff(
moment(monitor.lighthouseScannedAt),
'days'
)
: -1;
if (
(monitor.lighthouseScanStatus &&
monitor.lighthouseScanStatus === 'scan') ||
(monitor.lighthouseScanStatus &&
monitor.lighthouseScanStatus === 'failed') ||
((!monitor.lighthouseScannedAt ||
scanIntervalInDays > 0) &&
(!monitor.lighthouseScanStatus ||
monitor.lighthouseScanStatus !== 'scanning'))
) {
await UrlService.ping(monitor._id, {
monitor,
resp: { lighthouseScanStatus: 'scanning' },
});
const sites = monitor.siteUrls;
let failedCount = 0;
for (const url of sites) {
try {
const resp = await lighthouseFetch(
monitor,
url
);
await UrlService.ping(monitor._id, {
monitor,
resp,
});
} catch (error) {
failedCount++;
ErrorService.log(
'lighthouseFetch',
error.error
);
}
}
}
}
}
} catch (error) {
ErrorService.log('UrlMonitors.ping', error);
throw error;
}
},
};
/**
 * Run the lighthouse scan for one url in a forked worker process.
 * Resolves with the scan payload, rejects on failure or after a 5 minute
 * timeout.
 */
const lighthouseFetch = (monitor, url) => {
    return new Promise((resolve, reject) => {
        const lighthouseWorker = fork('./utils/lighthouse');
        // Abort scans that hang for more than 5 minutes.
        const timeoutHandler = setTimeout(async () => {
            await processLighthouseScan({
                data: { url },
                error: { message: 'TIMEOUT' },
            });
        }, 300000);
        lighthouseWorker.send(url);
        lighthouseWorker.on('message', async result => {
            await processLighthouseScan(result);
        });
        async function processLighthouseScan(result) {
            clearTimeout(timeoutHandler);
            lighthouseWorker.removeAllListeners();
            // Terminate the child process; without this every scan (and every
            // timeout in particular) leaks a forked worker.
            lighthouseWorker.kill();
            if (result.error) {
                reject({ lighthouseScanStatus: 'failed', ...result });
            } else {
                resolve({ lighthouseScanStatus: 'scanned', ...result });
            }
        }
    });
};

View File

@@ -27,7 +27,7 @@ module.exports = {
const { res, resp, rawResp } = await pingfetch(
monitor.data.url
);
const response = await ApiService.ping(monitor._id, {
monitor,
res,
@@ -36,68 +36,12 @@ module.exports = {
type: monitor.type,
retryCount,
});
if (response && !response.retry) {
retry = false;
} else {
retryCount++;
}
}
const now = new Date().getTime();
const scanIntervalInDays = monitor.lighthouseScannedAt
? moment(now).diff(
moment(monitor.lighthouseScannedAt),
'days'
)
: -1;
if (
(monitor.lighthouseScanStatus &&
monitor.lighthouseScanStatus === 'scan') ||
(monitor.lighthouseScanStatus &&
monitor.lighthouseScanStatus === 'failed') ||
((!monitor.lighthouseScannedAt ||
scanIntervalInDays > 0) &&
(!monitor.lighthouseScanStatus ||
monitor.lighthouseScanStatus !== 'scanning'))
) {
await ApiService.ping(monitor._id, {
monitor,
resp: { lighthouseScanStatus: 'scanning' },
});
const sites = monitor.siteUrls;
let failedCount = 0;
for (const url of sites) {
try {
const resp = await lighthouseFetch(
monitor,
url
);
await ApiService.ping(monitor._id, {
monitor,
resp,
});
} catch (error) {
failedCount++;
ErrorService.log(
'lighthouseFetch',
error.error
);
}
}
await ApiService.ping(monitor._id, {
monitor,
resp: {
lighthouseScanStatus:
failedCount === sites.length
? 'failed'
: 'scanned',
},
});
}
}
}
} catch (error) {
@@ -205,29 +149,3 @@ const pingfetch = async url => {
};
};
const lighthouseFetch = (monitor, url) => {
return new Promise((resolve, reject) => {
const lighthouseWorker = fork('./utils/lighthouse');
const timeoutHandler = setTimeout(async () => {
await processLighthouseScan({
data: { url },
error: { message: 'TIMEOUT' },
});
}, 300000);
lighthouseWorker.send(url);
lighthouseWorker.on('message', async result => {
await processLighthouseScan(result);
});
async function processLighthouseScan(result) {
clearTimeout(timeoutHandler);
lighthouseWorker.removeAllListeners();
if (result.error) {
reject({ status: 'failed', ...result });
} else {
resolve({ status: 'scanned', ...result });
}
}
});
};