lighthouse-runner

This commit is contained in:
David Adewole
2021-07-01 14:28:17 +01:00
parent c179b49845
commit ea017e3ed7
19 changed files with 118 additions and 545 deletions

View File

@@ -4,6 +4,6 @@ STRIPE_PUBLIC_KEY=pk_test_UynUDrFmbBmFVgJXd9EZCvBj00QAVpdwPv
AMPLITUDE_PUBLIC_KEY=cb70632f45c1ca7fe6180812c0d6494a
SKIP_PREFLIGHT_CHECK=true
PUBLIC_URL=/accounts
#REACT_APP_IS_SAAS_SERVICE=true
#IS_SAAS_SERVICE=true
REACT_APP_IS_SAAS_SERVICE=true
IS_SAAS_SERVICE=true
#REACT_APP_DISABLE_SIGNUP=true

View File

@@ -10,9 +10,9 @@ MONGO_URL=mongodb://localhost:27017/fyipedb
REDIS_HOST=localhost
CLUSTER_KEY=f414c23b4cdf4e84a6a66ecfd528eff2
TEST_TWILIO_NUMBER=+919910568840
#IS_SAAS_SERVICE=true
IS_SAAS_SERVICE=true
ENCRYPTION_KEY=01234567890123456789012345678901
#IS_TESTING=true
IS_TESTING=true
PUSHNOTIFICATION_PRIVATE_KEY=8aXTsH48-cegK-xBApLxxOezCOZIjaWpg81Dny2zbio
PUSHNOTIFICATION_PUBLIC_KEY=BFAPbOTTU14VbTe_dnoYlVnOPLKUNm8GYmC50n3i4Ps64sk1Xqx8e894Clrscn1L2PsQ8-l4SsJVw7NRg4cx69Y
PUSHNOTIFICATION_URL=mailto:support@fyipe.com

View File

@@ -5,11 +5,8 @@
*/
const express = require('express');
const LighthouseService = require('../services/LighthouseService');
const ProbeService = require('../services/ProbeService');
const MonitorService = require('../services/monitorService');
const ProjectService = require('../services/projectService');
const MonitorLogService = require('../services/monitorLogService');
const LighthouseLogService = require('../services/lighthouseLogService');
const router = express.Router();
const sendErrorResponse = require('../middlewares/response').sendErrorResponse;
@@ -26,31 +23,8 @@ const { isAuthorizedLighthouse } = require('../middlewares/lighthouseAuthorizati
router.get('/monitors', isAuthorizedLighthouse, async function (req, res) {
try {
const monitors = await MonitorService.getUrlMonitors(
req.lighthouse.id,
new Date(new Date().getTime() - 60 * 1000)
);
//Update the lastAlive in the lighthouse servers list located in the status pages.
if (monitors.length > 0) {
const projectIds = {};
for (const monitor of monitors) {
const project = await ProjectService.findOneBy({
_id: monitor.projectId,
});
const projectId = project
? project.parentProjectId
? project.parentProjectId._id
: project._id
: monitor.projectId;
projectIds[projectId] = true;
}
for (const projectId of Object.keys(projectIds)) {
const lighthouse = await LighthouseService.findOneBy({
_id: req.lighthouse.id,
});
global.io.emit(`updatelighthouse-${projectId}`, lighthouse);
}
}
const monitors = await MonitorService.getUrlMonitors();
return sendListResponse(
req,
res,
@@ -66,115 +40,17 @@ router.post('/ping/:monitorId', isAuthorizedLighthouse, async function (
req,
response
) {
// let release;
try {
const {
monitor,
res,
resp,
rawResp,
type,
retryCount,
} = req.body;
let status,
log,
reason,
data = {};
let matchedCriterion;
if (type === 'url') {
const {
stat: validUp,
successReasons: upSuccessReasons,
failedReasons: upFailedReasons,
matchedCriterion: matchedUpCriterion,
} = await (monitor && monitor.criteria && monitor.criteria.up
? ProbeService.conditions(
monitor.type,
monitor.criteria.up,
res,
resp,
rawResp
)
: { stat: false, successReasons: [], failedReasons: [] });
const {
stat: validDegraded,
successReasons: degradedSuccessReasons,
failedReasons: degradedFailedReasons,
matchedCriterion: matchedDegradedCriterion,
} = await (monitor &&
monitor.criteria &&
monitor.criteria.degraded
? ProbeService.conditions(
monitor.type,
monitor.criteria.degraded,
res,
resp,
rawResp
)
: { stat: false, successReasons: [], failedReasons: [] });
const {
stat: validDown,
successReasons: downSuccessReasons,
failedReasons: downFailedReasons,
matchedCriterion: matchedDownCriterion,
} = await (monitor && monitor.criteria && monitor.criteria.down
? ProbeService.conditions(
monitor.type,
[
...monitor.criteria.down.filter(
criterion => criterion.default !== true
),
],
res,
resp,
rawResp
)
: { stat: false, successReasons: [], failedReasons: [] });
if (validUp) {
status = 'online';
reason = upSuccessReasons;
matchedCriterion = matchedUpCriterion;
} else if (validDegraded) {
status = 'degraded';
reason = [...degradedSuccessReasons, ...upFailedReasons];
matchedCriterion = matchedDegradedCriterion;
} else if (validDown) {
matchedCriterion = matchedDownCriterion;
status = 'offline';
reason = [
...downSuccessReasons,
...degradedFailedReasons,
...upFailedReasons,
];
} else {
status = 'offline';
reason = [
...downFailedReasons,
...degradedFailedReasons,
...upFailedReasons,
];
if (monitor.criteria.down) {
matchedCriterion = monitor.criteria.down.find(
criterion => criterion.default === true
);
}
}
data.status = status;
data.reason = reason;
}
console.log("Lighthouse Ping body: ", req.body);
let log, data = {};
data = req.body;
data.responseTime = res || 0;
data.responseStatus = resp && resp.status ? resp.status : null;
data.status = status;
data.sslCertificate =
resp && resp.sslCertificate ? resp.sslCertificate : null;
data.lighthouseScanStatus =
resp && resp.lighthouseScanStatus
resp && (resp.lighthouseScanStatus)
? resp.lighthouseScanStatus
: null;
data.performance =
@@ -187,91 +63,40 @@ router.post('/ping/:monitorId', isAuthorizedLighthouse, async function (
data.pwa = resp && resp.pwa ? resp.pwa : null;
data.lighthouseData =
resp && resp.lighthouseData ? resp.lighthouseData : null;
data.retryCount = retryCount || 0;
data.reason = reason;
data.response = rawResp;
data.matchedCriterion = matchedCriterion;
// update monitor to save the last matched criterion
await MonitorService.updateOneBy(
{
_id: monitor._id,
},
{
lastMatchedCriterion: matchedCriterion,
}
);
data.monitorId = req.params.monitorId || monitor._id;
let probeId = await ProbeService.findBy();
data.probeId = probeId ? probeId[0]._id : null;
data.reason =
data && data.reason && data.reason.length
? data.reason.filter(
(item, pos, self) => self.indexOf(item) === pos
)
: data.reason;
const index =
data.reason && data.reason.indexOf('Request Timed out');
if (index > -1) {
data.reason =
data && data.reason && data.reason.length
? data.reason.filter(
item => !item.includes('Response Time is')
)
: data.reason;
}
if (data.lighthouseScanStatus) {
if (data.lighthouseScanStatus === 'scanning') {
await MonitorService.updateOneBy(
{ _id: data.monitorId },
{
lighthouseScanStatus: data.lighthouseScanStatus,
},
{ fetchLightHouse: true }
);
await LighthouseLogService.updateAllLighthouseLogs(
data.monitor.projectId,
data.monitorId,
{ scanning: true }
);
} else {
await MonitorService.updateOneBy(
{ _id: data.monitorId },
{
lighthouseScannedAt: Date.now(),
lighthouseScanStatus: data.lighthouseScanStatus, // scanned || failed
lighthouseScannedBy: data.probeId,
}
);
}
if (data.lighthouseScanStatus === 'scanning') {
await MonitorService.updateOneBy(
{ _id: data.monitorId },
{
lighthouseScanStatus: data.lighthouseScanStatus,
},
{ fetchLightHouse: true }
);
await LighthouseLogService.updateAllLighthouseLogs(
data.monitor.projectId,
data.monitorId,
{ scanning: true }
);
} else {
if (data.lighthouseData) {
await MonitorService.updateOneBy(
{ _id: data.monitorId },
{
lighthouseScannedAt: Date.now(),
lighthouseScanStatus: data.lighthouseScanStatus, // scanned || failed
lighthouseScannedBy: data.probeId,
}
);
if (data.lighthouseData) { // The scanned results are published
data.scanning = false;
log = await ProbeService.saveLighthouseLog(data);
} else {
data.matchedUpCriterion =
monitor && monitor.criteria && monitor.criteria.up;
data.matchedDownCriterion =
monitor && monitor.criteria && monitor.criteria.down;
data.matchedDegradedCriterion =
monitor &&
monitor.criteria &&
monitor.criteria.degraded;
log = await ProbeService.saveMonitorLog(data);
}
}
return sendItemResponse(req, response, log);
} catch (error) {
return sendErrorResponse(req, response, error);
} finally {
// if (release) {
// release();
// }
}
});

View File

@@ -3,54 +3,17 @@
* Copyright HackerBay, Inc.
*
*/
const LighthouseService = require('../services/lighthouseService');
const sendErrorResponse = require('../middlewares/response').sendErrorResponse;
const ErrorService = require('../services/errorService');
const CLUSTER_KEY = process.env.CLUSTER_KEY;
module.exports = {
isAuthorizedLighthouse: async function (req, res, next) {
isAuthorizedLighthouse: async function(req, res, next) {
try {
let lighthouseKey, lighthouseName, clusterKey, lighthouseVersion;
if (req.params.lighthousekey) {
lighthouseKey = req.params.lighthousekey;
} else if (req.query.lighthouseKey) {
lighthouseKey = req.query.lighthousekey;
} else if (req.headers['lighthousekey']) {
lighthouseKey = req.headers['lighthousekey'];
} else if (req.headers['lighthousekey']) {
lighthouseKey = req.headers['lighthousekey'];
} else if (req.body.lighthouseKey) {
lighthouseKey = req.body.lighthouseKey;
} else {
return sendErrorResponse(req, res, {
code: 400,
message: 'lighthouse Key not found.',
});
}
if (req.params.lighthousename) {
lighthouseName = req.params.lighthousename;
} else if (req.query.lighthousename) {
lighthouseName = req.query.lighthousename;
} else if (req.headers['lighthousename']) {
lighthouseName = req.headers['lighthousename'];
} else if (req.headers['lighthousename']) {
lighthouseName = req.headers['lighthousename'];
} else if (req.body.lighthouseName) {
lighthouseName = req.body.lighthousename;
} else {
return sendErrorResponse(req, res, {
code: 400,
message: 'lighthouse Name not found.',
});
}
let clusterKey;
if (req.params.clusterKey) {
clusterKey = req.params.clusterkey;
clusterKey = req.params.clusterKey;
} else if (req.query.clusterKey) {
clusterKey = req.query.clusterkey;
clusterKey = req.query.clusterKey;
} else if (req.headers['clusterKey']) {
clusterKey = req.headers['clusterKey'];
} else if (req.headers['clusterkey']) {
@@ -59,70 +22,13 @@
clusterKey = req.body.clusterKey;
}
if (req.params.lighthouseversion) {
lighthouseVersion = req.params.lighthouseversion;
} else if (req.query.lighthouseversion) {
lighthouseVersion = req.query.lighthouseversion;
} else if (req.headers['lighthouseversion']) {
lighthouseVersion = req.headers['lighthouseversion'];
} else if (req.body.lighthouseversion) {
lighthouseVersion = req.body.lighthouseversion;
}
let lighthouse = null;
if (clusterKey && clusterKey === CLUSTER_KEY) {
// if cluster key matches then just query by lighthouse name,
// because if the lighthouse key does not match, we can update lighthouse key later
// without updating mongodb database manually.
lighthouse = await LighthouseService.findOneBy({ lighthouseName });
} else {
lighthouse = await LighthouseService.findOneBy({ lighthouseKey, lighthouseName });
}
if (!lighthouse && (!clusterKey || clusterKey !== CLUSTER_KEY)) {
if (!clusterKey ) {
return sendErrorResponse(req, res, {
code: 400,
message: 'lighthouse key and lighthouse name do not match.',
message: 'Authorization Rejected.',
});
}
if (!lighthouse) {
//create a new lighthouse.
lighthouse = await LighthouseService.create({
lighthouseKey,
lighthouseName,
lighthouseVersion,
});
}
if (lighthouse.lighthouseKey !== lighthouseKey) {
//update lighthouse key because it does not match.
await LighthouseService.updateOneBy(
{
lighthouseName,
},
{ lighthouseKey }
);
}
req.lighthouse = {};
req.lighthouse.id = lighthouse._id;
//Update lighthouse version
const lighthouseValue = await LighthouseService.findOneBy({
lighthouseKey,
lighthouseName,
});
if (!lighthouseValue.version || lighthouseValue.version !== lighthouseVersion) {
await LighthouseService.updateOneBy(
{
lighthouseName,
},
{ version: lighthouseVersion }
);
}
next();
} catch (error) {
ErrorService.log('lighthouseAuthorization.isAuthorizedLighthouse', error);

View File

@@ -1,21 +0,0 @@
/**
*
* Copyright HackerBay, Inc.
*
*/
const mongoose = require('../config/db');
const Schema = mongoose.Schema;
const lighthouseSchema = new Schema({
createdAt: { type: Date, default: Date.now },
lighthouseKey: { type: String },
lighthouseName: { type: String },
version: { type: String },
lastAlive: { type: Date, default: Date.now },
deleted: { type: Boolean, default: false },
deletedAt: { type: Date },
});
module.exports = mongoose.model('lighthouse', lighthouseSchema);

View File

@@ -1,73 +0,0 @@
module.exports = {
create: async function(data) {
try {
const _this = this;
let lighthouseKey;
if (data.lighthouseKey) {
lighthouseKey = data.lighthouseKey;
} else {
lighthouseKey = uuidv1();
}
const storedlighthouse = await _this.findOneBy({
lighthouseName: data.lighthouseName,
});
if (storedlighthouse && storedlighthouse.lighthouseName) {
const error = new Error('lighthouse name already exists.');
error.code = 400;
ErrorService.log('lighthouse.create', error);
throw error;
} else {
const lighthouse = new LighthouseModel();
lighthouse.lighthouseKey = lighthouseKey;
lighthouse.lighthouseName = data.lighthouseName;
lighthouse.version = data.lighthouseVersion;
const savedlighthouse = await lighthouse.save();
return savedlighthouse;
}
} catch (error) {
ErrorService.log('lighthouseService.create', error);
throw error;
}
},
updateOneBy: async function(query, data) {
try {
if (!query) {
query = {};
}
query.deleted = false;
const lighthouse = await LighthouseModel.findOneAndUpdate(
query,
{ $set: data },
{
new: true,
}
);
return lighthouse;
} catch (error) {
ErrorService.log('lighthouseService.updateOneBy', error);
throw error;
}
},
findOneBy: async function(query) {
try {
if (!query) {
query = {};
}
query.deleted = false;
const lighthouse = await LighthouseModel.findOne(query, {
deleted: false,
}).lean();
return lighthouse;
} catch (error) {
ErrorService.log('lighthouseService.findOneBy', error);
throw error;
}
},
}
const LighthouseModel = require('../models/lighthouse');
const ErrorService = require('./errorService');

View File

@@ -5,7 +5,7 @@ module.exports = {
//Params:
//Param 1: data: MonitorModal.
//Returns: promise with monitor model or error.
create: async function(data) {
create: async function (data) {
try {
const _this = this;
let subProject = null;
@@ -74,8 +74,8 @@ module.exports = {
plan.category === 'Startup'
? 5
: plan.category === 'Growth'
? 10
: 0;
? 10
: 0;
if (
count < userCount * monitorCount ||
!IS_SAAS_SERVICE ||
@@ -178,7 +178,7 @@ module.exports = {
}
},
updateOneBy: async function(query, data, unsetData) {
updateOneBy: async function (query, data, unsetData) {
const _this = this;
try {
@@ -254,7 +254,7 @@ module.exports = {
}
},
updateBy: async function(query, data) {
updateBy: async function (query, data) {
try {
if (!query) {
query = {};
@@ -354,7 +354,7 @@ module.exports = {
}
},
deleteBy: async function(query, userId) {
deleteBy: async function (query, userId) {
try {
if (!query) {
query = {};
@@ -695,9 +695,8 @@ module.exports = {
}
},
async getUrlMonitors(lighthouseId, date) {
async getUrlMonitors() {
try {
const newdate = new Date();
const monitors = await MonitorModel.find({
$and: [
{
@@ -707,73 +706,28 @@ module.exports = {
{
$or: [
{
$and: [
{
type: {
$in: [
'url',
],
},
},
{
$or: [
{
pollTime: {
$elemMatch: {
lighthouseId,
date: { $lt: date },
},
},
},
{
//pollTime doesn't include the probeId yet.
pollTime: {
$not: {
$elemMatch: {
lighthouseId,
},
},
},
},
],
},
],
lighthouseScanStatus: {
$exists: false,
}
},
],
{
lighthouseScanStatus: {
$exists: true,
$nin: ['scanning', 'scanned']
}
}
]
},
{
type: {
$in: [
'url',
],
},
},
],
});
if (monitors && monitors.length) {
for (const monitor of monitors) {
if (
monitor.pollTime.length === 0 ||
!monitor.pollTime.some(
pt => String(pt.lighthouseId) === String(lighthouseId)
)
) {
await MonitorModel.updateOne(
{ _id: monitor._id },
{ $push: { pollTime: { lighthouseId, date: newdate } } }
);
} else {
await MonitorModel.updateOne(
{
_id: monitor._id,
pollTime: {
$elemMatch: {
lighthouseId,
},
},
},
{ $set: { 'pollTime.$.date': newdate } }
);
}
}
return monitors;
} else {
return [];
}
return monitors;
} catch (error) {
ErrorService.log('monitorService.getUrlMonitors', error);
throw error;
@@ -1002,7 +956,7 @@ module.exports = {
}
},
addSeat: async function(query) {
addSeat: async function (query) {
try {
const project = await ProjectService.findOneBy(query);
let projectSeats = project.seats;
@@ -1027,7 +981,7 @@ module.exports = {
}
},
addSiteUrl: async function(query, data) {
addSiteUrl: async function (query, data) {
try {
let monitor = await this.findOneBy(query);
@@ -1053,7 +1007,7 @@ module.exports = {
}
},
removeSiteUrl: async function(query, data) {
removeSiteUrl: async function (query, data) {
try {
let monitor = await this.findOneBy(query);
const siteUrlIndex =
@@ -1082,7 +1036,7 @@ module.exports = {
}
},
hardDeleteBy: async function(query) {
hardDeleteBy: async function (query) {
try {
await MonitorModel.deleteMany(query);
return 'Monitor(s) removed successfully!';
@@ -1146,14 +1100,14 @@ module.exports = {
status = incidents.some(inc =>
inc.resolvedAt
? moment(inc.resolvedAt)
.utc()
.startOf('day')
.diff(
moment(temp.date)
.utc()
.startOf('day'),
'days'
) > 0
.utc()
.startOf('day')
.diff(
moment(temp.date)
.utc()
.startOf('day'),
'days'
) > 0
: true
)
? 'offline'
@@ -1171,23 +1125,23 @@ module.exports = {
);
const resolveddiff = inc.resolvedAt
? moment(temp.date)
.utc()
.startOf('day')
.diff(
moment(inc.resolvedAt)
.utc()
.startOf('day'),
'days'
)
.utc()
.startOf('day')
.diff(
moment(inc.resolvedAt)
.utc()
.startOf('day'),
'days'
)
: moment(temp.date)
.utc()
.startOf('day')
.diff(
moment()
.utc()
.startOf('day'),
'days'
);
.utc()
.startOf('day')
.diff(
moment()
.utc()
.startOf('day'),
'days'
);
if (creatediff > 0 && resolveddiff < 0) {
return 1440;
} else if (creatediff === 0 && resolveddiff !== 0) {
@@ -1226,7 +1180,7 @@ module.exports = {
}
},
restoreBy: async function(query) {
restoreBy: async function (query) {
const _this = this;
query.deleted = true;
const monitor = await _this.findBy(query);
@@ -1256,7 +1210,7 @@ module.exports = {
// checks if the monitor uptime stat is within the defined uptime on monitor sla
// then update the monitor => breachedMonitorSla
updateMonitorSlaStat: async function(query) {
updateMonitorSlaStat: async function (query) {
try {
const _this = this;
const currentDate = moment().format();
@@ -1324,7 +1278,7 @@ module.exports = {
}
},
calculateTime: async function(statuses, start, range) {
calculateTime: async function (statuses, start, range) {
const timeBlock = [];
let totalUptime = 0;
let totalTime = 0;
@@ -1391,8 +1345,8 @@ module.exports = {
moment(a.start).isSame(b.start)
? 0
: moment(a.start).isAfter(b.start)
? 1
: -1
? 1
: -1
);
//Third step
for (let i = 0; i < incidentsHappenedDuringTheDay.length - 1; i++) {
@@ -1528,7 +1482,7 @@ module.exports = {
return { timeBlock, uptimePercent: (totalUptime / totalTime) * 100 };
},
closeBreachedMonitorSla: async function(projectId, monitorId, userId) {
closeBreachedMonitorSla: async function (projectId, monitorId, userId) {
try {
const monitor = await MonitorModel.findOneAndUpdate(
{
@@ -1550,7 +1504,7 @@ module.exports = {
}
},
changeMonitorComponent: async function(projectId, monitorId, componentId) {
changeMonitorComponent: async function (projectId, monitorId, componentId) {
const monitor = await this.findOneBy({ _id: monitorId });
const component = await componentService.findOneBy({
_id: componentId,
@@ -1574,7 +1528,7 @@ module.exports = {
return updatedMonitor;
},
calcTime: async function(statuses, start, range) {
calcTime: async function (statuses, start, range) {
const timeBlock = [];
let totalUptime = 0;
let totalTime = 0;
@@ -1665,8 +1619,8 @@ module.exports = {
moment(a.start).isSame(b.start)
? 0
: moment(a.start).isAfter(b.start)
? 1
: -1
? 1
: -1
);
//Third step
for (let i = 0; i < incidentsHappenedDuringTheDay.length - 1; i++) {

View File

@@ -4,8 +4,8 @@ STRIPE_PUBLIC_KEY=pk_test_UynUDrFmbBmFVgJXd9EZCvBj00QAVpdwPv
AMPLITUDE_PUBLIC_KEY=4b76c47248f969446af69dfdbf687d90
SKIP_PREFLIGHT_CHECK=true
PUBLIC_URL=/dashboard
#REACT_APP_IS_SAAS_SERVICE=true
#IS_SAAS_SERVICE=true
REACT_APP_IS_SAAS_SERVICE=true
IS_SAAS_SERVICE=true
REACT_APP_VERSION=$npm_package_version
REACT_APP_PUSHNOTIFICATION_PUBLIC_KEY=BFAPbOTTU14VbTe_dnoYlVnOPLKUNm8GYmC50n3i4Ps64sk1Xqx8e894Clrscn1L2PsQ8-l4SsJVw7NRg4cx69Y
STATUSPAGE_DOMAIN=staging-statuspage.fyipe.com

View File

@@ -284,7 +284,7 @@ services:
- /usr/src/app/node_modules/
lighthouse-runner:
ports:
- '3001:3001'
- '3015:3015'
- '9241:9229' # Debugging port.
build:
context: ./lighthouse-runner
@@ -292,10 +292,8 @@ services:
env_file:
- ./lighthouse-runner/.env
environment:
- PORT=3001
- PORT=3015
- SERVER_URL=http://backend:3002
- LIGHTHOUSE_NAME=US
- LIGHTHOUSE_KEY=test-key
- IS_SAAS_SERVICE=${IS_SAAS_SERVICE}
volumes:
- ./lighthouse-runner:/usr/src/app

View File

@@ -194,12 +194,12 @@ services:
- backend
lighthouse-runner:
ports:
- '3001:3001'
- '3015:3015'
build: ./lighthouse-runner
env_file:
- ./lighthouse-runner/.env
environment:
- PORT=3001
- PORT=3015
- SERVER_URL=http://backend:3002
- IS_SAAS_SERVICE=${IS_SAAS_SERVICE}
depends_on:

View File

@@ -1,4 +1,2 @@
CLUSTER_KEY=f414c23b4cdf4e84a6a66ecfd528eff2
LIGHTHOUSE_NAME=US
LIGHTHOUSE_KEY=33b674ca-9fdd-11e9-a2a3-2a2ae2dbccez
SERVER_URL=http://localhost:3002

View File

@@ -29,8 +29,8 @@ RUN npm ci --only=production
COPY . /usr/src/app
# Expose ports.
# - 3001: Lighthouse Runner
EXPOSE 3001
# - 3015: Lighthouse Runner
EXPOSE 3015
#Run the app
CMD [ "npm", "start"]

View File

@@ -30,8 +30,8 @@ COPY ./package-lock.json /usr/src/app/package-lock.json
RUN npm ci
# Expose ports.
# - 3001: Lighthouse Runner
EXPOSE 3001
# - 3015: Lighthouse Runner
EXPOSE 3015
#Run the app
CMD [ "npm", "run", "dev"]

View File

@@ -35,14 +35,12 @@ const config = require('./utils/config');
const cronMinuteStartTime = Math.floor(Math.random() * 50);
app.use(cors());
app.set('port', process.env.PORT || 3001);
app.set('port', process.env.PORT || 3015);
http.listen(app.get('port'), function() {
// eslint-disable-next-line
console.log(
`Lighthouse with Lighthouse Name ${config.lighthouseName} and Lighthouse Key ${
config.lighthouseKey
} Started on port ${app.get('port')}. Fyipe API URL: ${
`Lighthouse Started on port ${app.get('port')}. Fyipe API URL: ${
config.serverUrl
}`
);

View File

@@ -7,10 +7,7 @@ const _this = {
'Access-Control-Allow-Origin': '*',
Accept: 'application/json',
'Content-Type': 'application/json;charset=UTF-8',
lighthouseName: config.lighthouseName,
lighthouseKey: config.lighthouseKey,
clusterKey: config.clusterKey,
lighthouseVersion: config.lighthouseVersion,
};
},
postApi: (url, data) => {

View File

@@ -39,8 +39,6 @@ const COMMAND = {
module.exports = {
COMMAND,
serverUrl: process.env['SERVER_URL'],
lighthouseName: process.env['LIGHTHOUSE_NAME'],
lighthouseKey: process.env['LIGHTHOUSE_KEY'],
clusterKey: process.env['CLUSTER_KEY'],
lighthouseVersion: packageJson.version,
};

View File

@@ -9,9 +9,11 @@ module.exports = {
monitors = JSON.parse(monitors.data); // parse the stringified data
await Promise.all(
monitors.map(monitor => {
// console.log("The received monitor: ", monitor);
if(monitor.type === 'url' && monitor.pollTime && monitor.pollTime.length > 0){
const probe = monitor.pollTime.filter(probe => probe.probeId);
if(probe.length > 0){ // This checks that the probe server is working
console.log("The received monitor after probe check: ", monitor);
return UrlMonitors.ping(monitor);
}else{
ErrorService.log('getApi',"Please Make Sure Probe Server is Online.")

View File

@@ -48,11 +48,12 @@ module.exports = {
let failedCount = 0;
for (const url of sites) {
try {
const resp = await lighthouseFetch(
let resp = await lighthouseFetch(
monitor,
url
);
resp.lighthouseScanStatus = resp.status
console.log("Response from lighthouse fetch: ", resp);
await UrlService.ping(monitor._id, {
monitor,
resp,
@@ -66,15 +67,6 @@ module.exports = {
}
}
await UrlService.ping(monitor._id, {
monitor,
resp: {
lighthouseScanStatus:
failedCount === sites.length
? 'failed'
: 'scanned',
},
});
}
}
}

View File

@@ -42,7 +42,6 @@ module.exports = {
retryCount++;
}
}
// Lighthouse Refactored
}
}
} catch (error) {