Update lighthouse workflow

This commit is contained in:
Ibukun Dairo
2020-06-11 08:42:19 +01:00
parent 7cfec2d26c
commit b72ed4bcd2
17 changed files with 326 additions and 92 deletions

View File

@@ -469,6 +469,28 @@ router.post(
}
);
// Route: GET /:projectId/lighthouseLog/:monitorId
// Description: Returns a paginated list of Lighthouse logs for a monitor.
// Query params: limit (defaults to 10) and skip (defaults to 0).
router.get(
    '/:projectId/lighthouseLog/:monitorId',
    getUser,
    isAuthorized,
    async function(req, res) {
        const { monitorId } = req.params;
        const { skip, limit } = req.query;
        try {
            // Fall back to one page of 10 logs when pagination params
            // are absent from the query string.
            const logs = await MonitorService.getLighthouseLogs(
                monitorId,
                limit || 10,
                skip || 0
            );
            return sendListResponse(req, res, logs);
        } catch (error) {
            return sendErrorResponse(req, res, error);
        }
    }
);
router.post(
'/:projectId/inbound/:deviceId',
getUser,

View File

@@ -136,7 +136,8 @@ router.post('/ping/:monitorId', isAuthorizedProbe, async function(
resp && resp.bestPractices ? resp.bestPractices : null;
data.seo = resp && resp.seo ? resp.seo : null;
data.pwa = resp && resp.pwa ? resp.pwa : null;
data.data = resp && resp.data ? resp.data : null;
data.lighthouseData =
resp && resp.lighthouseData ? resp.lighthouseData : null;
if (data.lighthouseScanStatus) {
if (data.lighthouseScanStatus === 'scanning') {
@@ -157,7 +158,7 @@ router.post('/ping/:monitorId', isAuthorizedProbe, async function(
);
}
} else {
if (data.data) {
if (data.lighthouseData) {
log = await ProbeService.saveLighthouseLog(data);
} else {
log = await ProbeService.saveMonitorLog(data);

View File

@@ -4,7 +4,8 @@ const Schema = mongoose.Schema;
const lighthouseLogSchema = new Schema({
monitorId: { type: String, ref: 'Monitor' }, // which monitor does this belong to.
probeId: { type: String, ref: 'Probe' }, // which probe does this belong to.
data: Object,
data: [Object],
url: String,
performance: Number,
accessibility: Number,
bestPractices: Number,

View File

@@ -55,6 +55,7 @@ const monitorSchema = new Schema({
lighthouseScannedAt: { type: Date },
lighthouseScanStatus: String,
lighthouseScannedBy: { type: String, ref: 'Probe' },
sitePages: [String],
});
monitorSchema.virtual('project', {

View File

@@ -5,7 +5,8 @@ module.exports = {
Log.monitorId = data.monitorId;
Log.probeId = data.probeId;
Log.data = data.data;
Log.data = data.lighthouseData.issues;
Log.url = data.lighthouseData.url;
Log.performance = data.performance;
Log.accessibility = data.accessibility;
Log.bestPractices = data.bestPractices;

View File

@@ -596,6 +596,24 @@ module.exports = {
}
},
async getLighthouseLogs(monitorId, limit, skip) {
try {
if (typeof limit === 'string') limit = parseInt(limit);
if (typeof skip === 'string') skip = parseInt(skip);
const lighthouseLogs = await LighthouseLogService.findBy(
{ monitorId },
limit,
skip
);
return lighthouseLogs;
} catch (error) {
ErrorService.log('monitorService.getLighthouseLogs', error);
throw error;
}
},
addSeat: async function(query) {
try {
const project = await ProjectService.findOneBy(query);
@@ -812,6 +830,7 @@ const MonitorModel = require('../models/monitor');
const ProbeService = require('./probeService');
const MonitorStatusService = require('./monitorStatusService');
const MonitorLogService = require('./monitorLogService');
const LighthouseLogService = require('./lighthouseLogService');
const MonitorLogByHourService = require('./monitorLogByHourService');
const MonitorLogByDayService = require('./monitorLogByDayService');
const MonitorLogByWeekService = require('./monitorLogByWeekService');

View File

@@ -647,6 +647,66 @@ export function getMonitorLogsFailure(error) {
};
}
// Fetch Lighthouse Logs list
// Thunk: fetches a page of Lighthouse logs for a monitor and dispatches
// the request/success/failure lifecycle actions around the API call.
export function fetchLighthouseLogs(projectId, monitorId, skip, limit) {
    return function(dispatch) {
        const promise = getApi(
            `monitor/${projectId}/lighthouseLog/${monitorId}?limit=${limit}&skip=${skip}`
        );
        dispatch(fetchLighthouseLogsRequest());
        promise.then(
            function(lighthouseLogs) {
                const payload = {
                    projectId,
                    monitorId,
                    logs: lighthouseLogs.data,
                    skip,
                    limit,
                    count: lighthouseLogs.data.count,
                };
                dispatch(fetchLighthouseLogsSuccess(payload));
            },
            function(error) {
                // Unwrap the most specific error message available,
                // following the convention of the other monitor actions.
                if (error && error.response && error.response.data) {
                    error = error.response.data;
                }
                if (error && error.data) {
                    error = error.data;
                }
                error =
                    error && error.message ? error.message : 'Network Error';
                dispatch(fetchLighthouseLogsFailure(errors(error)));
            }
        );
        return promise;
    };
}
// Action creator: Lighthouse log fetch has started.
export function fetchLighthouseLogsRequest() {
    return { type: types.FETCH_LIGHTHOUSE_LOGS_REQUEST };
}
// Action creator: Lighthouse log fetch succeeded; payload carries the
// fetched page of logs together with its pagination info.
export function fetchLighthouseLogsSuccess(lighthouseLogs) {
    return {
        payload: lighthouseLogs,
        type: types.FETCH_LIGHTHOUSE_LOGS_SUCCESS,
    };
}
// Action creator: Lighthouse log fetch failed; payload is the error.
export function fetchLighthouseLogsFailure(error) {
    return {
        payload: error,
        type: types.FETCH_LIGHTHOUSE_LOGS_FAILURE,
    };
}
export function addSeat(projectId) {
return function(dispatch) {
const promise = postApi(`monitor/${projectId}/addseat`, {});

View File

@@ -70,6 +70,15 @@ export function updatemonitorstatusbysocket(status, probes) {
};
}
// Thunk dispatched from the socket layer when a new Lighthouse log
// arrives; forwards the raw log to the monitor reducer.
export function updatelighthouselogbysocket(log) {
    return function(dispatch) {
        const action = {
            type: 'UPDATE_LIGHTHOUSE_LOG',
            payload: log,
        };
        dispatch(action);
    };
}
export function updateprobebysocket(probe) {
return function(dispatch) {
dispatch({

View File

@@ -17,6 +17,7 @@ import {
incidentcreatedbysocket,
updatemonitorlogbysocket,
updatemonitorstatusbysocket,
updatelighthouselogbysocket,
updateprobebysocket,
addnotifications,
teamMemberRoleUpdate,
@@ -58,6 +59,9 @@ class SocketApp extends Component {
socket.removeListener(
`updateMonitorStatus-${this.props.project._id}`
);
socket.removeListener(
`updateLighthouseLog-${this.props.project._id}`
);
socket.removeListener(`updateProbe-${this.props.project._id}`);
socket.removeListener(
`NewNotification-${this.props.project._id}`
@@ -332,6 +336,29 @@ class SocketApp extends Component {
);
}
});
socket.on(`updateLighthouseLog-${this.props.project._id}`, function(
data
) {
const isUserInProject = thisObj.props.project
? thisObj.props.project.users.some(
user => user.userId === loggedInUser
)
: false;
if (isUserInProject) {
thisObj.props.updatelighthouselogbysocket(data);
} else {
const subProject = thisObj.props.subProjects.find(
subProject => subProject._id === data.projectId
);
const isUserInSubProject = subProject
? subProject.users.some(
user => user.userId === loggedInUser
)
: false;
if (isUserInSubProject)
thisObj.props.updatelighthouselogbysocket(data);
}
});
socket.on(`updateProbe-${this.props.project._id}`, function(data) {
const isUserInProject = thisObj.props.project
? thisObj.props.project.users.some(
@@ -502,6 +529,7 @@ const mapDispatchToProps = dispatch =>
incidentcreatedbysocket,
updatemonitorlogbysocket,
updatemonitorstatusbysocket,
updatelighthouselogbysocket,
updateprobebysocket,
addnotifications,
teamMemberRoleUpdate,

View File

@@ -109,6 +109,11 @@ export function MonitorChart({
const type = monitor.type;
const checkLogs = data && data.length > 0;
const lighthouseLogs =
monitor.lighthouseLogs && monitor.lighthouseLogs.data
? monitor.lighthouseLogs.data
: [];
const sslCertificate = checkLogs ? data[0].sslCertificate : null;
const sslCertExpiringIn = moment(
new Date(
@@ -825,10 +830,10 @@ export function MonitorChart({
<small
id={`lighthouse-performance-${monitor.name}`}
>
{monitor.lighthouseScores &&
monitor.lighthouseScores
{lighthouseLogs[0] &&
lighthouseLogs[0]
.performance
? `${monitor.lighthouseScores.performance}%`
? `${lighthouseLogs[0].performance}%`
: '-'}
</small>
</span>
@@ -860,10 +865,10 @@ export function MonitorChart({
<small
id={`lighthouse-accessibility-${monitor.name}`}
>
{monitor.lighthouseScores &&
monitor.lighthouseScores
{lighthouseLogs[0] &&
lighthouseLogs[0]
.accessibility
? `${monitor.lighthouseScores.accessibility}%`
? `${lighthouseLogs[0].accessibility}%`
: '-'}
</small>
</span>
@@ -895,10 +900,10 @@ export function MonitorChart({
<small
id={`lighthouse-bestPractices-${monitor.name}`}
>
{monitor.lighthouseScores &&
monitor.lighthouseScores
{lighthouseLogs[0] &&
lighthouseLogs[0]
.bestPractices
? `${monitor.lighthouseScores.bestPractices}%`
? `${lighthouseLogs[0].bestPractices}%`
: '-'}
</small>
</span>
@@ -930,10 +935,9 @@ export function MonitorChart({
<small
id={`lighthouse-seo-${monitor.name}`}
>
{monitor.lighthouseScores &&
monitor.lighthouseScores
.seo
? `${monitor.lighthouseScores.seo}%`
{lighthouseLogs[0] &&
lighthouseLogs[0].seo
? `${lighthouseLogs[0].seo}%`
: '-'}
</small>
</span>
@@ -965,10 +969,9 @@ export function MonitorChart({
<small
id={`lighthouse-pwa-${monitor.name}`}
>
{monitor.lighthouseScores &&
monitor.lighthouseScores
.pwa
? `${monitor.lighthouseScores.pwa}%`
{lighthouseLogs[0] &&
lighthouseLogs[0].pwa
? `${lighthouseLogs[0].pwa}%`
: '-'}
</small>
</span>

View File

@@ -59,6 +59,11 @@ export const GET_MONITOR_LOGS_SUCCESS = 'GET_MONITOR_LOGS_SUCCESS';
export const GET_MONITOR_LOGS_FAILURE = 'GET_MONITOR_LOGS_FAILURE';
export const GET_MONITOR_LOGS_RESET = 'GET_MONITOR_LOGS_RESET';
// Fetch Lighthouse Logs
export const FETCH_LIGHTHOUSE_LOGS_REQUEST = 'FETCH_LIGHTHOUSE_LOGS_REQUEST';
export const FETCH_LIGHTHOUSE_LOGS_SUCCESS = 'FETCH_LIGHTHOUSE_LOGS_SUCCESS';
export const FETCH_LIGHTHOUSE_LOGS_FAILURE = 'FETCH_LIGHTHOUSE_LOGS_FAILURE';
// Fetch Monitor Criteria
export const FETCH_MONITOR_CRITERIA_REQUEST = 'FETCH_MONITOR_CRITERIA_REQUEST';
export const FETCH_MONITOR_CRITERIA_SUCCESS = 'FETCH_MONITOR_CRITERIA_SUCCESS';

View File

@@ -16,6 +16,7 @@ import {
fetchMonitorLogs,
fetchMonitorsIncidents,
fetchMonitorStatuses,
fetchLighthouseLogs,
} from '../actions/monitor';
import { loadPage } from '../actions/page';
import { fetchTutorial } from '../actions/tutorial';
@@ -59,6 +60,12 @@ class DashboardView extends Component {
this.props.startDate,
this.props.endDate
);
this.props.fetchLighthouseLogs(
monitor.projectId._id || monitor.projectId,
monitor._id,
0,
1
);
});
}
});
@@ -95,6 +102,12 @@ class DashboardView extends Component {
this.props.startDate,
this.props.endDate
);
this.props.fetchLighthouseLogs(
monitor.projectId._id || monitor.projectId,
monitor._id,
0,
1
);
});
}
});
@@ -380,6 +393,7 @@ const mapDispatchToProps = dispatch => {
fetchMonitorLogs,
fetchMonitorsIncidents,
fetchMonitorStatuses,
fetchLighthouseLogs,
loadPage,
fetchTutorial,
getProbes,
@@ -451,6 +465,7 @@ DashboardView.propTypes = {
fetchMonitorLogs: PropTypes.func,
fetchMonitorsIncidents: PropTypes.func.isRequired,
fetchMonitorStatuses: PropTypes.func.isRequired,
fetchLighthouseLogs: PropTypes.func.isRequired,
subProjects: PropTypes.array,
monitorTutorial: PropTypes.object,
getProbes: PropTypes.func,

View File

@@ -30,6 +30,9 @@ import {
FETCH_MONITOR_STATUSES_REQUEST,
FETCH_MONITOR_STATUSES_SUCCESS,
FETCH_MONITOR_STATUSES_FAILURE,
FETCH_LIGHTHOUSE_LOGS_REQUEST,
FETCH_LIGHTHOUSE_LOGS_SUCCESS,
FETCH_LIGHTHOUSE_LOGS_FAILURE,
FETCH_MONITOR_CRITERIA_REQUEST,
FETCH_MONITOR_CRITERIA_SUCCESS,
FETCH_MONITOR_CRITERIA_FAILURE,
@@ -83,6 +86,7 @@ const INITIAL_STATE = {
activeProbe: 0,
fetchMonitorLogsRequest: false,
fetchMonitorStatusesRequest: false,
fetchLighthouseLogsRequest: false,
fetchMonitorCriteriaRequest: false,
fetchMonitorsSubscriberRequest: false,
deleteMonitor: false,
@@ -242,6 +246,9 @@ export default function monitor(state = INITIAL_STATE, action) {
newMonitor.logs = oldMonitor.logs;
if (!newMonitor.statuses)
newMonitor.statuses = oldMonitor.statuses;
if (!newMonitor.lighthouseLogs)
newMonitor.lighthouseLogs =
oldMonitor.lighthouseLogs;
if (!newMonitor.incidents)
newMonitor.incidents = oldMonitor.incidents;
if (!newMonitor.subscribers)
@@ -527,6 +534,55 @@ export default function monitor(state = INITIAL_STATE, action) {
fetchMonitorStatusesRequest: false,
});
case FETCH_LIGHTHOUSE_LOGS_REQUEST:
return Object.assign({}, state, {
fetchLighthouseLogsRequest: true,
});
case FETCH_LIGHTHOUSE_LOGS_SUCCESS:
return Object.assign({}, state, {
monitorsList: {
...state.monitorsList,
requesting: false,
error: null,
success: true,
monitors: state.monitorsList.monitors.map(monitor => {
monitor.monitors =
monitor._id === action.payload.projectId
? monitor.monitors.map(monitor => {
if (
monitor._id ===
action.payload.monitorId
) {
monitor.lighthouseLogs = {
data: action.payload.logs.data,
skip: action.payload.skip,
limit: action.payload.limit,
count: action.payload.count,
};
return monitor;
} else {
return monitor;
}
})
: monitor.monitors;
return monitor;
}),
},
fetchLighthouseLogsRequest: false,
});
case FETCH_LIGHTHOUSE_LOGS_FAILURE:
return Object.assign({}, state, {
monitorsList: {
...state.monitorsList,
requesting: false,
error: action.payload,
success: false,
},
fetchLighthouseLogsRequest: false,
});
case 'UPDATE_DATE_RANGE':
return Object.assign({}, state, {
monitorsList: {
@@ -884,6 +940,58 @@ export default function monitor(state = INITIAL_STATE, action) {
fetchMonitorStatusesRequest: false,
});
case 'UPDATE_LIGHTHOUSE_LOG':
return Object.assign({}, state, {
monitorsList: {
...state.monitorsList,
requesting: false,
error: null,
success: true,
monitors: state.monitorsList.monitors.map(monitor => {
monitor.monitors =
monitor._id === action.payload.projectId
? monitor.monitors.map(monitor => {
if (
monitor._id ===
action.payload.monitorId
) {
if (
monitor.lighthouseLogs &&
monitor.lighthouseLogs.data
) {
monitor.lighthouseLogs = {
...monitor.lighthouseLogs,
data: [
action.payload.data,
...monitor.lighthouseLogs
.data,
],
count:
monitor.lighthouseLogs
.count + 1,
};
} else {
monitor.lighthouseLogs = {
data: [action.payload.data],
skip: 0,
limit: 1,
count: 1,
};
}
return monitor;
} else {
return monitor;
}
})
: monitor.monitors;
return monitor;
}),
},
fetchLighthouseLogsRequest: false,
});
case FETCH_MONITOR_CRITERIA_REQUEST:
return Object.assign({}, state, {
fetchMonitorCriteriaRequest: action.payload,

View File

@@ -578,11 +578,6 @@
"delayed-stream": "~1.0.0"
}
},
"commander": {
"version": "2.20.3",
"resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz",
"integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ=="
},
"concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
@@ -2333,11 +2328,6 @@
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
"sax": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
"integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw=="
},
"semver": {
"version": "5.7.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
@@ -2439,27 +2429,6 @@
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz",
"integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA=="
},
"sitemap-stream-parser": {
"version": "1.7.0",
"resolved": "https://registry.npmjs.org/sitemap-stream-parser/-/sitemap-stream-parser-1.7.0.tgz",
"integrity": "sha512-aGNRTohb0G9uhrS04C6NlTBRdCK7XzWpWEXKN5cUjiYkhbea+g6FWm3Js24Kw8EM+ryeZLM5fCPnPEEufwW4Hw==",
"requires": {
"async": "^2.6.1",
"commander": "^2.15.1",
"request": "^2.87.0",
"sax": "^1.2.4"
},
"dependencies": {
"async": {
"version": "2.6.3",
"resolved": "https://registry.npmjs.org/async/-/async-2.6.3.tgz",
"integrity": "sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==",
"requires": {
"lodash": "^4.17.14"
}
}
}
},
"source-map": {
"version": "0.5.7",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz",

View File

@@ -31,7 +31,6 @@
"moment": "^2.24.0",
"node-cron": "^2.0.3",
"node-fetch": "^2.6.0",
"sitemap-stream-parser": "^1.7.0",
"winston": "^2.4.0",
"winston-slack-transport": "^2.0.0"
},

View File

@@ -19,7 +19,7 @@ process.on('message', url => {
launchChromeAndRunLighthouse(url)
.then(results => {
const result = {
data: { url },
lighthouseData: { url, issues: [] },
performance: Math.ceil(
results.categories.performance.score * 100
),

View File

@@ -5,7 +5,6 @@ const fetch = require('node-fetch');
const sslCert = require('get-ssl-certificate');
const { fork } = require('child_process');
const moment = require('moment');
const sitemap = require('sitemap-stream-parser');
// it collects all monitors then ping them one by one to store their response
// checks if the website of the url in the monitors is up or down
@@ -25,55 +24,49 @@ module.exports = {
)
: -1;
if (
(!monitor.lighthouseScannedAt ||
(monitor.lighthouseScanStatus &&
monitor.lighthouseScanStatus === 'failed') ||
((!monitor.lighthouseScannedAt ||
scanIntervalInDays > 0) &&
(!monitor.lighthouseScanStatus ||
monitor.lighthouseScanStatus !== 'scanning')
(!monitor.lighthouseScanStatus ||
monitor.lighthouseScanStatus !== 'scanning'))
) {
await ApiService.ping(monitor._id, {
monitor,
resp: { lighthouseScanStatus: 'scanning' },
});
const urlObject = new URL(monitor.data.url);
const sites = [];
sitemap.parseSitemaps(
`${urlObject.origin}/sitemap.xml`,
url => {
sites.push(url);
},
async err => {
if (err || sites.length === 0)
sites.push(monitor.data.url);
let resp = {};
for (const url of sites) {
try {
resp = await lighthouseFetch(
monitor,
url
);
await ApiService.ping(monitor._id, {
monitor,
resp,
});
} catch (error) {
resp = error;
ErrorService.log(
'lighthouseFetch',
error.error
);
}
}
const sites = [monitor.data.url, ...monitor.sitePages];
let failedCount = 0;
for (const url of sites) {
try {
const resp = await lighthouseFetch(
monitor,
url
);
await ApiService.ping(monitor._id, {
monitor,
resp: { lighthouseScanStatus: 'scanned' },
resp,
});
} catch (error) {
failedCount++;
ErrorService.log(
'lighthouseFetch',
error.error
);
}
);
}
await ApiService.ping(monitor._id, {
monitor,
resp: {
lighthouseScanStatus:
failedCount === sites.length
? 'failed'
: 'scanned',
},
});
}
await ApiService.ping(monitor._id, {