fix compile

This commit is contained in:
Nawaz Dhandala
2022-04-25 19:32:13 +01:00
parent 97bf4bf1bb
commit a6b697be4c
39 changed files with 1889 additions and 4 deletions

View File

@@ -1,3 +0,0 @@
# Readme.
This folder contains legacy code which might be useful in the future. This is just a temporary repo for dead code.

124
Probe/Build-temp/dist/Utils/api.js vendored Normal file
View File

@@ -0,0 +1,124 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const axios_1 = __importDefault(require("axios"));
const config_1 = __importDefault(require("./config"));
/*
 * Thin HTTP client the probe uses to talk to the data-ingestor and probe
 * APIs. Every request carries the probe identity headers from config.
 */
const _this = {
    // Common headers (probe identity + JSON content type) for every request.
    getHeaders: () => {
        return {
            'Access-Control-Allow-Origin': '*',
            Accept: 'application/json',
            'Content-Type': 'application/json;charset=UTF-8',
            probeName: config_1.default.probeName,
            probeKey: config_1.default.probeKey,
            clusterKey: config_1.default.clusterKey,
            probeVersion: config_1.default.probeVersion,
        };
    },
    // POST `data` to `${dataIngestorUrl}/${url}`; resolves with response.data.
    post: (url, data) => {
        /*
         * BUG FIX: these methods are arrow functions, so `this` here is not
         * the enclosing object and `this.getHeaders()` threw at runtime.
         * Reference `_this` explicitly.
         */
        const headers = _this.getHeaders();
        return new Promise((resolve, reject) => {
            /*
             * Error [ERR_FR_MAX_BODY_LENGTH_EXCEEDED]: Request body larger than maxBodyLength limit
             * https://stackoverflow.com/questions/58655532/increasing-maxcontentlength-and-maxbodylength-in-axios
             */
            (0, axios_1.default)({
                method: 'POST',
                url: `${config_1.default.dataIngestorUrl}/${url}`,
                headers,
                data,
                maxContentLength: Infinity,
                maxBodyLength: Infinity,
            })
                .then((response) => {
                resolve(response.data);
            })
                /*
                 * BUG FIX: the rejection handler was chained with `.then`,
                 * which never receives errors; it must be `.catch`.
                 */
                .catch((error) => {
                if (error && error.response && error.response.data) {
                    error = error.response.data;
                }
                if (error && error.data) {
                    error = error.data;
                }
                reject(error);
            });
        });
    },
    // GET from `${probeApiUrl}/${url}` with a `limit` query parameter.
    get: (url, limit = 10) => {
        // BUG FIX: `_this`, not `this` (see post()).
        const headers = _this.getHeaders();
        return new Promise((resolve, reject) => {
            (0, axios_1.default)({
                method: 'GET',
                url: `${config_1.default.probeApiUrl}/${url}?limit=${limit}`,
                headers,
            })
                .then((response) => {
                resolve(response.data);
            })
                // BUG FIX: `.catch`, not `.then`, for the error handler.
                .catch((error) => {
                if (error && error.response && error.response.data) {
                    error = error.response.data;
                }
                if (error && error.data) {
                    error = error.data;
                }
                reject(error);
            });
        });
    },
    // PUT `data` to `${dataIngestorUrl}/${url}`; resolves with response.data.
    put: (url, data) => {
        // BUG FIX: `_this`, not `this` (see post()).
        const headers = _this.getHeaders();
        return new Promise((resolve, reject) => {
            /*
             * Error [ERR_FR_MAX_BODY_LENGTH_EXCEEDED]: Request body larger than maxBodyLength limit
             * https://stackoverflow.com/questions/58655532/increasing-maxcontentlength-and-maxbodylength-in-axios
             */
            (0, axios_1.default)({
                method: 'PUT',
                url: `${config_1.default.dataIngestorUrl}/${url}`,
                headers,
                data,
                maxContentLength: Infinity,
                maxBodyLength: Infinity,
            })
                .then((response) => {
                resolve(response.data);
            })
                // BUG FIX: `.catch`, not `.then`, for the error handler.
                .catch((error) => {
                if (error && error.response && error.response.data) {
                    error = error.response.data;
                }
                if (error && error.data) {
                    error = error.data;
                }
                reject(error);
            });
        });
    },
    // DELETE against `${dataIngestorUrl}/${url}` with an optional body.
    delete: (url, data) => {
        // BUG FIX: `_this`, not `this` (see post()).
        const headers = _this.getHeaders();
        return new Promise((resolve, reject) => {
            (0, axios_1.default)({
                method: 'DELETE',
                url: `${config_1.default.dataIngestorUrl}/${url}`,
                headers,
                data,
            })
                .then((response) => {
                resolve(response.data);
            })
                // BUG FIX: `.catch`, not `.then`, for the error handler.
                .catch((error) => {
                if (error && error.response && error.response.data) {
                    error = error.response.data;
                }
                if (error && error.data) {
                    error = error.data;
                }
                reject(error);
            });
        });
    },
};
exports.default = _this;

View File

@@ -0,0 +1,79 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const api_1 = __importDefault(require("./api"));
exports.default = {
headers: (val, type) => __awaiter(void 0, void 0, void 0, function* () {
const header = {};
if (type && type.length) {
header['Content-Type'] = type;
}
if (val && val.length) {
val.forEach((head) => {
header[head.key] = head.value;
});
}
return header;
}),
body: (val, type) => __awaiter(void 0, void 0, void 0, function* () {
let bodyContent = {};
if (type && type === 'formData' && val && val[0] && val[0].key) {
val.forEach((bod) => {
bodyContent[bod.key] = bod.value;
});
bodyContent = JSON.stringify(bodyContent);
}
else if (type && type === 'text' && val && val.length) {
bodyContent = val;
}
return bodyContent;
}),
setMonitorTime: function (monitorId, responseTime, responseStatus, status) {
return __awaiter(this, void 0, void 0, function* () {
return yield api_1.default.post(`probe/setTime/${monitorId}`, {
responseTime,
responseStatus,
status,
});
});
},
getMonitorTime: function (monitorId, date) {
return __awaiter(this, void 0, void 0, function* () {
return yield api_1.default.post(`probe/getTime/${monitorId}`, { date });
});
},
ping: function (monitorId, data) {
return __awaiter(this, void 0, void 0, function* () {
return yield api_1.default.post(`probe/ping/${monitorId}`, data);
});
},
setScanStatus: function (monitorIds, status) {
return __awaiter(this, void 0, void 0, function* () {
return yield api_1.default.post('probe/set-scan-status', {
scanning: status,
monitorIds,
});
});
},
addProbeScan: function (monitorIds) {
return __awaiter(this, void 0, void 0, function* () {
return yield api_1.default.post('probe/add-probe-scan', { monitorIds });
});
},
removeProbeScan: function (monitorIds) {
return __awaiter(this, void 0, void 0, function* () {
return yield api_1.default.post('probe/remove-probe-scan', { monitorIds });
});
},
};

51
Probe/Build-temp/dist/Utils/config.js vendored Normal file
View File

@@ -0,0 +1,51 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const package_json_1 = __importDefault(require("../package.json"));
/*
 * Shell commands used to read host resource stats (load, cpu, memory, disk,
 * temperature), keyed by platform.
 */
const COMMAND = {
    linux: {
        load: "top -b -n 2 | egrep --color 'load average|%Cpu'",
        cpu: "egrep --color 'processor|cores' /proc/cpuinfo",
        mem: "egrep --color 'Mem|Swap' /proc/meminfo",
        // NOTE(review): hard-codes specific block devices (xvda1/sda7/
        // nvme0n1p1); hosts with other device names will match nothing.
        disk: "df -h | egrep --color '/dev/xvda1|/dev/sda7|/dev/nvme0n1p1'",
        // Requires lm-sensors to be installed on the host.
        temp: "sensors | egrep --color 'CPU'",
    },
    darwin: {
        load: "top -l 1 | egrep --color 'Load Avg|CPU usage'",
        cpu: 'sysctl -n machdep.cpu.core_count',
        mem: {
            used: "top -l 1 | egrep --color 'PhysMem'",
            total: 'sysctl -n hw.memsize',
            swap: 'sysctl -n vm.swapusage',
        },
        // NOTE(review): hard-codes /dev/disk1s2 — verify against the host.
        disk: "df -h | egrep --color '/dev/disk1s2'",
        temp: 'sysctl -n machdep.xcpm.cpu_thermal_level',
    },
    win: {
        load: 'wmic cpu get loadpercentage',
        cpu: 'wmic cpu get numberofcores',
        mem: {
            free: 'wmic os get freephysicalmemory',
            total: 'wmic computersystem get totalphysicalmemory',
            totalSwap: 'wmic os get totalvirtualmemorySize',
            freeSwap: 'wmic os get freevirtualmemory',
        },
        disk: {
            total: 'wmic logicaldisk get size',
            free: 'wmic logicaldisk get freespace',
        },
        temp: 'wmic computersystem get thermalstate',
    },
};
// Probe identity and endpoint configuration, read once from the environment
// at module load; probeVersion mirrors package.json.
exports.default = {
    COMMAND,
    serverUrl: process.env['SERVER_URL'],
    probeName: process.env['PROBE_NAME'],
    probeKey: process.env['PROBE_KEY'],
    clusterKey: process.env['CLUSTER_KEY'],
    probeVersion: package_json_1.default.version,
    dataIngestorUrl: process.env['DATA_INGESTOR_URL'],
    probeApiUrl: process.env['PROBE_API_URL'],
};

View File

@@ -0,0 +1,22 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const api_1 = __importDefault(require("./api"));
exports.default = {
scan: function (security) {
return __awaiter(this, void 0, void 0, function* () {
return yield api_1.default.post(`probe/scan/docker`, { security });
});
},
};

View File

@@ -0,0 +1,70 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
const util_1 = require("util");
const readdir = (0, util_1.promisify)(fs_1.default.readdir);
const rmdir = (0, util_1.promisify)(fs_1.default.rmdir);
const unlink = (0, util_1.promisify)(fs_1.default.unlink);
/**
* @description a promise based utility to read content of a file
* @param {string} filePath path to file
*/
function readFileContent(filePath) {
return new Promise((resolve, reject) => {
if (fs_1.default.existsSync(filePath)) {
fs_1.default.readFile(filePath, { encoding: 'utf8' }, (error, data) => {
if (error) {
reject(error);
}
resolve(data);
});
}
});
}
/**
* @description an asynchronous function to handle deleting a file
* @param {string} file path to file
*/
function deleteFile(file) {
return __awaiter(this, void 0, void 0, function* () {
if (fs_1.default.existsSync(file)) {
yield unlink(file);
}
});
}
/**
* @description a promise based utility to handle deleting a folder and it's content
* @param {string} dir directory with or without file
*/
function deleteFolderRecursive(dir) {
return __awaiter(this, void 0, void 0, function* () {
if (fs_1.default.existsSync(dir)) {
const entries = yield readdir(dir, { withFileTypes: true });
yield Promise.all(entries.map((entry) => {
const fullPath = path_1.default.join(dir, entry.name);
return entry.isDirectory()
? deleteFolderRecursive(fullPath)
: unlink(fullPath);
}));
yield rmdir(dir); // Finally remove now empty directory
}
});
}
exports.default = {
readFileContent,
deleteFile,
deleteFolderRecursive,
};

144
Probe/Build-temp/dist/Utils/pingFetch.js vendored Normal file
View File

@@ -0,0 +1,144 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const Logger_1 = __importDefault(require("CommonServer/Utils/Logger"));
const node_fetch_commonjs_1 = __importDefault(require("node-fetch-commonjs"));
const get_ssl_certificate_1 = __importDefault(require("get-ssl-certificate"));
const https_1 = __importDefault(require("https"));
const http_1 = __importDefault(require("http"));
// Agent that accepts self-signed certificates; used only on the retry path.
const httpsAgent = new https_1.default.Agent({
    rejectUnauthorized: false,
});
const httpAgent = new http_1.default.Agent();
/**
 * Fetches `url` once (retrying with a permissive TLS agent on failure),
 * measuring response time and, for https URLs, capturing the server's SSL
 * certificate details.
 *
 * @param {string} url target URL
 * @param {string} method HTTP method; defaults to 'GET' when falsy
 * @param {object} body request body, attached only when non-empty
 * @param {object} headers request headers, attached only when non-empty
 * @returns {{ res, resp, rawResp }} res = elapsed ms, resp = { status, body,
 * sslCertificate } (status 408 + error body on total failure), rawResp =
 * flattened response fields for storage
 */
const pingfetch = (url, method, body, headers) => __awaiter(void 0, void 0, void 0, function* () {
    const now = new Date().getTime();
    let resp, res, response;
    if (!method) {
        method = 'GET';
    }
    try {
        Logger_1.default.info(`Ping Start: ${method} ${url}`);
        let sslCertificate, data;
        const urlObject = new URL(url);
        // 30s timeout; body/headers only set when non-empty so fetch does
        // not send empty payloads.
        const payload = {
            method: method,
            timeout: 30000,
        };
        if (headers && Object.keys(headers).length > 0) {
            payload.headers = headers;
        }
        if (body && Object.keys(body).length > 0) {
            payload.body = body;
        }
        try {
            /*
             * Try with a normal http / https agent.
             * If this fails we'll try with an agent which has
             * {
             *     rejectUnauthorized: false,
             * }
             *
             * to check for self-signed SSL certs.
             */
            response = yield (0, node_fetch_commonjs_1.default)(url, Object.assign({}, payload));
            Logger_1.default.info(`Response Recieved: ${method} ${url}`);
            // Elapsed time is measured up to headers received, before the
            // body is parsed below.
            res = new Date().getTime() - now;
            try {
                /*
                 * Try getting response json body
                 * If this fails, body is either empty or not valid json
                 * and data should return null
                 */
                data = yield response.json();
            }
            catch (e) {
                // Non-JSON / empty body: leave `data` undefined.
            }
            if (urlObject.protocol === 'https:') {
                const certificate = yield get_ssl_certificate_1.default.get(urlObject.hostname);
                if (certificate) {
                    sslCertificate = {
                        issuer: certificate.issuer,
                        expires: certificate.valid_to,
                        fingerprint: certificate.fingerprint,
                        selfSigned: false,
                    };
                }
            }
        }
        catch (e) {
            /*
             * Retry with an agent which has
             *
             * {
             *     rejectUnauthorized: false,
             * }
             *
             * to check for self-signed SSL certs.
             */
            Logger_1.default.info(`Retrying: ${method} ${url}`);
            response = yield (0, node_fetch_commonjs_1.default)(url, Object.assign(Object.assign({}, payload), (url.startsWith('https')
                ? { agent: httpsAgent }
                : { agent: httpAgent })));
            res = new Date().getTime() - now;
            Logger_1.default.info(`Response Recieved: ${method} ${url}`);
            try {
                /*
                 * Try getting response json body
                 * If this fails, body is either empty or not valid json
                 * and data should return null
                 */
                data = yield response.json();
            }
            catch (e) {
                // Non-JSON / empty body: leave `data` undefined.
            }
            if (urlObject.protocol === 'https:') {
                const certificate = yield get_ssl_certificate_1.default.get(urlObject.hostname);
                if (certificate) {
                    sslCertificate = {
                        issuer: certificate.issuer,
                        expires: certificate.valid_to,
                        fingerprint: certificate.fingerprint,
                        // `e` is the first attempt's failure: mark the cert
                        // self-signed only for that specific TLS error code.
                        selfSigned: e.code === 'DEPTH_ZERO_SELF_SIGNED_CERT',
                    };
                }
            }
        }
        Logger_1.default.info(`Ping End: ${method} ${url}`);
        resp = { status: response.status, body: data, sslCertificate };
    }
    catch (error) {
        // Both attempts failed: report HTTP 408 with the raw error as body.
        res = new Date().getTime() - now;
        resp = { status: 408, body: error };
    }
    return {
        res,
        resp,
        rawResp: {
            ok: response && response.ok ? response.ok : null,
            status: response && response.status
                ? response.status
                : resp && resp.status
                    ? resp.status
                    : null,
            statusText: response && response.statusText ? response.statusText : null,
            headers: response && response.headers && response.headers.raw()
                ? response.headers.raw()
                : null,
            body: resp && resp.body ? resp.body : null,
        },
    };
});
exports.default = pingfetch;

36
Probe/Build-temp/dist/index.js vendored Normal file
View File

@@ -0,0 +1,36 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
require("CommonServer/utils/env");
require("CommonServer/utils/process");
const await_sleep_1 = __importDefault(require("await-sleep"));
const main_1 = __importDefault(require("./workers/main"));
// NOTE(review): this build output lives under dist/Utils (capital U) but
// requires './utils/config' — confirm the path casing works on
// case-sensitive filesystems.
const config_1 = __importDefault(require("./utils/config"));
const Logger_1 = __importDefault(require("CommonServer/Utils/Logger"));
// Random start offset (0-49 s) before the monitoring loop begins.
const cronMinuteStartTime = Math.floor(Math.random() * 50);
setTimeout(() => __awaiter(void 0, void 0, void 0, function* () {
    // Keep monitoring in an infinite loop.
    //eslint-disable-next-line no-constant-condition
    while (true) {
        try {
            yield main_1.default.runJob();
        }
        catch (error) {
            // Only the failure path sleeps; a successful runJob re-runs
            // immediately — NOTE(review): confirm runJob paces itself.
            Logger_1.default.error(error);
            Logger_1.default.info('Sleeping for 30 seconds...');
            yield (0, await_sleep_1.default)(30 * 1000);
        }
    }
}), cronMinuteStartTime * 1000);
Logger_1.default.info(`Probe with Probe Name ${config_1.default.probeName} and Probe Key ${config_1.default.probeKey}. OneUptime Probe API URL: ${config_1.default.probeApiUrl}`);

View File

@@ -0,0 +1,57 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const apiService_1 = __importDefault(require("../Utils/apiService"));
const pingFetch_1 = __importDefault(require("../Utils/pingFetch"));
const Logger_1 = __importDefault(require("CommonServer/Utils/Logger"));
/*
* It collects all monitors then ping them one by one to store their response
* Checks if the website of the url in the monitors is up or down
* Creates incident if a website is down and resolves it when they come back up
*/
exports.default = {
ping: ({ monitor }) => __awaiter(void 0, void 0, void 0, function* () {
if (monitor && monitor.type) {
if (monitor.data.url) {
const headers = yield apiService_1.default.headers(monitor.headers, monitor.bodyType);
const body = yield apiService_1.default.body(monitor && monitor.text && monitor.text.length
? monitor.text
: monitor.formData, monitor && monitor.text && monitor.text.length
? 'text'
: 'formData');
let retry = true;
let retryCount = 0;
while (retry || retryCount > 2) {
const { res, resp, rawResp } = yield (0, pingFetch_1.default)(monitor.data.url, monitor.method, body, headers);
Logger_1.default.info(`Monitor ID ${monitor._id}: Start saving data to ingestor.`);
const response = yield apiService_1.default.ping(monitor._id, {
monitor,
res,
resp,
rawResp,
type: monitor.type,
retryCount,
});
Logger_1.default.info(`Monitor ID ${monitor._id}: End saving data to ingestor.`);
if (response && !response.retry) {
retry = false;
}
else {
retryCount++;
}
}
}
}
}),
};

View File

@@ -0,0 +1,20 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const containerService_1 = __importDefault(require("../Utils/containerService"));
exports.default = {
scan: (security) => __awaiter(void 0, void 0, void 0, function* () {
yield containerService_1.default.scan(security);
}),
};

View File

@@ -0,0 +1,50 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const moment_1 = __importDefault(require("moment"));
const apiService_1 = __importDefault(require("../Utils/apiService"));
/*
 * It collects all IoT device monitors then checks the last time they were pinged.
 * NOTE(review): the original comment said "greater than 2 minutes", but the
 * code below compares against a 3-minute threshold — confirm which is intended.
 * Creates an incident if a device is down and resolves it when it comes back up.
 */
exports.default = {
    ping: (monitor) => __awaiter(void 0, void 0, void 0, function* () {
        const newDate = new moment_1.default();
        const resDate = new Date();
        if (monitor && monitor.type) {
            const d = new moment_1.default(monitor.lastPingTime);
            // Last device ping is older than 3 minutes: if the server still
            // marks it online, report a ping so its status gets re-evaluated.
            if (newDate.diff(d, 'minutes') > 3) {
                const time = yield apiService_1.default.getMonitorTime(monitor._id, newDate);
                if (time.status === 'online') {
                    yield apiService_1.default.ping(monitor._id, {
                        monitor,
                        type: monitor.type,
                    });
                }
            }
            else {
                // Device pinged recently: if marked offline, report a ping
                // (with elapsed ms) to bring it back online.
                const res = new Date().getTime() - resDate.getTime();
                const newTime = yield apiService_1.default.getMonitorTime(monitor._id, newDate);
                if (newTime.status === 'offline') {
                    yield apiService_1.default.ping(monitor._id, {
                        monitor,
                        res,
                        type: monitor.type,
                    });
                }
            }
        }
    }),
};

View File

@@ -0,0 +1,86 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const apiService_1 = __importDefault(require("../Utils/apiService"));
/*
 * It collects all monitors then ping them one by one to store their response
 * Checks if the website of the url in the monitors is up or down
 * Creates incident if a website is down and resolves it when they come back up
 */
exports.default = {
    // Evaluates an incoming-request monitor's up/degraded/down criteria and,
    // if any branch contains an 'incomingTime' condition, reports a ping.
    // `checkCondition` is defined later in this module.
    run: ({ monitor }) => __awaiter(void 0, void 0, void 0, function* () {
        if (monitor && monitor.type) {
            if (monitor.data.link && monitor.criteria) {
                const up = monitor.criteria.up
                    ? yield checkCondition(monitor.criteria.up)
                    : false;
                const degraded = monitor.criteria.degraded
                    ? yield checkCondition(monitor.criteria.degraded)
                    : false;
                const down = monitor.criteria.down
                    ? yield checkCondition(monitor.criteria.down)
                    : false;
                if (up || degraded || down) {
                    // retryCount 3 signals the ingestor not to ask for retries.
                    yield apiService_1.default.ping(monitor._id, {
                        monitor,
                        res: null,
                        resp: null,
                        type: monitor.type,
                        retryCount: 3,
                    });
                }
            }
        }
    }),
};
/**
 * Walks a monitor criteria tree and reports whether any node — including
 * nested `collection` nodes — has responseType === 'incomingTime'.
 *
 * The original duplicated the exact same scan for the `and` and `or` lists;
 * both branches are folded into a single loop over whichever non-empty list
 * this node carries.
 *
 * @param {object} condition criteria node with optional `and` / `or` lists
 * @returns {Promise<boolean>} true when an 'incomingTime' condition exists
 */
const checkCondition = async (condition) => {
    const subConditions =
        condition && condition.and && condition.and.length
            ? condition.and
            : condition && condition.or && condition.or.length
                ? condition.or
                : [];
    for (const sub of subConditions) {
        // Direct hit on this node.
        if (sub && sub.responseType && sub.responseType === 'incomingTime') {
            return true;
        }
        // Recurse into a nested collection of conditions.
        if (sub && sub.collection && sub.collection.length) {
            if (await checkCondition(sub.collection)) {
                return true;
            }
        }
    }
    return false;
};

View File

@@ -0,0 +1,75 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const apiService_1 = __importDefault(require("../Utils/apiService"));
const ping_1 = __importDefault(require("ping"));
/*
* It collects all monitors then ping them one by one to store their response
* Checks if the IP Address of the IP monitor is up or down
* Creates incident if a IP Address is down and resolves it when they come back up
*/
exports.default = {
ping: ({ monitor }) => __awaiter(void 0, void 0, void 0, function* () {
if (monitor && monitor.type) {
if (monitor.data.IPAddress) {
let retry = true;
let retryCount = 0;
while (retry || retryCount > 2) {
const { res, resp, rawResp } = yield pingfetch(monitor.data.IPAddress);
const response = yield apiService_1.default.ping(monitor._id, {
monitor,
res,
resp,
rawResp,
type: monitor.type,
retryCount,
});
if (response && !response.retry) {
retry = false;
}
else {
retryCount++;
}
}
}
}
}),
};
const pingfetch = (IPAddress) => __awaiter(void 0, void 0, void 0, function* () {
const now = new Date().getTime();
let resp = null;
let rawResp = null;
let res = null;
try {
const response = yield ping_1.default.promise.probe(IPAddress, {
timeout: 120,
extra: ['-i', '2'],
});
const isAlive = response ? response.alive : false;
res = new Date().getTime() - now;
resp = {
status: isAlive ? 200 : 408,
body: null,
};
rawResp = {
body: null,
status: isAlive ? 200 : 408,
};
}
catch (error) {
res = new Date().getTime() - now;
resp = { status: 408, body: error };
}
return { res, resp, rawResp };
});

View File

@@ -0,0 +1,614 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const child_process_1 = require("child_process");
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
const node_fetch_commonjs_1 = __importDefault(require("node-fetch-commonjs"));
const uuid_1 = require("uuid");
const apiService_1 = __importDefault(require("../Utils/apiService"));
const Config_1 = require("../Config");
const fsHandlers_1 = require("../Utils/fsHandlers");
exports.default = {
run: function ({ monitor }) {
return __awaiter(this, void 0, void 0, function* () {
if (monitor &&
monitor.type &&
monitor.type === 'kubernetes' &&
monitor.kubernetesConfig) {
const configurationFile = monitor.kubernetesConfig;
const updatedConfigName = `${(0, uuid_1.v4)()}${configurationFile}`;
const configPath = path_1.default.resolve(process.cwd(), updatedConfigName);
const namespace = monitor.kubernetesNamespace || 'default';
yield (0, node_fetch_commonjs_1.default)(`${Config_1.serverUrl}/file/${configurationFile}`).then((res) => {
const dest = fs_1.default.createWriteStream(configPath);
res.body.pipe(dest);
// At this point, writing to the specified file is complete
dest.on('finish', () => __awaiter(this, void 0, void 0, function* () {
if (fs_1.default.existsSync(configPath)) {
const [podOutput, jobOutput, serviceOutput, deploymentOutput, statefulsetOutput,] = yield Promise.all([
loadPodOutput(configPath, namespace),
loadJobOutput(configPath, namespace),
loadServiceOutput(configPath, namespace),
loadDeploymentOutput(configPath, namespace),
loadStatefulsetOutput(configPath, namespace),
]);
if (podOutput &&
jobOutput &&
deploymentOutput &&
statefulsetOutput) {
// Handle pod output
const healthyPods = [], healthyPodData = [], unhealthyPods = [], unhealthyPodData = [], allPods = [], allPodData = [];
let runningPods = 0, completedPods = 0, failedPods = 0;
podOutput.items.forEach((item) => {
/**
* https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.20/#podstatus-v1-core
*/
if (item.status.phase !== 'Running' &&
item.status.phase !== 'Succeeded') {
unhealthyPods.push({
podName: item.metadata.name,
podNamespace: item.metadata.namespace,
podStatus: item.status.phase,
podCreationTimestamp: item.metadata.creationTimestamp,
podRestart: item.status &&
item.status.containerStatuses &&
item.status.containerStatuses[0]
? item.status
.containerStatuses[0]
.restartCount
: 0,
podResourceVersion: item.metadata.resourceVersion,
podUid: item.metadata.uid,
podSelfLink: item.metadata.selfLink,
podConditions: item.status.conditions,
podContainerStatuses: item.status.containerStatuses,
podContainers: item.spec.containers,
});
unhealthyPodData.push({
podName: item.metadata.name,
podNamespace: item.metadata.namespace,
podStatus: item.status.phase,
podCreationTimestamp: item.metadata.creationTimestamp,
podRestart: item.status &&
item.status.containerStatuses &&
item.status.containerStatuses[0]
? item.status
.containerStatuses[0]
.restartCount
: 0,
});
failedPods += 1;
}
else {
healthyPods.push({
podName: item.metadata.name,
podNamespace: item.metadata.namespace,
podStatus: item.status.phase,
podCreationTimestamp: item.metadata.creationTimestamp,
podRestart: item.status &&
item.status.containerStatuses &&
item.status.containerStatuses[0]
? item.status
.containerStatuses[0]
.restartCount
: 0,
podResourceVersion: item.metadata.resourceVersion,
podUid: item.metadata.uid,
podSelfLink: item.metadata.selfLink,
podConditions: item.status.conditions,
podContainerStatuses: item.status.containerStatuses,
podContainers: item.spec.containers,
});
healthyPodData.push({
podName: item.metadata.name,
podNamespace: item.metadata.namespace,
podStatus: item.status.phase,
podCreationTimestamp: item.metadata.creationTimestamp,
podRestart: item.status &&
item.status.containerStatuses &&
item.status.containerStatuses[0]
? item.status
.containerStatuses[0]
.restartCount
: 0,
});
if (item.status.phase === 'Running') {
++runningPods;
}
if (item.status.phase === 'Succeeded') {
++completedPods;
}
}
allPods.push({
podName: item.metadata.name,
podNamespace: item.metadata.namespace,
podStatus: item.status.phase,
podCreationTimestamp: item.metadata.creationTimestamp,
podRestart: item.status &&
item.status.containerStatuses &&
item.status.containerStatuses[0]
? item.status
.containerStatuses[0]
.restartCount
: 0,
podResourceVersion: item.metadata.resourceVersion,
podUid: item.metadata.uid,
podSelfLink: item.metadata.selfLink,
podConditions: item.status.conditions,
podContainerStatuses: item.status.containerStatuses,
podContainers: item.spec.containers,
});
allPodData.push({
podName: item.metadata.name,
podNamespace: item.metadata.namespace,
podStatus: item.status.phase,
podCreationTimestamp: item.metadata.creationTimestamp,
podRestart: item.status &&
item.status.containerStatuses &&
item.status.containerStatuses[0]
? item.status
.containerStatuses[0]
.restartCount
: 0,
});
});
const podData = {
podStat: {
healthy: healthyPods.length,
unhealthy: unhealthyPods.length,
runningPods,
completedPods,
failedPods,
totalPods: podOutput.items.length,
},
healthyPods,
unhealthyPods,
allPods,
healthyPodData,
unhealthyPodData,
allPodData,
};
// Handle job output
const runningJobs = [], succeededJobs = [], failedJobs = [], runningJobData = [], succeededJobData = [], failedJobData = [];
jobOutput.items.forEach((item) => {
/**
* https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.20/#job-v1-batch
*/
if (item.status && item.status.active > 0) {
runningJobs.push({
jobName: item.metadata.name,
jobNamespace: item.metadata.namespace,
jobStatus: 'running',
jobCreationTimestamp: item.metadata.creationTimestamp,
jobResourceVersion: item.metadata.resourceVersion,
jobUid: item.metadata.uid,
jobSelfLink: item.metadata.selfLink,
jobConditions: item.status.conditions,
});
runningJobData.push({
jobName: item.metadata.name,
jobNamespace: item.metadata.namespace,
jobStatus: 'running',
jobCreationTimestamp: item.metadata.creationTimestamp,
});
}
else if (item.status &&
item.status.succeeded > 0) {
succeededJobs.push({
jobName: item.metadata.name,
jobNamespace: item.metadata.namespace,
jobStatus: 'succeeded',
jobCreationTimestamp: item.metadata.creationTimestamp,
jobResourceVersion: item.metadata.resourceVersion,
jobUid: item.metadata.uid,
jobSelfLink: item.metadata.selfLink,
jobConditions: item.status.conditions,
});
succeededJobData.push({
jobName: item.metadata.name,
jobNamespace: item.metadata.namespace,
jobStatus: 'succeeded',
jobCreationTimestamp: item.metadata.creationTimestamp,
});
}
else if (item.status &&
item.status.failed > 0) {
failedJobs.push({
jobName: item.metadata.name,
jobNamespace: item.metadata.namespace,
jobStatus: 'failed',
jobCreationTimestamp: item.metadata.creationTimestamp,
jobResourceVersion: item.metadata.resourceVersion,
jobUid: item.metadata.uid,
jobSelfLink: item.metadata.selfLink,
jobConditions: item.status.conditions,
});
failedJobData.push({
jobName: item.metadata.name,
jobNamespace: item.metadata.namespace,
jobStatus: 'failed',
jobCreationTimestamp: item.metadata.creationTimestamp,
});
}
else {
failedJobs.push({
jobName: item.metadata.name,
jobNamespace: item.metadata.namespace,
jobStatus: 'failed',
jobCreationTimestamp: item.metadata.creationTimestamp,
jobResourceVersion: item.metadata.resourceVersion,
jobUid: item.metadata.uid,
jobSelfLink: item.metadata.selfLink,
jobConditions: item.status.conditions,
});
failedJobData.push({
jobName: item.metadata.name,
jobNamespace: item.metadata.namespace,
jobStatus: 'failed',
jobCreationTimestamp: item.metadata.creationTimestamp,
});
}
});
const jobData = {
jobStat: {
runningJobs: runningJobs.length,
succeededJobs: succeededJobs.length,
failedJobs: failedJobs.length,
totalJobs: runningJobs.length +
succeededJobs.length +
failedJobs.length,
healthy: runningJobs.length +
succeededJobs.length,
unhealthy: failedJobs.length,
},
runningJobs,
succeededJobs,
failedJobs,
allJobs: [
...runningJobs,
...succeededJobs,
...failedJobs,
],
allJobData: [
...runningJobData,
...succeededJobData,
...failedJobData,
],
healthyJobs: [
...runningJobs,
...succeededJobs,
],
healthyJobData: [
...runningJobData,
...succeededJobData,
],
unhealthyJobs: [...failedJobs],
unhealthyJobData: [...failedJobData],
};
// Handle services output
const serviceData = {
runningServices: serviceOutput.items.length,
};
// Handle deployment output
let desiredDeployment = 0, readyDeployment = 0;
const unhealthyDeployments = [], healthyDeployments = [], allDeployments = [], unhealthyDeploymentData = [], healthyDeploymentData = [], allDeploymentData = [];
deploymentOutput.items.forEach((item) => {
if (item.status.readyReplicas) {
readyDeployment +=
item.status.readyReplicas;
}
else {
readyDeployment += 0;
}
desiredDeployment +=
item.status.replicas;
if (item.status.readyReplicas !==
item.status.replicas) {
unhealthyDeployments.push({
deploymentName: item.metadata.name,
deploymentNamespace: item.metadata.namespace,
deploymentCreationTimestamp: item.metadata
.creationTimestamp,
readyDeployment: item.status.readyReplicas ||
0,
desiredDeployment: item.status.replicas,
deploymentResourceVersion: item.metadata
.resourceVersion,
deploymentUid: item.metadata.uid,
deploymentSelfLink: item.metadata.selfLink,
deploymentConditions: item.status.conditions,
});
unhealthyDeploymentData.push({
deploymentName: item.metadata.name,
deploymentNamespace: item.metadata.namespace,
deploymentCreationTimestamp: item.metadata
.creationTimestamp,
readyDeployment: item.status.readyReplicas ||
0,
desiredDeployment: item.status.replicas,
});
}
else {
healthyDeployments.push({
deploymentName: item.metadata.name,
deploymentNamespace: item.metadata.namespace,
deploymentCreationTimestamp: item.metadata
.creationTimestamp,
readyDeployment: item.status.readyReplicas,
desiredDeployment: item.status.replicas,
deploymentResourceVersion: item.metadata
.resourceVersion,
deploymentUid: item.metadata.uid,
deploymentSelfLink: item.metadata.selfLink,
deploymentConditions: item.status.conditions,
});
healthyDeploymentData.push({
deploymentName: item.metadata.name,
deploymentNamespace: item.metadata.namespace,
deploymentCreationTimestamp: item.metadata
.creationTimestamp,
readyDeployment: item.status.readyReplicas,
desiredDeployment: item.status.replicas,
});
}
allDeployments.push({
deploymentName: item.metadata.name,
deploymentNamespace: item.metadata.namespace,
deploymentCreationTimestamp: item.metadata.creationTimestamp,
readyDeployment: item.status.readyReplicas || 0,
desiredDeployment: item.status.replicas,
deploymentResourceVersion: item.metadata.resourceVersion,
deploymentUid: item.metadata.uid,
deploymentSelfLink: item.metadata.selfLink,
deploymentConditions: item.status.conditions,
});
allDeploymentData.push({
deploymentName: item.metadata.name,
deploymentNamespace: item.metadata.namespace,
deploymentCreationTimestamp: item.metadata.creationTimestamp,
readyDeployment: item.status.readyReplicas || 0,
desiredDeployment: item.status.replicas,
});
});
const deploymentData = {
desiredDeployment,
readyDeployment,
healthyDeployments,
unhealthyDeployments,
allDeployments,
healthy: healthyDeployments.length,
unhealthy: unhealthyDeployments.length,
healthyDeploymentData,
unhealthyDeploymentData,
allDeploymentData,
};
// Handle statefulset output
let desiredStatefulsets = 0, readyStatefulsets = 0;
const healthyStatefulsets = [], unhealthyStatefulsets = [], allStatefulset = [], healthyStatefulsetData = [], unhealthyStatefulsetData = [], allStatefulsetData = [];
statefulsetOutput.items.forEach((item) => {
if (item.status.readyReplicas) {
readyStatefulsets +=
item.status.readyReplicas;
}
else {
readyStatefulsets += 0;
}
desiredStatefulsets +=
item.status.replicas;
if (item.status.readyReplicas !==
item.status.replicas) {
unhealthyStatefulsets.push({
statefulsetName: item.metadata.name,
statefulsetNamespace: item.metadata.namespace,
statefulsetCreationTimestamp: item.metadata
.creationTimestamp,
readyStatefulsets: item.status.readyReplicas ||
0,
desiredStatefulsets: item.status.replicas,
statefulsetResourceVersion: item.metadata
.resourceVersion,
statefulsetUid: item.metadata.uid,
statefulsetSelfLink: item.metadata.selfLink,
});
unhealthyStatefulsetData.push({
statefulsetName: item.metadata.name,
statefulsetNamespace: item.metadata.namespace,
statefulsetCreationTimestamp: item.metadata
.creationTimestamp,
readyStatefulsets: item.status.readyReplicas ||
0,
desiredStatefulsets: item.status.replicas,
});
}
else {
healthyStatefulsets.push({
statefulsetName: item.metadata.name,
statefulsetNamespace: item.metadata.namespace,
statefulsetCreationTimestamp: item.metadata
.creationTimestamp,
readyStatefulsets: item.status.readyReplicas,
desiredStatefulsets: item.status.replicas,
statefulsetResourceVersion: item.metadata
.resourceVersion,
statefulsetUid: item.metadata.uid,
statefulsetSelfLink: item.metadata.selfLink,
});
healthyStatefulsetData.push({
statefulsetName: item.metadata.name,
statefulsetNamespace: item.metadata.namespace,
statefulsetCreationTimestamp: item.metadata
.creationTimestamp,
readyStatefulsets: item.status.readyReplicas,
desiredStatefulsets: item.status.replicas,
});
}
allStatefulset.push({
statefulsetName: item.metadata.name,
statefulsetNamespace: item.metadata.namespace,
statefulsetCreationTimestamp: item.metadata.creationTimestamp,
readyStatefulsets: item.status.readyReplicas || 0,
desiredStatefulsets: item.status.replicas,
statefulsetResourceVersion: item.metadata.resourceVersion,
statefulsetUid: item.metadata.uid,
statefulsetSelfLink: item.metadata.selfLink,
});
allStatefulsetData.push({
statefulsetName: item.metadata.name,
statefulsetNamespace: item.metadata.namespace,
statefulsetCreationTimestamp: item.metadata.creationTimestamp,
readyStatefulsets: item.status.readyReplicas || 0,
desiredStatefulsets: item.status.replicas,
});
});
const statefulsetData = {
readyStatefulsets,
desiredStatefulsets,
healthyStatefulsets,
unhealthyStatefulsets,
allStatefulset,
healthy: healthyStatefulsets.length,
unhealthy: unhealthyStatefulsets.length,
healthyStatefulsetData,
unhealthyStatefulsetData,
allStatefulsetData,
};
const data = {
podData,
jobData,
serviceData,
deploymentData,
statefulsetData,
};
yield apiService_1.default.ping(monitor._id, {
monitor,
kubernetesData: data,
type: monitor.type,
});
// Remove the config file
yield (0, fsHandlers_1.deleteFile)(configPath);
}
}
// Remove the config file
yield (0, fsHandlers_1.deleteFile)(configPath);
}));
dest.on('error', (error) => __awaiter(this, void 0, void 0, function* () {
yield (0, fsHandlers_1.deleteFile)(configPath);
throw error;
}));
});
}
});
},
};
/**
 * Fetch pods in `namespace` as parsed JSON via kubectl.
 * Resolves with the parsed object, or '' when kubectl produced no or invalid
 * output (callers treat any falsy result as "no data").
 */
function loadPodOutput(configPath, namespace) {
    return new Promise((resolve) => {
        let podOutput = '';
        const podCommand = `kubectl get pods -o json --kubeconfig ${configPath} --namespace ${namespace}`;
        // shell: true because the whole command line is passed as one string.
        const podCommandOutput = (0, child_process_1.spawn)(podCommand, {
            cwd: process.cwd(),
            shell: true,
        });
        podCommandOutput.stdout.on('data', (data) => {
            podOutput += data.toString();
        });
        podCommandOutput.on('close', () => {
            if (podOutput) {
                try {
                    podOutput = JSON.parse(podOutput);
                }
                catch (e) {
                    // BUGFIX: malformed kubectl output used to throw inside this
                    // event handler and crash the probe; report "no data" instead.
                    podOutput = '';
                }
            }
            resolve(podOutput);
        });
    });
}
/**
 * Fetch jobs in `namespace` as parsed JSON via kubectl.
 * Resolves with the parsed object, or '' when kubectl produced no or invalid
 * output (callers treat any falsy result as "no data").
 */
function loadJobOutput(configPath, namespace) {
    return new Promise((resolve) => {
        let jobOutput = '';
        const jobCommand = `kubectl get jobs -o json --kubeconfig ${configPath} --namespace ${namespace}`;
        // shell: true because the whole command line is passed as one string.
        const jobCommandOutput = (0, child_process_1.spawn)(jobCommand, {
            cwd: process.cwd(),
            shell: true,
        });
        jobCommandOutput.stdout.on('data', (data) => {
            jobOutput += data.toString();
        });
        jobCommandOutput.on('close', () => {
            if (jobOutput) {
                try {
                    jobOutput = JSON.parse(jobOutput);
                }
                catch (e) {
                    // BUGFIX: malformed kubectl output used to throw inside this
                    // event handler and crash the probe; report "no data" instead.
                    jobOutput = '';
                }
            }
            resolve(jobOutput);
        });
    });
}
/**
 * Fetch services in `namespace` as parsed JSON via kubectl.
 * Resolves with the parsed object, or '' when kubectl produced no or invalid
 * output (callers treat any falsy result as "no data").
 */
function loadServiceOutput(configPath, namespace) {
    return new Promise((resolve) => {
        let serviceOutput = '';
        const serviceCommand = `kubectl get services -o json --kubeconfig ${configPath} --namespace ${namespace}`;
        // shell: true because the whole command line is passed as one string.
        const serviceCommandOutput = (0, child_process_1.spawn)(serviceCommand, {
            cwd: process.cwd(),
            shell: true,
        });
        serviceCommandOutput.stdout.on('data', (data) => {
            serviceOutput += data.toString();
        });
        serviceCommandOutput.on('close', () => {
            if (serviceOutput) {
                try {
                    serviceOutput = JSON.parse(serviceOutput);
                }
                catch (e) {
                    // BUGFIX: malformed kubectl output used to throw inside this
                    // event handler and crash the probe; report "no data" instead.
                    serviceOutput = '';
                }
            }
            resolve(serviceOutput);
        });
    });
}
/**
 * Fetch deployments in `namespace` as parsed JSON via kubectl.
 * Resolves with the parsed object, or '' when kubectl produced no or invalid
 * output (callers treat any falsy result as "no data").
 */
function loadDeploymentOutput(configPath, namespace) {
    return new Promise((resolve) => {
        let deploymentOutput = '';
        const deploymentCommand = `kubectl get deployments -o json --kubeconfig ${configPath} --namespace ${namespace}`;
        // shell: true because the whole command line is passed as one string.
        const deploymentCommandOutput = (0, child_process_1.spawn)(deploymentCommand, {
            cwd: process.cwd(),
            shell: true,
        });
        deploymentCommandOutput.stdout.on('data', (data) => {
            deploymentOutput += data.toString();
        });
        deploymentCommandOutput.on('close', () => {
            if (deploymentOutput) {
                try {
                    deploymentOutput = JSON.parse(deploymentOutput);
                }
                catch (e) {
                    // BUGFIX: malformed kubectl output used to throw inside this
                    // event handler and crash the probe; report "no data" instead.
                    deploymentOutput = '';
                }
            }
            resolve(deploymentOutput);
        });
    });
}
/**
 * Fetch statefulsets in `namespace` as parsed JSON via kubectl.
 * Resolves with the parsed object, or '' when kubectl produced no or invalid
 * output (callers treat any falsy result as "no data").
 */
function loadStatefulsetOutput(configPath, namespace) {
    return new Promise((resolve) => {
        let statefulsetOutput = '';
        const statefulsetCommand = `kubectl get statefulsets -o json --kubeconfig ${configPath} --namespace ${namespace}`;
        // shell: true because the whole command line is passed as one string.
        const statefulsetCommandOutput = (0, child_process_1.spawn)(statefulsetCommand, {
            cwd: process.cwd(),
            shell: true,
        });
        statefulsetCommandOutput.stdout.on('data', (data) => {
            statefulsetOutput += data.toString();
        });
        statefulsetCommandOutput.on('close', () => {
            if (statefulsetOutput) {
                try {
                    statefulsetOutput = JSON.parse(statefulsetOutput);
                }
                catch (e) {
                    // BUGFIX: malformed kubectl output used to throw inside this
                    // event handler and crash the probe; report "no data" instead.
                    statefulsetOutput = '';
                }
            }
            resolve(statefulsetOutput);
        });
    });
}

78
Probe/Build-temp/dist/workers/main.js vendored Normal file
View File

@@ -0,0 +1,78 @@
"use strict";
// TypeScript-emitted helper: drives a generator as a coroutine so that
// down-leveled async/await code resolves the returned Promise with the
// generator's final value (and rejects on thrown errors).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap plain yielded values in a Promise of the expected constructor.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript-emitted interop helper: make a CommonJS export usable as an
// ES-module default import. ES modules pass through untouched; anything else
// is wrapped as the `default` member.
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const Logger_1 = __importDefault(require("CommonServer/Utils/Logger"));
const api_1 = __importDefault(require("../Utils/api"));
const apiMonitors_1 = __importDefault(require("./apiMonitors"));
const urlMonitors_1 = __importDefault(require("./urlMonitors"));
const ipMonitors_1 = __importDefault(require("./ipMonitors"));
const serverMonitors_1 = __importDefault(require("./serverMonitors"));
const incomingHttpRequestMonitors_1 = __importDefault(require("./incomingHttpRequestMonitors"));
const kubernetesMonitors_1 = __importDefault(require("./kubernetesMonitors"));
// Maximum number of monitors fetched per polling cycle (RESOURCES_LIMIT env var);
// undefined when unset, in which case api.get falls back to its own default.
let limit = process.env['RESOURCES_LIMIT'];
if (limit && typeof limit === 'string') {
    // Always pass an explicit radix so values like "010" parse as decimal 10.
    limit = parseInt(limit, 10);
}
const await_sleep_1 = __importDefault(require("await-sleep"));
const _this = {
    /**
     * One polling pass: fetch up to `limit` monitors from the probe API and
     * run the appropriate check for each, sequentially. Backs off for 30
     * seconds when there is nothing to monitor.
     */
    runJob: function () {
        return __awaiter(this, void 0, void 0, function* () {
            Logger_1.default.info(`Getting a list of ${limit} monitors`);
            const response = yield api_1.default.get('probe/monitors', limit);
            // The API returns the monitor list as a JSON string.
            const monitors = JSON.parse(response.data);
            Logger_1.default.info(`Number of Monitors fetched - ${monitors.length} monitors`);
            if (monitors.length === 0) {
                // There are no monitors to monitor. Sleep for 30 seconds and then wake up.
                Logger_1.default.info('No monitors to monitor. Sleeping for 30 seconds.');
                yield (0, await_sleep_1.default)(30 * 1000);
            }
            // Dispatch each monitor to its type-specific worker.
            for (const monitor of monitors) {
                Logger_1.default.info(`Monitor ID ${monitor._id}: Currently monitoring`);
                switch (monitor.type) {
                    case 'api':
                        Logger_1.default.info(`Monitor ID ${monitor._id}: Start monitoring API monitor`);
                        yield apiMonitors_1.default.ping({ monitor });
                        Logger_1.default.info(`Monitor ID ${monitor._id}: End monitoring API monitor`);
                        break;
                    case 'url':
                        Logger_1.default.info(`Monitor ID ${monitor._id}: Start monitoring URL monitor`);
                        yield urlMonitors_1.default.ping({ monitor });
                        Logger_1.default.info(`Monitor ID ${monitor._id}: End monitoring URL monitor`);
                        break;
                    case 'ip':
                        Logger_1.default.info(`Monitor ID ${monitor._id}: Start monitoring IP monitor`);
                        yield ipMonitors_1.default.ping({ monitor });
                        Logger_1.default.info(`Monitor ID ${monitor._id}: End monitoring IP monitor`);
                        break;
                    case 'server-monitor':
                        // Agentless server monitors require SSH credentials to run.
                        if (monitor.agentlessConfig) {
                            Logger_1.default.info(`Monitor ID ${monitor._id}: Start monitoring Server monitor`);
                            yield serverMonitors_1.default.run({ monitor });
                            Logger_1.default.info(`Monitor ID ${monitor._id}: End monitoring Server monitor`);
                        }
                        break;
                    case 'incomingHttpRequest':
                        Logger_1.default.info(`Monitor ID ${monitor._id}: Start monitoring Incoming HTTP Request monitor`);
                        yield incomingHttpRequestMonitors_1.default.run({ monitor });
                        Logger_1.default.info(`Monitor ID ${monitor._id}: End monitoring Incoming HTTP Request monitor`);
                        break;
                    case 'kubernetes':
                        Logger_1.default.info(`Monitor ID ${monitor._id}: Start monitoring Kubernetes monitor`);
                        yield kubernetesMonitors_1.default.run({ monitor });
                        Logger_1.default.info(`Monitor ID ${monitor._id}: End monitoring Kubernetes monitor`);
                        break;
                    default:
                        // Unknown monitor type: nothing to do.
                        break;
                }
            }
        });
    },
};
exports.default = _this;

View File

@@ -0,0 +1,325 @@
"use strict";
// TypeScript-emitted helper: drives a generator as a coroutine so that
// down-leveled async/await code resolves the returned Promise with the
// generator's final value (and rejects on thrown errors).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap plain yielded values in a Promise of the expected constructor.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript-emitted interop helper: make a CommonJS export usable as an
// ES-module default import. ES modules pass through untouched; anything else
// is wrapped as the `default` member.
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const apiService_1 = __importDefault(require("../Utils/apiService"));
const fs_1 = __importDefault(require("fs"));
const node_ssh_1 = require("node-ssh");
const node_fetch_commonjs_1 = __importDefault(require("node-fetch-commonjs"));
const Config_1 = require("../Config");
exports.default = {
run: ({ monitor }) => __awaiter(void 0, void 0, void 0, function* () {
if (monitor &&
monitor.type &&
monitor.agentlessConfig &&
typeof monitor.agentlessConfig === 'object') {
const { host, port, username, authentication, password, identityFile, } = monitor.agentlessConfig;
const ssh = new node_ssh_1.NodeSSH();
const config = {
host,
port,
username,
};
if (authentication === 'password') {
config.password = password;
}
else {
yield (0, node_fetch_commonjs_1.default)(`${Config_1.serverUrl}/file/${identityFile}`).then((res) => {
return new Promise((resolve, reject) => {
const dest = fs_1.default.createWriteStream(`./${identityFile}`);
res.body.pipe(dest);
res.body.on('end', () => {
setTimeout(() => {
config.privateKey = fs_1.default.readFileSync(`./${identityFile}`, 'utf8');
resolve();
}, 1000);
});
dest.on('error', reject);
});
});
fs_1.default.unlinkSync(`./${identityFile}`);
}
ssh.connect(config).then(() => __awaiter(void 0, void 0, void 0, function* () {
let os;
try {
const { stdout: osLine, stderr } = yield ssh.execCommand('uname -a');
if (stderr) {
throw stderr;
}
os = osLine.split(' ')[0];
}
catch (e) {
const { stdout: osLine } = yield ssh.execCommand('wmic os get name');
os = osLine.split(' ')[1];
}
const serverData = yield execCommands(ssh, os);
ssh.dispose();
yield apiService_1.default.ping(monitor._id, {
monitor,
serverData,
type: monitor.type,
});
}));
}
}),
};
/**
 * Collect resource metrics from a host by running OS-specific shell commands
 * and scraping their text output.
 *
 * @param exec - either a NodeSSH connection (remote) or an exec function (local).
 * @param os - 'Linux' | 'Darwin' | 'Windows'; any other value yields an object
 *   of undefined metrics.
 * @returns metrics object (cpu load, memory, storage, temperature).
 */
const execCommands = (exec, os) => __awaiter(void 0, void 0, void 0, function* () {
    const isSSH = exec instanceof node_ssh_1.NodeSSH;
    // TODO: complete commands and make platform specific
    let cpuLoad, avgCpuLoad, cpuCores, memoryUsed, totalMemory, swapUsed, storageUsed, totalStorage, storageUsage, mainTemp, maxTemp;
    if (os === 'Linux') {
        const { stdout: load } = yield (isSSH
            ? exec.execCommand(Config_1.COMMAND.linux.load)
            : exec(Config_1.COMMAND.linux.load));
        const { stdout: cpu } = yield (isSSH
            ? exec.execCommand(Config_1.COMMAND.linux.cpu)
            : exec(Config_1.COMMAND.linux.cpu));
        const { stdout: mem } = yield (isSSH
            ? exec.execCommand(Config_1.COMMAND.linux.mem)
            : exec(Config_1.COMMAND.linux.mem));
        const { stdout: disk } = yield (isSSH
            ? exec.execCommand(Config_1.COMMAND.linux.disk)
            : exec(Config_1.COMMAND.linux.disk));
        const { stdout: temp } = yield (isSSH
            ? exec.execCommand(Config_1.COMMAND.linux.temp)
            : exec(Config_1.COMMAND.linux.temp));
        // Tokenize each output line into whitespace-separated words.
        const loadLines = load
            .replace(/\t|:|,|-/gi, '')
            .trim()
            .split('\n')
            .map((line) => {
            const words = line
                .replace(/\s+/g, ' ')
                .trim()
                .split(' ');
            return words;
        });
        const cpuLines = cpu
            .replace(/\t|:/gi, '')
            .trim()
            .split('\n')
            .map((line) => {
            return line.replace(/\s+/g, ' ').trim();
        });
        const memLines = mem
            .replace(/\t|:/gi, '')
            .trim()
            .split('\n')
            .map((line) => {
            const words = line
                .replace(/\s+/g, ' ')
                .trim()
                .split(' ');
            return words[words.length - 2];
        });
        // NOTE(review): the reduce below concatenates strings when more than one
        // data row is present — verify COMMAND.linux.disk yields a single row.
        const diskLines = disk
            .replace(/\t|:|M|G|%/gi, '')
            .trim()
            .split('\n')
            .map((line) => {
            const words = line
                .replace(/\s+/g, ' ')
                .trim()
                .split(' ');
            return {
                storageUsed: words[2],
                totalStorage: words[1],
                storageUsage: words[4],
            };
        })
            .reduce((disks, disk) => {
            return {
                storageUsed: disks.storageUsed + disk.storageUsed,
                totalStorage: disks.totalStorage + disk.totalStorage,
                storageUsage: disks.storageUsage + disk.storageUsage,
            };
        });
        const tempLines = temp
            .replace(/\t|:|\+|°|C/gi, '')
            .replace(/\s+/g, ' ')
            .trim()
            .split(' ');
        cpuLoad = loadLines[3][1];
        avgCpuLoad = loadLines[2][10];
        cpuCores = cpuLines.length / 2;
        // Memory values appear to be reported in KB and scaled to bytes — TODO confirm.
        memoryUsed = (parseFloat(memLines[0]) - parseFloat(memLines[1])) * 1024;
        totalMemory = memLines[0] * 1024;
        swapUsed = (parseFloat(memLines[4]) - parseFloat(memLines[5])) * 1024;
        storageUsed = diskLines.storageUsed * 1024 * 1024 * 1024;
        totalStorage = diskLines.totalStorage * 1024 * 1024 * 1024;
        storageUsage = diskLines.storageUsage;
        mainTemp = tempLines[1];
        maxTemp = tempLines[1];
    }
    else if (os === 'Darwin') {
        const { stdout: load } = yield (isSSH
            ? exec.execCommand(Config_1.COMMAND.darwin.load)
            : exec(Config_1.COMMAND.darwin.load));
        const { stdout: cpu } = yield (isSSH
            ? exec.execCommand(Config_1.COMMAND.darwin.cpu)
            : exec(Config_1.COMMAND.darwin.cpu));
        const { stdout: usedMem } = yield (isSSH
            ? exec.execCommand(Config_1.COMMAND.darwin.mem.used)
            : exec(Config_1.COMMAND.darwin.mem.used));
        const { stdout: totalMem } = yield (isSSH
            ? exec.execCommand(Config_1.COMMAND.darwin.mem.total)
            : exec(Config_1.COMMAND.darwin.mem.total));
        const { stdout: swapMem } = yield (isSSH
            ? exec.execCommand(Config_1.COMMAND.darwin.mem.swap)
            : exec(Config_1.COMMAND.darwin.mem.swap));
        const { stdout: disk } = yield (isSSH
            ? exec.execCommand(Config_1.COMMAND.darwin.disk)
            : exec(Config_1.COMMAND.darwin.disk));
        const { stdout: temp } = yield (isSSH
            ? exec.execCommand(Config_1.COMMAND.darwin.temp)
            : exec(Config_1.COMMAND.darwin.temp));
        const loadLines = load
            .replace(/\t|:|,|-|%/gi, '')
            .trim()
            .split('\n')
            .map((line) => {
            const words = line
                .replace(/\s+/g, ' ')
                .trim()
                .split(' ');
            return words;
        });
        const memLines = usedMem
            .replace(/\t|:|M|G|\(|\)/gi, '')
            .replace(/\s+/g, ' ')
            .trim()
            .split(' ');
        const swapLines = swapMem
            .replace(/\t|:|M|G|\(|\)|=/gi, '')
            .replace(/\s+/g, ' ')
            .trim()
            .split(' ');
        // NOTE(review): same string-concatenation caveat as the Linux disk reduce.
        const diskLines = disk
            .replace(/\t|:|Mi|Gi|%/gi, '')
            .trim()
            .split('\n')
            .map((line) => {
            const words = line
                .replace(/\s+/g, ' ')
                .trim()
                .split(' ');
            return {
                storageUsed: words[2],
                totalStorage: words[1],
                storageUsage: words[4],
            };
        })
            .reduce((disks, disk) => {
            return {
                storageUsed: disks.storageUsed + disk.storageUsed,
                totalStorage: disks.totalStorage + disk.totalStorage,
                storageUsage: disks.storageUsage + disk.storageUsage,
            };
        });
        cpuLoad = loadLines[1][2];
        avgCpuLoad = loadLines[0][3];
        cpuCores = cpu.replace('\n', '');
        memoryUsed =
            (parseFloat(memLines[1]) - parseFloat(memLines[3])) * 1024 * 1024;
        totalMemory = totalMem.replace('\n', '');
        swapUsed = swapLines[3] * 1024 * 1024;
        storageUsed = diskLines.storageUsed * 1024 * 1024 * 1024;
        totalStorage = diskLines.totalStorage * 1024 * 1024 * 1024;
        storageUsage = diskLines.storageUsage;
        mainTemp = temp.replace('\n', '');
        maxTemp = temp.replace('\n', '');
    }
    else if (os === 'Windows') {
        const { stdout: load } = yield (isSSH
            ? exec.execCommand(Config_1.COMMAND.win.load)
            : exec(Config_1.COMMAND.win.load));
        const { stdout: cpu } = yield (isSSH
            ? exec.execCommand(Config_1.COMMAND.win.cpu)
            : exec(Config_1.COMMAND.win.cpu));
        const { stdout: freeMem } = yield (isSSH
            ? exec.execCommand(Config_1.COMMAND.win.mem.free)
            : exec(Config_1.COMMAND.win.mem.free));
        const { stdout: totalMem } = yield (isSSH
            ? exec.execCommand(Config_1.COMMAND.win.mem.total)
            : exec(Config_1.COMMAND.win.mem.total));
        const { stdout: totalSwapMem } = yield (isSSH
            ? exec.execCommand(Config_1.COMMAND.win.mem.totalSwap)
            : exec(Config_1.COMMAND.win.mem.totalSwap));
        const { stdout: freeSwapMem } = yield (isSSH
            ? exec.execCommand(Config_1.COMMAND.win.mem.freeSwap)
            : exec(Config_1.COMMAND.win.mem.freeSwap));
        const { stdout: freeDisk } = yield (isSSH
            ? exec.execCommand(Config_1.COMMAND.win.disk.free)
            : exec(Config_1.COMMAND.win.disk.free));
        const { stdout: totalDisk } = yield (isSSH
            ? exec.execCommand(Config_1.COMMAND.win.disk.total)
            : exec(Config_1.COMMAND.win.disk.total));
        const { stdout: temp } = yield (isSSH
            ? exec.execCommand(Config_1.COMMAND.win.temp)
            : exec(Config_1.COMMAND.win.temp));
        const loadLines = load.replace(/\s+/g, ' ').trim().split(' ');
        const cpuLines = cpu.replace(/\s+/g, ' ').trim().split(' ');
        const freeMemLines = freeMem
            .replace(/\s+/g, ' ')
            .trim()
            .split(' ');
        const totalMemLines = totalMem
            .replace(/\s+/g, ' ')
            .trim()
            .split(' ');
        const totalSwapMemLines = totalSwapMem
            .replace(/\s+/g, ' ')
            .trim()
            .split(' ');
        const freeSwapMemLines = freeSwapMem
            .replace(/\s+/g, ' ')
            .trim()
            .split(' ');
        const freeDiskLines = freeDisk
            .replace(/\s+/g, ' ')
            .trim()
            .split(' ');
        const totalDiskLines = totalDisk
            .replace(/\s+/g, ' ')
            .trim()
            .split(' ');
        const tempLines = temp.replace(/\s+/g, ' ').trim().split(' ');
        cpuLoad = loadLines[1];
        avgCpuLoad = loadLines[1];
        cpuCores = cpuLines[1];
        /*
         * BUGFIX: was `total - free * 1024` — multiplication bound tighter than
         * the subtraction, so a bytes value was subtracted from a KB value.
         * Compute (total - free) first, then scale KB -> bytes, mirroring the
         * Linux branch. (wmic reports memory in KB — TODO confirm totalMemory
         * below is intentionally left unscaled.)
         */
        memoryUsed =
            (parseFloat(totalMemLines[1]) - parseFloat(freeMemLines[1])) * 1024;
        totalMemory = totalMemLines[1];
        swapUsed =
            parseFloat(totalSwapMemLines[1]) - parseFloat(freeSwapMemLines[1]);
        storageUsed =
            parseFloat(totalDiskLines[1]) - parseFloat(freeDiskLines[1]);
        totalStorage = totalDiskLines[1];
        storageUsage = (storageUsed / parseFloat(totalDiskLines[1])) * 100;
        mainTemp = tempLines[1];
        maxTemp = tempLines[1];
    }
    return {
        cpuLoad,
        avgCpuLoad,
        cpuCores,
        memoryUsed,
        totalMemory,
        swapUsed,
        storageUsed,
        totalStorage,
        storageUsage,
        mainTemp,
        maxTemp,
    };
});

View File

@@ -0,0 +1,51 @@
"use strict";
// TypeScript-emitted helper: drives a generator as a coroutine so that
// down-leveled async/await code resolves the returned Promise with the
// generator's final value (and rejects on thrown errors).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap plain yielded values in a Promise of the expected constructor.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript-emitted interop helper: make a CommonJS export usable as an
// ES-module default import. ES modules pass through untouched; anything else
// is wrapped as the `default` member.
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const apiService_1 = __importDefault(require("../Utils/apiService"));
const Logger_1 = __importDefault(require("CommonServer/Utils/Logger"));
const pingFetch_1 = __importDefault(require("../Utils/pingFetch"));
/*
 * Pings a URL monitor and stores its response.
 * Checks whether the website at the monitor's URL is up or down.
 * Creates an incident when the website is down and resolves it when the site comes back up.
 */
exports.default = {
ping: ({ monitor }) => __awaiter(void 0, void 0, void 0, function* () {
if (monitor && monitor.type) {
if (monitor.data.url) {
let retry = true;
let retryCount = 0;
while (retry || retryCount > 2) {
const { res, resp, rawResp } = yield (0, pingFetch_1.default)(monitor.data.url);
Logger_1.default.info(`Monitor ID ${monitor._id}: Start saving data to ingestor.`);
const response = yield apiService_1.default.ping(monitor._id, {
monitor,
res,
resp,
rawResp,
type: monitor.type,
retryCount,
});
Logger_1.default.info(`Monitor ID ${monitor._id}: End saving data to ingestor.`);
if (response && !response.retry) {
retry = false;
}
else {
retryCount++;
}
}
}
}
}),
};

View File

@@ -70,7 +70,10 @@
"uuid": "^8.3.2"
},
"devDependencies": {
"@types/node": "^17.0.22"
"@types/jest": "^27.4.1",
"@types/node": "^17.0.22",
"jest": "^27.5.1",
"ts-jest": "^27.1.4"
}
},
"node_modules/@babel/helper-get-function-arity": {
@@ -2988,14 +2991,17 @@
"Common": {
"version": "file:../Common",
"requires": {
"@types/jest": "^27.4.1",
"@types/nanoid-dictionary": "^4.2.0",
"@types/node": "^17.0.22",
"@types/uuid": "^8.3.4",
"axios": "^0.26.1",
"jest": "^27.5.1",
"moment": "^2.29.2",
"nanoid": "^3.3.2",
"nanoid-dictionary": "^4.3.0",
"slugify": "^1.6.5",
"ts-jest": "^27.1.4",
"uuid": "^8.3.2"
}
},