Compare commits

...

422 Commits

Author SHA1 Message Date
Simon Larsen
8be022e34f refactor: clean up state initialization and formatting in IncidentFeed component 2025-01-16 13:27:31 +00:00
Simon Larsen
ce6938396a feat: add modals for creating public and private incident notes 2025-01-16 13:25:32 +00:00
Simon Larsen
537e2d02e7 feat: enhance incident update permissions and enable root cause editing 2025-01-16 13:07:27 +00:00
Simon Larsen
80b9e48771 fix: remove space before incident number in IncidentsTable and IncidentView components 2025-01-16 12:57:49 +00:00
Simon Larsen
e0c3437c45 feat: implement otel-collector deployment with configurable replica count 2025-01-16 12:54:43 +00:00
Simon Larsen
e5df15a53e test: update expected value for ColumnLength.Color to reflect changes 2025-01-15 21:09:53 +00:00
Simon Larsen
a0f6e979b8 refactor: enhance readability by restructuring Feed and FeedItem components 2025-01-15 20:36:18 +00:00
Simon Larsen
e20624a635 refactor: invert isLastItem condition for rendering FeedItem separator 2025-01-15 20:29:16 +00:00
Simon Larsen
800583ddde refactor: enforce isLastItem prop as required in FeedItem component 2025-01-15 20:29:02 +00:00
Simon Larsen
f0bc71bee4 refactor: improve code readability and structure in OnCallDutyPolicyExecutionLog services and Feed components 2025-01-15 20:26:30 +00:00
Simon Larsen
ba0dd4f2b0 refactor: add display color handling by OnCallDutyPolicy status and update notification displayColor logic 2025-01-15 20:23:11 +00:00
Simon Larsen
63b560ad93 refactor: improve markdown formatting in notifications and update query id handling 2025-01-15 20:20:36 +00:00
Simon Larsen
ae0553a1e5 refactor: update IncidentFeed and OnCallDutyPolicyExecutionLogTimeline services for improved type handling and logging 2025-01-15 19:48:00 +00:00
Simon Larsen
fd4e2737e5 refactor: enhance notification messages to include incident state details and public note content 2025-01-15 19:23:57 +00:00
Simon Larsen
15f18c6d4f refactor: enhance notification messages and add projectId to relevant data structures 2025-01-15 18:52:35 +00:00
Simon Larsen
a84e32fe1b refactor: enhance logging and improve incident feed notification messages across multiple services 2025-01-15 18:37:18 +00:00
Simon Larsen
c09d97310f refactor: update ProgressButtons and ChangeIncidentState components to support completedStepId and improve click handling 2025-01-15 17:35:33 +00:00
Simon Larsen
782eb45eb3 refactor: improve formatting and consistency in multiple components 2025-01-14 21:20:21 +00:00
Simon Larsen
fb37da0aca refactor: update ErrorMessage component to use 'message' prop instead of 'error' across multiple components 2025-01-14 21:16:17 +00:00
Simon Larsen
ae2608e66f refactor: update ErrorMessage component usage to use 'message' prop instead of 'error' across multiple files 2025-01-14 21:01:40 +00:00
Simon Larsen
29565bc24c refactor: clean up unnecessary whitespace and improve code formatting across multiple files 2025-01-14 19:35:54 +00:00
Simon Larsen
da7860fc3f feat: add new incident view pages for Root Cause and Description; update icon properties and breadcrumb links 2025-01-14 19:35:39 +00:00
Simon Larsen
0375e8c568 refactor: improve code formatting in ProgressButtonItem and IncidentFeed components for better readability 2025-01-14 19:22:09 +00:00
Simon Larsen
60c53b32e6 refactor: update ProgressButtonItem and ProgressButtons components for improved structure and readability; enhance error handling in IncidentFeed 2025-01-14 19:21:59 +00:00
Simon Larsen
4d1797e9fa refactor: clean up code formatting and remove unnecessary whitespace in various files 2025-01-14 18:16:51 +00:00
Simon Larsen
1ff4bde7b9 feat: enhance incident feed markdown for OnCallDutyPolicyExecutionLogTimeline with detailed alert information 2025-01-14 18:13:40 +00:00
Simon Larsen
fae6d89a18 feat: add OnCallNotification event type to IncidentFeed and implement related handling in OnCallDutyPolicyExecutionLogTimelineService 2025-01-14 18:09:30 +00:00
Simon Larsen
aa429abe01 feat: implement ProgressButtons and ProgressButtonItem components for enhanced step navigation 2025-01-14 16:01:56 +00:00
Simon Larsen
203763aa95 feat: add OnCallPolicy event type to IncidentFeed and implement related feed updates in OnCallDutyPolicyExecutionLogService 2025-01-14 15:38:05 +00:00
Simon Larsen
0b69ae195f feat: implement onBeforeDelete and onDeleteSuccess methods to handle OwnerTeamRemoved events in IncidentOwnerTeamService 2025-01-14 14:47:28 +00:00
Simon Larsen
0756a43d62 feat: add OwnerUserRemoved and OwnerTeamRemoved event types to IncidentFeed; update feedInfoInMarkdown and improve icon handling 2025-01-14 14:45:32 +00:00
Simon Larsen
13eb3205a2 feat: add RootCause and RemediationNotes event types to IncidentFeed; update feedInfoInMarkdown formatting and icons 2025-01-14 14:12:51 +00:00
Simon Larsen
9dcd295fd0 feat: update incident feed event types for owner additions and enhance related services 2025-01-14 13:59:24 +00:00
Simon Larsen
1fb84ea302 fix: update incidentId references and improve notification messages for clarity 2025-01-14 13:43:31 +00:00
Simon Larsen
53ea3d32dc feat: update incident feed colors and improve notification messages; add refresh button to IncidentFeedElement 2025-01-14 12:36:52 +00:00
Simon Larsen
5c9ec28a4e fix: format feedInfoInMarkdown for better readability in IncidentService 2025-01-14 12:26:30 +00:00
Simon Larsen
04c2293378 feat: add postedAt field to IncidentFeed, AlertFeed, and ScheduledMaintenanceFeed; update services to set postedAt on creation 2025-01-14 12:15:49 +00:00
Simon Larsen
6c672c541a feat: include createdByUserId in IncidentFeed creation for better tracking of incident authors 2025-01-14 11:04:21 +00:00
Simon Larsen
169db73704 fix: remove margin from no items message in Feed component for better layout 2025-01-14 11:01:10 +00:00
Simon Larsen
e980f00f1c feat: add noItemsMessage prop to Feed component and update IncidentFeedElement to display error message when no items are present 2025-01-14 11:00:07 +00:00
Simon Larsen
66a594ed99 feat: replace Feed component with IncidentFeedElement in IncidentView 2025-01-14 10:19:43 +00:00
Simon Larsen
52c35c1e4d feat: update userId handling in IncidentFeed and AlertFeed services; increase LIMIT_PER_PROJECT to 10000; refactor User class to UserUtil; add IncidentFeedElement component 2025-01-13 18:39:06 +00:00
Simon Larsen
caa4103b48 feat: add userId field to AlertFeed, IncidentFeed, and ScheduledMaintenanceFeed services 2025-01-13 17:28:44 +00:00
Simon Larsen
f6069ca4a7 feat: remove tls.verify option from Fluent Bit configuration 2025-01-13 17:22:48 +00:00
Simon Larsen
f519748c44 feat: add userId field and relationship to AlertFeed, IncidentFeed, and ScheduledMaintenanceFeed models 2025-01-13 17:19:37 +00:00
Simon Larsen
27de0f0ddb feat: update displayColor column length to 10 for AlertFeed, IncidentFeed, and ScheduledMaintenanceFeed 2025-01-13 17:14:36 +00:00
Simon Larsen
5426c22740 feat: update moreInformationInMarkdown fields to be optional in IncidentFeed, AlertFeed, and ScheduledMaintenanceFeed 2025-01-13 17:00:46 +00:00
Simon Larsen
a55d4d1e02 refactor: format migration name and clean up up/down methods for consistency 2025-01-13 16:20:51 +00:00
Simon Larsen
2f65b1ee82 feat: add migration to drop IncidentLog, AlertLog, and ScheduledMaintenanceLog tables 2025-01-13 15:04:00 +00:00
Simon Larsen
730dc56316 feat: add migration for IncidentFeed, AlertFeed, and ScheduledMaintenanceFeed tables 2025-01-13 14:59:13 +00:00
Simon Larsen
101e697a12 refactor: update BaseAPI instantiation for ScheduledMaintenanceFeed to improve type clarity 2025-01-13 14:56:18 +00:00
Simon Larsen
1c1488bca5 refactor: replace IncidentLogService with IncidentFeedService in various services 2025-01-13 14:52:55 +00:00
Simon Larsen
0e74adbd08 refactor: simplify FeedItem component and clean up IncidentView layout 2025-01-13 14:11:04 +00:00
Simon Larsen
35947b4010 fix: remove redundant empty line in IncidentView component 2025-01-13 13:27:06 +00:00
Simon Larsen
b304ceddbb feat: add Incident Feed component to display incident updates 2025-01-13 11:25:52 +00:00
Simon Larsen
cba6c48673 fix: update API request method from GET to POST in public-api documentation 2025-01-13 11:12:04 +00:00
Simon Larsen
63b40cde75 fix: update titles for subscription fields in EmailSubscribers component 2025-01-13 10:54:26 +00:00
Simon Larsen
439f2e87a2 feat: add subscription confirmation field to EmailSubscribers component 2025-01-13 10:53:18 +00:00
Simon Larsen
fc0869d9fe fix: correct query merging order in BaseModelTable component 2025-01-13 10:50:50 +00:00
Simon Larsen
fb5646e8c2 feat: update incident log event type and add logging for subscriber notifications in Incident services 2025-01-13 10:37:45 +00:00
Simon Larsen
7538a47be5 fix: update userName type to string and ensure proper string conversion in IncidentNote services 2025-01-13 08:50:31 +00:00
Simon Larsen
8568cc0c89 feat: implement logging for internal and public notes creation in IncidentNote services 2025-01-13 08:48:58 +00:00
Simon Larsen
2457de9757 feat: update IncidentLog and AlertLog models to use displayColor and event type enums 2025-01-12 18:21:08 +00:00
Simon Larsen
a6859631ae feat: add validation for required fields in AlertLogService, IncidentLogService, and ScheduledMaintenanceLogService 2025-01-12 17:50:45 +00:00
Simon Larsen
e737444c52 feat: add severity fields to AlertLog, IncidentLog, and ScheduledMaintenanceLog models 2025-01-12 17:40:25 +00:00
Simon Larsen
b4a2726c81 refactor: improve code formatting and readability in BaseAPI and IncidentView 2025-01-12 14:57:17 +00:00
Simon Larsen
5fc60491ff feat: reorder fields in IncidentView to improve layout and visibility of Incident ID 2025-01-12 14:53:12 +00:00
Simon Larsen
a7558535a6 feat: add IncidentLogService, AlertLogService, and ScheduledMaintenanceLogService to BaseAPI 2025-01-12 13:28:36 +00:00
Simon Larsen
408d06edb9 feat: add AlertLogService, IncidentLogService, and ScheduledMaintenanceLogService; update services index 2025-01-12 12:02:25 +00:00
Simon Larsen
87be913388 feat: enhance filtering logic to support numeric values in BaseModelTable 2025-01-12 11:52:33 +00:00
Simon Larsen
1a034abe96 feat: add NumberFilter component and integrate it into FiltersForm 2025-01-12 11:45:02 +00:00
Simon Larsen
19bdfd66b9 docs: update public API documentation to include overallStatus object details 2025-01-12 10:36:46 +00:00
Simon Larsen
982d051329 feat: implement mutex for incident creation and improve incident number handling 2025-01-12 10:35:04 +00:00
Simon Larsen
b66b1db2cb feat: add Incident Number column to IncidentsTable and IncidentView components 2025-01-12 10:19:38 +00:00
Simon Larsen
12e67a065d feat: add incidentNumber field to Incident model and implement data migration for existing incidents 2025-01-12 10:08:48 +00:00
Simon Larsen
1f3cdc08ac feat: add incidentNumber field to Incident model and implement logic for auto-incrementing incident numbers 2025-01-12 10:03:28 +00:00
Simon Larsen
5ebcba9f46 refactor: improve code readability in StatusPageAPI and Overview components 2025-01-10 14:44:31 +00:00
Simon Larsen
15e49c1b45 feat: implement overall monitor status calculation and update Overview component 2025-01-10 14:40:42 +00:00
Simon Larsen
b71a657dea feat: update API documentation to reflect POST method for status page endpoints 2025-01-09 18:38:38 +00:00
Simon Larsen
72e573bbc4 Merge branch 'release' 2025-01-09 14:32:47 +00:00
Simon Larsen
2113ffefd9 feat: refactor ScheduledMaintenanceLog model and migration for improved readability and structure 2025-01-09 14:32:43 +00:00
Simon Larsen
8da42884c2 feat: add documentation for using Monitor Secrets in custom and synthetic monitors 2025-01-09 14:30:24 +00:00
Simon Larsen
e14c54c6cc Merge branch 'release' 2025-01-08 19:48:16 +00:00
Simon Larsen
10a199f3f5 feat: add IncidentLog and ScheduledMaintenanceLog tables with migrations 2025-01-08 19:47:50 +00:00
Simon Larsen
2df97392b2 feat: enhance server monitor queries by integrating active project status checks 2025-01-08 18:54:06 +00:00
Simon Larsen
fcd2ecd118 feat: improve server monitor request handling by refining query and response logic 2025-01-08 18:41:41 +00:00
Simon Larsen
e2e5533229 Merge branch 'release' 2025-01-08 17:43:00 +00:00
Simon Larsen
cf01fce011 feat: add uninstall instructions for OneUptime in documentation 2025-01-08 17:18:51 +00:00
Simon Larsen
100152ecc1 feat: update API endpoint URLs in Public Status Page documentation 2025-01-08 17:12:45 +00:00
Simon Larsen
7b68c0b3c0 feat: refactor permission handling by extracting getFieldPermissions function 2025-01-07 18:59:34 +00:00
Simon Larsen
a7ad9b752b feat: fix Clickhouse configuration to use default port if not specified 2025-01-06 20:10:59 +00:00
Simon Larsen
2e3ed42723 feat: add Public Status Page API documentation 2025-01-06 12:43:29 +00:00
Simon Larsen
864e44d1a2 feat: add "Status Pages" section with link to Public API documentation 2025-01-06 12:40:52 +00:00
Simon Larsen
08cdb33e6e feat: remove redundant MYENVVAR environment variable from installer templates 2025-01-06 12:39:17 +00:00
Simon Larsen
27647f529f feat: rename workflow step to clarify NPM package publishing 2025-01-06 08:30:49 +00:00
Simon Larsen
4021650ea5 feat: add MYENVVAR environment variable to installer for amd64 template 2025-01-06 08:27:01 +00:00
Simon Larsen
6a9f76c656 feat: add environment variable MYENVVAR to installer for arm64 template 2025-01-06 08:26:43 +00:00
Simon Larsen
c3bba74107 feat: update product name for amd64 and remove unnecessary custom actions from installer 2025-01-03 17:15:25 +00:00
Simon Larsen
16b9d4794e feat: enhance error handling in SSO login process and improve response structure 2025-01-03 16:19:23 +00:00
Simon Larsen
22e16d5c5b feat: update documentation to include proxy configuration for agent setup 2025-01-03 15:28:37 +00:00
Simon Larsen
ef340e5fa0 feat: improve error logging for secret key validation and add proxy support for HTTP requests 2025-01-03 15:22:35 +00:00
Simon Larsen
20aff38458 feat: enhance agent configuration logging and save proxy URL 2025-01-03 15:08:57 +00:00
Simon Larsen
16a1051280 feat: add proxy configuration support to agent and update related functions 2025-01-03 14:55:16 +00:00
Simon Larsen
fe9d3d3020 Add proxy 2025-01-03 14:21:47 +00:00
Simon Larsen
a0c0041cbd feat: implement default logging to file with fallback to console 2025-01-03 13:31:59 +00:00
Simon Larsen
d755c62a51 chore: update README for build instructions and remove unused dependency 2025-01-03 10:04:11 +00:00
Simon Larsen
5756623f45 fix: improve error handling and logging for metrics ingestion and secret key verification 2025-01-03 09:47:35 +00:00
Simon Larsen
8247f504c4 fix: correct formatting of Redis IP family value in Helm chart templates 2025-01-01 18:46:25 +00:00
Simon Larsen
f05b0a15ee feat: add conditional logic for Redis IP family in Helm chart templates 2025-01-01 18:42:38 +00:00
Simon Larsen
1abf1df1ee refactor: format PORT initialization and improve import structure in Config and Index 2025-01-01 18:24:30 +00:00
Simon Larsen
f1afab0b52 feat: add configurable port for probe services and update initialization 2025-01-01 18:04:19 +00:00
Simon Larsen
a0d33bf9c1 feat: initialize server with a specific port using Port type in Probe 2025-01-01 17:03:13 +00:00
Simon Larsen
7c16fa7b42 refactor: update ClickhouseConfig to use Hostname type for host configuration 2025-01-01 16:37:12 +00:00
Simon Larsen
5ba526d12e refactor: remove outdated comments and improve context object in VMRunner 2025-01-01 16:11:19 +00:00
Simon Larsen
92174be7fd docs: enhance documentation with available objects in Playwright context and error handling for browser closure 2025-01-01 15:34:10 +00:00
Simon Larsen
ece451b660 refactor: remove unnecessary UI reference and streamline thank you message in installation templates 2024-12-24 21:09:36 +00:00
Simon Larsen
5ca740b43c feat: add custom action to set PATH environment variable and show thank you message during installation 2024-12-24 20:55:16 +00:00
Simon Larsen
70aae7c59a fix: update RedisIPFamily initialization to handle undefined environment variable 2024-12-24 20:13:21 +00:00
Simon Larsen
bb543ef010 Merge pull request #1773 from diabolocom/redis-ipv6
Feature: Allow to set Redis IP Family
2024-12-24 20:09:59 +00:00
Simon Larsen
64bba9246b Add thank you message and UI reference to installation templates 2024-12-24 20:07:08 +00:00
Simon Larsen
511987bd70 Add PATH environment variable setting in Windows installer templates 2024-12-24 20:03:40 +00:00
Jules Lefebvre
19162504c8 feat(helm): add externalRedis.ipFamily configuration
Add the `externalRedis.ipFamily` values to set the `REDIS_IP_FAMILY` environment variable
2024-12-24 13:08:15 +01:00
Jules Lefebvre
96f5173fb9 feat(docker): allow to choose redis ip family
Add the `REDIS_IP_FAMILY` to the default docker environment variables
2024-12-24 13:08:04 +01:00
Jules Lefebvre
8704f47b44 feat(common/infra): allow to choose redis ip family
Introduce a new environment variable (`REDIS_IP_FAMILY`) to choose which version
of the IP protocol to use in Redis.
2024-12-24 13:06:39 +01:00
Simon Larsen
45803a8cd2 Fix path separators in build-msi.sh for consistency across platforms 2024-12-23 20:19:11 +00:00
Simon Larsen
c04c2a3563 Fix path separators in build-msi.sh for Windows compatibility 2024-12-23 19:10:41 +00:00
Simon Larsen
e61e7f3ba0 Update file identifiers in Windows installer templates for Infrastructure Agent 2024-12-23 18:43:33 +00:00
Simon Larsen
e438050f6e Fix path separators in build-msi.sh for Windows compatibility 2024-12-23 18:42:23 +00:00
Simon Larsen
9601bdec93 Update product names and descriptions in installer templates for consistency 2024-12-23 18:31:56 +00:00
Simon Larsen
c631dcfd44 Remove unnecessary whitespace in CustomSMTPTable component 2024-12-23 18:29:01 +00:00
Simon Larsen
4cabd2562a Reduce job duration from 1 minute to 30 seconds in InfrastructureAgent 2024-12-23 18:22:03 +00:00
Simon Larsen
09e997c104 Add noValueMessage prop to description fields across multiple components 2024-12-23 17:54:27 +00:00
Simon Larsen
eee787be1c Update description fields to be optional across multiple components 2024-12-20 17:22:05 +00:00
Simon Larsen
c9b0d4fbec Fix binary paths in build script for MSI to remove leading dot 2024-12-20 16:58:00 +00:00
Simon Larsen
946f82f978 Add debug logging to server monitor criteria and comparison functions 2024-12-20 16:23:34 +00:00
Simon Larsen
8fc3fe4a98 Increase offline check duration from 2 to 3 minutes in server monitor criteria 2024-12-20 16:16:25 +00:00
Simon Larsen
3d321a038b Add infrastructure agent deployment workflow with GoReleaser and artifact upload 2024-12-20 16:00:56 +00:00
Simon Larsen
ef06d47619 Fix binary paths in build script for MSI to remove leading dot 2024-12-20 15:55:46 +00:00
Simon Larsen
c8a966a2f7 Enhance server monitor functionality by adding serverMonitorResponse handling and improving hostname resolution 2024-12-20 15:48:32 +00:00
Simon Larsen
32ffdf9174 Consolidate package installation in build script for MSI 2024-12-20 14:54:22 +00:00
Simon Larsen
d482cb00a9 Fix typo in form step title from "Baisc Info" to "Basic Info" in SSO pages 2024-12-20 13:59:20 +00:00
Simon Larsen
6286b8bf4c Update output directory in build script for MSI to a more general path 2024-12-20 13:58:48 +00:00
Simon Larsen
b264f4a6b9 Add MSI image release step to GitHub Actions and update build script for dependencies 2024-12-20 12:10:58 +00:00
Simon Larsen
ec7d5fe5c2 Update ARM64 binary path in MSI build script to reflect new version structure 2024-12-20 12:08:47 +00:00
Simon Larsen
4b9d770030 Update Windows installer templates for arm64: enhance XML structure, update product details, and adjust file references 2024-12-19 20:38:24 +00:00
Simon Larsen
a8100e9b01 Refactor Windows installer template for arm64: update XML structure, improve metadata, and enhance component definitions 2024-12-19 20:37:35 +00:00
Simon Larsen
b43e892295 Update GoReleaser to v6.1.0 and increment version in configuration 2024-12-19 20:14:30 +00:00
Simon Larsen
663f151051 Add MSI build script and templates for Windows infrastructure agent 2024-12-19 19:44:39 +00:00
Simon Larsen
282373b654 Update server monitor criteria and request handling for accurate time tracking 2024-12-19 18:10:15 +00:00
Simon Larsen
43faeb6e32 Update CheckOnlineStatus to use three minutes ago for server monitor queries 2024-12-19 17:58:25 +00:00
Simon Larsen
8180fcf386 Update metric view data handling in MonitorMetrics component 2024-12-19 17:49:38 +00:00
Simon Larsen
e2cbd4d0ac Refactor loading and error handling in MonitorMetrics component 2024-12-19 17:31:27 +00:00
Simon Larsen
084c259197 Update report settings to clarify email notifications for subscribers 2024-12-17 19:16:04 +00:00
Simon Larsen
6ecd709f29 Add NFPM configuration for packaging and install step in release workflow 2024-12-17 18:56:27 +00:00
Simon Larsen
b935cef5a0 Fix typo in subscription confirmation message 2024-12-17 17:41:44 +00:00
Simon Larsen
1d5e838afa Update subscription success message to include email confirmation instructions 2024-12-17 17:35:38 +00:00
Simon Larsen
90ca4b68cf Add email notification for new subscribers and improve subscription confirmation handling 2024-12-17 15:01:26 +00:00
Simon Larsen
031bd26b09 Update subscription confirmation to use 'verification-token' query parameter 2024-12-17 14:56:42 +00:00
Simon Larsen
1296f37081 Enhance subscription confirmation handling by updating permission controls and modifying email notification options 2024-12-17 14:41:31 +00:00
Simon Larsen
ba1a707156 Remove outdated IP addresses from the configuration documentation 2024-12-17 13:08:41 +00:00
Simon Larsen
d670cca559 Add subscription confirmation feature and enhance Pill component with tooltip support 2024-12-17 13:00:23 +00:00
Simon Larsen
f7e31a4f04 Add subscription confirmation handling and enhance Pill component with tooltip support 2024-12-17 12:50:02 +00:00
Simon Larsen
bb7917551f Add confirmation functionality for status page subscriptions and update related templates 2024-12-17 12:42:22 +00:00
Simon Larsen
53238aee40 Refactor date handling in AggregateUtil and XAxisUtil, and improve error handling in MetricView and IncidentView components 2024-12-16 18:17:01 +00:00
Simon Larsen
187d41e25f Refactor RollingTimePicker and MetricMonitorStepForm for improved state management and performance 2024-12-16 17:59:18 +00:00
Simon Larsen
56f79a91c1 Enhance IncidentView component with HeaderAlert for date display and improve metrics card layout 2024-12-16 16:57:04 +00:00
Simon Larsen
4dd6b5f32e Add MetricViewData interface and update imports across components 2024-12-16 16:53:38 +00:00
Simon Larsen
f48a5a650c Add metricAggregationType handling to MetricMonitorOptions and enhance CriteriaFilter component 2024-12-16 16:23:57 +00:00
Simon Larsen
09a2a31d2b Refactor evaluation type handling in CompareCriteria and update MetricMonitorOptions interface 2024-12-16 16:14:23 +00:00
Simon Larsen
5fd60094ab Add monitorStep parameter to MetricMonitorCriteria and enhance metric alias handling 2024-12-16 15:59:59 +00:00
Simon Larsen
0cfc9fbb56 Improve data handling and code readability in various components 2024-12-16 15:38:29 +00:00
Simon Larsen
2fc6200c47 Update chartCssClass in MetricMonitor components for consistent styling 2024-12-16 11:11:51 +00:00
Simon Larsen
437e5e7004 Enhance MetricMonitorPreview with rolling time selection and modal for improved user interaction 2024-12-16 11:08:40 +00:00
Simon Larsen
22e2c1f25c Uncomment Metrics monitor type in MonitorTypeHelper for clarity 2024-12-16 10:34:30 +00:00
Simon Larsen
4799ed0434 Fix typo in alert configuration key for consistency in Helm chart 2024-12-16 10:26:18 +00:00
Simon Larsen
0848dfc5d6 Refactor EnvironmentConfig and StatusPageService for improved readability and consistency 2024-12-16 10:25:26 +00:00
Simon Larsen
88cf2c3cb0 Add configuration options to disable automatic alert and incident creation 2024-12-16 09:57:24 +00:00
Simon Larsen
ca3855d109 Remove commented-out Metrics monitor type from MonitorTypeHelper for cleaner code 2024-12-13 15:55:52 +00:00
Simon Larsen
36570f3944 Refactor APIRequestCriteria to improve readability of response code check 2024-12-13 14:09:23 +00:00
Simon Larsen
df10a1900d Update APIRequestCriteria to include responseCode check for ResponseStatusCode validation 2024-12-13 14:06:43 +00:00
Simon Larsen
eca3408598 Update APIResponse interface to allow statusCode to be undefined for better error handling 2024-12-13 14:04:57 +00:00
Simon Larsen
9d1a2c40c7 Refactor MetricMonitorStepForm and MetricView to optimize state management and enhance data handling 2024-12-12 13:57:46 +00:00
Simon Larsen
cd58b72a9c Refactor MetricMonitorStepForm to eliminate unnecessary state management and streamline onChange handling 2024-12-12 13:17:00 +00:00
Simon Larsen
ceead6eaba Refactor StartAndEndDate and CriteriaFilter components for improved readability and consistency in type usage 2024-12-12 13:02:29 +00:00
Simon Larsen
45a665b004 Refactor DateFilter and MetricView components for consistency in value prop usage; improve formatting in various files 2024-12-12 12:58:20 +00:00
Simon Larsen
2dd1cd8453 Update chartCssClass for improved styling in MetricMonitorStepForm 2024-12-12 12:03:54 +00:00
Simon Larsen
8152a7f7ea Add hideCardInCharts prop and chartCssClass for enhanced styling in MetricMonitorStepForm 2024-12-12 12:02:09 +00:00
Simon Larsen
55d962eed7 Add chartCssClass prop to ChartGroup, MetricCharts, and MetricView for customizable styling 2024-12-12 11:55:48 +00:00
Simon Larsen
ebf5c83358 Merge branch 'master' of github.com:OneUptime/oneuptime 2024-12-12 11:28:57 +00:00
Simon Larsen
69e5fba5ff Add monitorStep prop to various components for improved data handling in monitoring forms 2024-12-12 11:15:00 +00:00
Simon Larsen
139ee62106 Add FieldLabelElement for metric selection in MetricMonitorStepForm 2024-12-12 10:36:47 +00:00
Simon Larsen
2d89431dc1 Merge pull request #1765 from ThoSt81/patch-1
Update SubscribedToStatusPage.hbs
2024-12-12 10:29:07 +00:00
ThoSt
d506e658f0 Update SubscribedToStatusPage.hbs
Remove InfoBlock, which is identical to EmailTitle and thus messes up the template when sent to the new subscriber.
2024-12-11 15:49:42 +01:00
Simon Larsen
180d02c53d Refactor MonitorMetrics and MetricExplorer components to streamline state management with MetricViewData and improve code readability 2024-12-11 14:01:31 +00:00
Simon Larsen
e500886b12 Refactor MetricMonitor components to utilize MetricViewData for improved state management and data handling 2024-12-11 13:57:15 +00:00
Simon Larsen
7f7bcbc0a3 Add FieldLabelElement for time range selection and refactor MetricView data handling in MetricExplorer 2024-12-11 13:51:43 +00:00
Simon Larsen
397231f1df Refactor MetricMonitorCriteria to improve type safety and enhance alias handling in data processing 2024-12-10 22:44:58 +00:00
Simon Larsen
f575afa151 Enhance MetricMonitorCriteria to support metric alias handling and improve data processing logic 2024-12-10 22:38:14 +00:00
Simon Larsen
b26cf4e876 Enhance Metric Monitor functionality by introducing MetricMonitorCriteria, updating MetricMonitorResponse, and refining telemetry monitoring logic 2024-12-10 19:37:24 +00:00
Simon Larsen
450488eb4f Refactor code for improved readability and consistency across Monitor components 2024-12-10 14:37:31 +00:00
Simon Larsen
14d3228786 Add Metric Monitor functionality with Rolling Time selection and preview 2024-12-10 14:32:54 +00:00
Simon Larsen
811fd24cd5 Merge branch 'master' of github.com:OneUptime/oneuptime 2024-12-10 10:25:21 +00:00
Simon Larsen
098fb5be78 Merge branch 'metrics-monitor' 2024-12-10 10:25:10 +00:00
Simon Larsen
837d065b81 Merge pull request #1761 from WillDaSilva/patch-1
Remove trailing whitespace in `fluentbit.md`
2024-12-10 10:16:09 +00:00
Simon Larsen
fe2b001c6d Improve error handling in OTelIngest API to provide clearer feedback on failures 2024-12-09 18:23:17 +00:00
Simon Larsen
b08c047da7 Add early empty success response to prevent timeouts in OTelIngest API 2024-12-09 18:12:50 +00:00
Simon Larsen
79ff8b1f82 Enable debug logging in telemetry configuration for improved diagnostics 2024-12-09 17:59:58 +00:00
Simon Larsen
7da47d6e16 Refactor response handling in OTelIngest API to ensure early success response is sent consistently 2024-12-09 17:08:50 +00:00
Simon Larsen
6e6f3c6c38 Add MetricMonitor support with criteria checks and response type 2024-12-09 17:03:24 +00:00
Simon Larsen
b3b3d9a0b7 Merge branch 'master' into metrics-monitor 2024-12-09 16:19:26 +00:00
Simon Larsen
16f0fe145a Enhance probe monitoring by introducing configurable retry limit and updating logic 2024-12-09 16:18:43 +00:00
Simon Larsen
eea7209aaf Remove unused imports and clean up whitespace in OTelIngest API 2024-12-09 15:38:51 +00:00
Simon Larsen
c4c93f1cc5 Refactor error handling in data processing module for improved clarity and maintainability 2024-12-09 15:34:25 +00:00
Simon Larsen
0d19f56519 Refactor monitor type helper methods and update side menu to reflect changes in metrics handling 2024-12-09 12:58:22 +00:00
Simon Larsen
85d0ded200 Remove commented-out telemetry indexing and billing logic from OTelIngest API 2024-12-09 12:52:27 +00:00
Simon Larsen
c5134f0dd7 Add default value for PROBE_MONITOR_RETRY_LIMIT in probe.yaml and update values.yaml 2024-12-09 10:32:55 +00:00
Simon Larsen
9e117f34d4 Add PROBE_MONITOR_RETRY_LIMIT environment variable and update monitor retry logic 2024-12-09 10:28:22 +00:00
Will Da Silva
72f994d079 Remove trailing whitespace in fluentbit.md 2024-12-06 19:31:16 -05:00
Simon Larsen
4a6edfa660 Enhance logging for mutex acquisition and release in Monitor API 2024-12-06 17:43:52 +00:00
Simon Larsen
a20c05adb2 Merge branch 'master' into release 2024-12-06 17:39:33 +00:00
Simon Larsen
a286aba432 Refactor Semaphore class to streamline lockOptions initialization 2024-12-06 17:39:12 +00:00
Simon Larsen
a3f1302e37 Add lockTimeout and acquireTimeout options in Monitor API semaphore 2024-12-06 17:36:17 +00:00
Simon Larsen
9393388cc5 Add acquireTimeout option and improve lock options in Semaphore class 2024-12-06 17:34:21 +00:00
Simon Larsen
d249fe16d9 Add Fluent Bit integration documentation and image tiles to Telemetry section 2024-12-06 16:33:11 +00:00
Simon Larsen
7f6223f4c8 Remove debugger statement and improve code formatting in telemetry ingestion 2024-12-06 12:04:14 +00:00
Simon Larsen
dc5e2c0d40 Update Fluent Bit documentation and configuration for OneUptime telemetry ingestion 2024-12-06 11:58:40 +00:00
Simon Larsen
26f6a14e93 Update OpenTelemetry configuration in Fluent Bit to use new host and secure settings 2024-12-05 18:21:53 +00:00
Simon Larsen
588de5ad27 Add Fluent Bit documentation and update configuration for telemetry ingestion 2024-12-05 18:06:23 +00:00
Simon Larsen
8734938a82 Enhance telemetry configuration in Fluent Bit and update OpenTelemetry request handling 2024-12-05 17:55:59 +00:00
Simon Larsen
06e7228041 Add HTTP input to Fluent Bit configuration and expose port 8889 for telemetry ingestion 2024-12-05 14:43:39 +00:00
Simon Larsen
38ad431b17 Update Fluent Bit Docker configuration and remove obsolete YAML file 2024-12-04 20:09:16 +00:00
Simon Larsen
e0f5d8f1f2 Add Fluent Bit configuration and update probe intervals in Helm chart 2024-12-04 19:23:59 +00:00
Simon Larsen
e947d21060 Add random sleep interval to FetchList worker to reduce server load 2024-12-04 15:57:02 +00:00
Simon Larsen
41d347a1a7 Increase Node.js memory limit in start script for improved performance 2024-12-04 13:59:32 +00:00
Simon Larsen
5dc6ab6bb2 Update Alert and Incident metric type strings to use hyphens for consistency 2024-12-04 13:23:16 +00:00
Simon Larsen
3492e54a9b Refactor error logging and improve code formatting in AlertStateTimelineService and IncidentService 2024-12-04 13:12:49 +00:00
Simon Larsen
adf92f3dc9 Add Alert and Incident metric types, enhance Metric model, and improve error logging in services 2024-12-04 13:02:17 +00:00
Simon Larsen
02b9f77e7c Add new service types to Metric model and clean up IncidentService 2024-12-04 11:35:25 +00:00
Simon Larsen
396f435755 Add cron jobs to delete incident and alert metrics older than 180 days 2024-12-04 11:16:53 +00:00
Simon Larsen
5796db03f9 Add Incident and Alert service types to Metric model 2024-12-04 11:03:16 +00:00
Simon Larsen
4029d72967 Send early empty success response in OTelIngest to prevent timeouts 2024-12-04 10:39:54 +00:00
Simon Larsen
8d2cf500a0 Update OpenTelemetry endpoint in documentation for accuracy 2024-12-04 10:34:01 +00:00
Simon Larsen
5bfc954076 Refactor isSaving initialization in DashboardToolbar for clearer logic 2024-12-03 18:07:44 +00:00
Simon Larsen
769f468273 Add Loader component to DashboardToolbar for improved saving feedback 2024-12-03 18:04:21 +00:00
Simon Larsen
8d75128603 Add debug logging for updated items in DatabaseService and improve ObjectID equality check 2024-12-03 17:59:16 +00:00
Simon Larsen
ba4795e4b3 Serialize dashboardViewConfig in DashboardViewer for consistent data handling 2024-12-03 17:43:53 +00:00
Simon Larsen
65f9f7c830 Fix key assignment in MoreMenuItem to ensure unique keys for list items 2024-12-03 17:21:28 +00:00
Simon Larsen
99e56f9312 Fix mouse event coordinates in DashboardBaseComponent for accurate positioning 2024-12-03 17:14:36 +00:00
Simon Larsen
b380e6d770 Refactor ConfirmModal and DashboardToolbar for improved code readability and consistency 2024-12-03 16:36:02 +00:00
Simon Larsen
9d0add605d Add closeButtonText prop to modal components; implement confirmation modal for unsaved changes in DashboardToolbar 2024-12-03 16:31:32 +00:00
Simon Larsen
d4737841ce Enhance DashboardChartComponent to improve error handling and display an icon on error; refactor metric query configuration for better readability 2024-12-03 16:25:11 +00:00
Simon Larsen
d6f9971cb6 Refactor DashboardChartComponent and DashboardValueComponent for improved readability; add unit display in DashboardValueComponent 2024-12-03 14:31:22 +00:00
Simon Larsen
8e4733b72f Refactor Dashboard components and BasicForm for improved state management and code readability 2024-12-03 14:18:35 +00:00
Simon Larsen
07c387289a Enhance BasicForm and ArgumentsForm to support dynamic values; update Dashboard components to use ComponentLoader for improved loading state handling 2024-12-03 13:57:05 +00:00
Simon Larsen
956f786d1a Merge branch 'master' of github.com:OneUptime/oneuptime 2024-12-03 13:15:32 +00:00
Simon Larsen
56b2fbfb77 Refactor DashboardTextComponent and DashboardToolbar to improve layout and remove fullscreen state management; enhance DashboardViewer for better fullscreen handling 2024-12-03 13:15:30 +00:00
Simon Larsen
bba67afc36 Merge pull request #1758 from golyalpha/patch-1
Fix alert name links opening invalid/broken page
2024-12-03 13:11:56 +00:00
golyalpha
d1dd0d7774 Update Alert.tsx
Fix route on Alert name link in list of alerts
2024-12-02 21:06:17 +01:00
Simon Larsen
c7c6a54155 Enhance DashboardValueComponent and DashboardTextComponent for improved layout and error handling; add metric query configuration and adjust title height calculation 2024-12-02 18:29:10 +00:00
Simon Larsen
12fc9863d2 Refactor DashboardTextComponent and DashboardValueComponent to use consistent decimal notation for height calculations, improving code clarity 2024-12-02 18:16:49 +00:00
Simon Larsen
4ec718a966 Update DashboardValueComponent to dynamically set font sizes based on component height for improved text display 2024-12-02 18:14:50 +00:00
Simon Larsen
91b2a6e44f Update DashboardTextComponent to adjust text height calculation and apply it as font size for better text display 2024-12-02 18:13:11 +00:00
Simon Larsen
aa2e79bd82 Refactor MetricAlias component layout for improved responsiveness; adjust input widths for better alignment 2024-12-02 18:06:58 +00:00
Simon Larsen
cc62b26002 Refactor DashboardChartComponent and related components for improved code readability and maintainability; add optional title and description properties for enhanced chart configuration 2024-12-02 18:03:38 +00:00
Simon Larsen
6113b10c74 Update DashboardChartComponent to support dynamic legend and legendUnit properties for improved chart configuration 2024-12-02 18:00:57 +00:00
Simon Larsen
52a952f41e Add legendUnit property to DashboardChartComponent and MetricExplorer for enhanced chart configuration 2024-12-02 17:57:34 +00:00
Simon Larsen
257b4283e1 Add legend and legendUnit properties to MetricAliasData; update DashboardChartComponent and related components for legend display and configuration 2024-12-02 17:55:39 +00:00
Simon Larsen
31b4eba73f Update MetricAliasData to allow optional properties; enhance DashboardChartComponent to include chart title and description 2024-12-02 17:46:28 +00:00
Simon Larsen
0dec6255f6 Add optional properties for chart customization in DashboardChartComponent; enhance error handling in Dashboard components 2024-12-02 17:38:00 +00:00
Simon Larsen
baabf84951 Refactor Dashboard components for improved readability; update DashboardTextComponent to dynamically set height and fix formatting in DashboardValueComponent 2024-12-02 17:16:12 +00:00
Simon Larsen
359c36e023 Refactor MetricCharts and LineChart components for improved readability; update DashboardChartComponent to handle dynamic height and adjust DashboardValueComponent for new metric query configuration 2024-12-02 16:37:32 +00:00
Simon Larsen
285fe7f524 Add heightInPx prop to ChartGroup, LineChart, and MetricCharts for dynamic height adjustment 2024-12-02 15:57:50 +00:00
Simon Larsen
093e8e5591 Refactor Dashboard components for improved readability and add hideCard prop to ChartGroup and MetricCharts 2024-12-02 15:42:34 +00:00
Simon Larsen
73ce957b57 Enhance MonitorResourceUtil to include monitorName in attributes and improve error handling in Dashboard components 2024-12-02 15:33:37 +00:00
Simon Larsen
5dbb80457a Fix typos in data retention references across multiple views 2024-11-30 19:02:26 +00:00
Simon Larsen
d53b2d0e1c Add dashboardStartAndEndDate prop to DashboardCanvas and DashboardViewer components 2024-11-29 19:20:53 +00:00
Simon Larsen
c4256a0dea Refactor Dashboard components for improved code consistency and readability 2024-11-29 19:17:47 +00:00
Simon Larsen
b8fc933acb Add start and end date handling to Dashboard components and implement metric result fetching 2024-11-29 19:03:34 +00:00
Simon Larsen
45d447bf2c Add ColorSwatch enum and update Dashboard date range handling 2024-11-29 18:13:58 +00:00
Simon Larsen
a0400be8cd Add start and end date selection functionality to Dashboard components 2024-11-29 17:29:12 +00:00
Simon Larsen
3688381d4a Add max height calculation to DashboardCanvas for improved rendering logic 2024-11-29 13:03:31 +00:00
Simon Larsen
1f5287c2e6 Refactor DuplicateModel component in Settings page for improved readability and consistency 2024-11-29 12:54:17 +00:00
Simon Larsen
3e97d6bba1 Add DuplicateModel component to Settings page for dashboard duplication functionality 2024-11-29 12:50:25 +00:00
Simon Larsen
32c0cbc4ad Add Settings page and update routing and breadcrumbs for dashboard 2024-11-29 12:47:34 +00:00
Simon Larsen
90f267105f Add MetricViewData interface and implement MetricCharts component for enhanced metrics visualization 2024-11-28 21:43:38 +00:00
Simon Larsen
eaa9a5f1a0 Make metricAliasData optional in MetricView component for improved safety and handling 2024-11-28 14:11:13 +00:00
Simon Larsen
70da661041 Update MetricQueryConfigData to make metricAliasData optional and refactor ArgumentsForm and MetricQueryConfig components for improved type handling and conditional rendering 2024-11-28 13:27:31 +00:00
Simon Larsen
b307a74319 Refactor dashboard component argument types and update metrics handling in DashboardCanvas and ComponentSettingsSideOver 2024-11-28 13:16:44 +00:00
Simon Larsen
5c2fa28fff Refactor dashboard components to utilize JSONFunctions for deserialization of dashboard view configurations 2024-11-28 11:50:15 +00:00
Simon Larsen
2f8495e5b5 Add telemetry services loading and filtering to TraceTable component 2024-11-27 13:19:15 +00:00
Simon Larsen
d3d9c46812 Add exception handling to SpanViewer component with detailed display of exceptions 2024-11-27 13:05:59 +00:00
Simon Larsen
9e50d068db Enhance Clickhouse connection handling with improved error responses and status checks 2024-11-27 11:24:30 +00:00
Simon Larsen
7232a3142c Remove unused APIException import from CRUD components for cleaner code 2024-11-27 10:43:19 +00:00
Simon Larsen
a1ca2e3f37 Refactor API component error handling to consistently use errorPort and return detailed error messages 2024-11-27 10:39:40 +00:00
Simon Larsen
a5d993b999 Remove unused load distribution logic in Monitor API 2024-11-27 10:24:36 +00:00
Simon Larsen
854be1ddeb Swap debug ports for OpenTelemetryIngest and Fluent Ingest configurations in launch.json 2024-11-26 17:55:10 +00:00
Simon Larsen
fe5b93f66e Add Project ID field to APIKeyView for enhanced project tracking 2024-11-26 17:09:31 +00:00
Simon Larsen
f259ddecd5 Update TraceTable terminology from "Trace" to "Span" for consistency in terminology 2024-11-26 17:07:52 +00:00
Simon Larsen
680be0e468 Add conditional fetching of aggregated results in MetricView based on hideQueryElements prop 2024-11-26 16:38:49 +00:00
Simon Larsen
488a2b0b57 Add modulo function to QueryHelper and random number generator to NumberUtil for load distribution in monitor fetching 2024-11-26 15:38:00 +00:00
Simon Larsen
e0871e6b16 Remove unnecessary blank lines in QueryHelper class for improved code clarity 2024-11-26 14:05:26 +00:00
Simon Larsen
ab5acdef09 Refactor XAxis precision methods for improved date rounding and remove random sorting method from QueryHelper 2024-11-26 14:04:30 +00:00
Simon Larsen
002c23b2a5 Disable Clickhouse status check in InfrastructureStatus for improved reliability 2024-11-26 13:17:10 +00:00
Simon Larsen
b10134fb30 Add random sorting method to QueryHelper and apply it in Monitor probe fetching 2024-11-26 09:04:19 +00:00
Simon Larsen
93027ec0ae Refactor string to number conversion in AnalyticsDatabaseService for improved readability 2024-11-25 19:10:36 +00:00
Simon Larsen
481b09531f Convert string values to numbers in AnalyticsDatabaseService for aggregate column processing 2024-11-25 19:05:20 +00:00
Simon Larsen
4da1dd3f6b Refactor incomingRequest initialization in CheckHeartbeat for clarity 2024-11-25 18:29:14 +00:00
Simon Larsen
661d44d6b0 Add incomingMonitorRequest to CheckHeartbeat and streamline request initialization 2024-11-25 18:28:30 +00:00
Simon Larsen
d6dacb6493 Update TelemetryException model to use VeryLongText type for message fields and add migration for database schema changes 2024-11-25 16:58:08 +00:00
Simon Larsen
0ad5ee5997 Prevent owner notification for the first monitor status timeline entry 2024-11-25 15:05:01 +00:00
Simon Larsen
5ad8f00388 Refactor validation messages for improved formatting and readability 2024-11-25 14:32:58 +00:00
Simon Larsen
450311de3c Refactor validation messages for improved readability and add visibility check for field validation 2024-11-25 14:32:25 +00:00
Simon Larsen
1bc4f07fa3 Update features table to include new items and adjust existing entries 2024-11-25 14:05:58 +00:00
Simon Larsen
d091c93bfc Increase default navigation timeout for status checks and add new probe ingest tests 2024-11-25 14:00:53 +00:00
Simon Larsen
2ff9b47f1c Refactor retryCount initialization across multiple modules for consistency 2024-11-25 13:53:43 +00:00
Simon Larsen
6b470e671f Implement retry mechanism for infrastructure status checks across multiple modules 2024-11-25 13:49:49 +00:00
Simon Larsen
14c9174e24 Add debug logging for database operations and update feature table with new items 2024-11-25 13:08:47 +00:00
Simon Larsen
e4beb13982 Increase timeoutSeconds for liveness and readiness probes in values.yaml 2024-11-23 07:48:29 +00:00
Simon Larsen
2b006e1765 Update Fluentd configuration with new endpoint and token for production environment 2024-11-22 18:41:37 +00:00
Simon Larsen
f45c7f8d30 Add Nginx location for incoming-request-ingest with proxy settings 2024-11-22 18:24:47 +00:00
Simon Larsen
e3a2f95fc2 Add Nginx locations for fluent-ingest and open-telemetry-ingest with proxy settings 2024-11-22 18:18:11 +00:00
Simon Larsen
d27c161665 Add OFF log level to ConfigLogLevel enum and update documentation in values.yaml 2024-11-22 13:00:06 +00:00
Simon Larsen
b9d6a69f00 Fix typo in upstream block name for incoming requests in Nginx configuration 2024-11-22 12:08:08 +00:00
Simon Larsen
fac334d58a Remove probe port configuration and update related references in Helm chart and Docker setup 2024-11-22 11:37:52 +00:00
Simon Larsen
a4913cc5bf Rename host option to url in ClickhouseConfig for clarity 2024-11-22 10:32:15 +00:00
Simon Larsen
0ad1a34e10 Remove unused import for EJS in FluentIngest index file 2024-11-22 10:28:52 +00:00
Simon Larsen
325fa0eb7a Add SERVER_OPEN_TELEMETRY_INGEST_HOSTNAME to Helm template and update tag replacement in change-release-to-test-tag script 2024-11-22 10:23:56 +00:00
Simon Larsen
c02c1e6808 Update test-release workflow to replace deprecated Docker image deployment step 2024-11-22 10:08:58 +00:00
Simon Larsen
96a4a17320 Remove probe-ingest Docker image deployment steps from test-release workflow 2024-11-21 20:10:19 +00:00
Simon Larsen
23c169c6a3 Update Helm templates to use consistent naming for replicaCount across ingest services 2024-11-21 19:58:47 +00:00
Simon Larsen
6c4a4cad50 Fix replicaCount reference in probe-ingest Helm template 2024-11-21 19:11:27 +00:00
Simon Larsen
34c1af08db Fix import path for FluentIngestAPI in Index.ts 2024-11-21 19:05:12 +00:00
Simon Larsen
128aec9869 Remove protobufjs dependency from ProbeIngest and IncomingRequestIngest; update probe.yaml for consistent service URL structure 2024-11-21 18:13:42 +00:00
Simon Larsen
4fc2029a61 Add backward compatibility for /ingestor route in Nginx and ProbeIngest; update Probe configuration for new URL structure 2024-11-21 17:53:35 +00:00
Simon Larsen
815ae7161d Rename Ingestor to ProbeIngest; update configurations, routes, and Docker support; add new request types and workflows 2024-11-21 17:18:22 +00:00
Simon Larsen
3a1f5c7120 Refactor OpenTelemetry Ingest Dockerfile and configuration; update environment variables and docker-compose for new service integration 2024-11-21 17:08:35 +00:00
Simon Larsen
eec51342de Add configuration files for OpenTelemetry Ingest, including .gitignore, .dockerignore, and nodemon.json; refactor code for consistency and clarity 2024-11-21 16:42:29 +00:00
Simon Larsen
945cef653c Add Incoming Request Ingest service with configuration, Docker support, and tests 2024-11-21 14:41:37 +00:00
Simon Larsen
93154aabc7 Remove FluentIngestAPI from Ingestor routes 2024-11-21 13:36:29 +00:00
Simon Larsen
9244e49e6b Add Fluent Ingest service with configuration, Docker support, and tests 2024-11-21 13:31:11 +00:00
Simon Larsen
74e43f0526 Add debug logging for Clickhouse connection status and fix indentation in nginx.yaml 2024-11-21 12:34:57 +00:00
Simon Larsen
1fba734fd0 Refactor health check configurations to use structured probes in values.yaml and templates 2024-11-21 12:11:17 +00:00
Simon Larsen
197e4e67e0 Merge pull request #1750 from golyalpha/master
Add startupProbe to deployments with healthchecks
2024-11-21 10:56:58 +00:00
Radek Goláň jr.
02afbb19be add startupProbe to deployments with healthchecks 2024-11-21 09:00:27 +01:00
Simon Larsen
972e6cb98f Add logging for infrastructure status checks in Status.ts 2024-11-20 21:33:32 +00:00
Simon Larsen
b14f918d59 Update ClickHouse dependency version to 6.3.2 in Chart.yaml and Chart.lock 2024-11-20 18:58:02 +00:00
Simon Larsen
c05d4a0eea Update live check method in StatusAPI to use liveCheck instead of readyCheck 2024-11-20 18:56:07 +00:00
Simon Larsen
3732a5c95f Remove unnecessary blank line in ProbeAuthorization class 2024-11-20 17:33:40 +00:00
Simon Larsen
1e0f6ff558 Add comment to explain URI encoding in getClusterKey method 2024-11-20 15:29:36 +00:00
Simon Larsen
6a361e5b87 Remove unused cluster key retrieval method from ProbeAuthorization middleware 2024-11-20 15:28:00 +00:00
Simon Larsen
c94ac75a6f Refactor ClusterKeyAuthorization to use class reference for cluster key retrieval 2024-11-20 15:14:34 +00:00
Simon Larsen
b49e40780a Remove unused ObjectID import from ClusterKeyAuthorization middleware 2024-11-20 14:29:48 +00:00
Simon Larsen
dd01fa0a3d Refactor ClusterKeyAuthorization to use a dedicated method for cluster key retrieval and simplify type handling 2024-11-20 14:26:36 +00:00
Simon Larsen
a2218b01d0 Add resourcesPreset configuration to Clickhouse settings in values.yaml 2024-11-20 08:36:35 +00:00
Simon Larsen
8d9d83d679 Downgrade Clickhouse dependency version to 6.1.0 in Chart.yaml and Chart.lock 2024-11-19 17:51:46 +00:00
Simon Larsen
546c74297f Consolidate Clickhouse client imports for improved readability 2024-11-19 17:21:16 +00:00
Simon Larsen
9fc1e73fd3 Add additional Clickhouse service ports for TCP, MySQL, and PostgreSQL 2024-11-19 13:49:32 +00:00
Simon Larsen
64e713f503 Update Clickhouse dependency version to 6.3.2 in Chart.yaml and Chart.lock 2024-11-19 11:26:01 +00:00
Simon Larsen
f254209410 Refactor AnalyticsDatabaseService and ClickhouseDatabase imports for improved clarity 2024-11-18 19:54:30 +00:00
Simon Larsen
2f738f8b58 Refactor Clickhouse configuration imports for improved clarity and type usage 2024-11-18 18:57:36 +00:00
Simon Larsen
e2da9b5bc3 Update @clickhouse/client and @clickhouse/client-common to version 1.8.1 2024-11-18 18:51:45 +00:00
Simon Larsen
04bfaf754a Add health check endpoints for global cache, analytics database, and database 2024-11-18 18:42:31 +00:00
Simon Larsen
ab328b0987 Simplify EmailTitle helper usage in SubscriberIncidentStateChanged template 2024-11-18 15:08:17 +00:00
Simon Larsen
521844a5ff Add liveness and readiness probe configuration options to Helm chart templates 2024-11-18 15:07:51 +00:00
Simon Larsen
c4096e7000 Refactor ArgumentsForm, DashboardView, and DashboardToolbar components for improved readability and consistency 2024-11-15 17:14:56 +00:00
Simon Larsen
8afe8cf7fb Comment out NavBarItem for Dashboards in NavBar component 2024-11-15 17:13:52 +00:00
Simon Larsen
fa8b52fa83 Add DashboardViewConfig type and enhance dashboard saving functionality with loading state 2024-11-15 17:13:18 +00:00
Simon Larsen
a8baa76096 Refactor Dashboard components by renaming and replacing DashboardUnit with BlankDashboardUnit for improved clarity and structure 2024-11-15 14:54:09 +00:00
Simon Larsen
8d5cef72b3 Refactor formatting and spacing in Dashboard components for improved readability 2024-11-15 14:07:51 +00:00
Simon Larsen
ef0f0ffa0b Refactor SideOver and ComponentSettingsSideOver components for improved structure and readability 2024-11-15 14:04:12 +00:00
Simon Larsen
eef4e19dc9 Add component deletion functionality and improve selection handling in Dashboard components 2024-11-15 12:45:04 +00:00
Simon Larsen
5af41891dc Enhance DashboardCanvas and DashboardViewer components with selection handling and state management 2024-11-15 12:08:37 +00:00
Simon Larsen
4539b9d381 Update permissions for ScheduledMaintenanceTemplate to include additional roles for update access 2024-11-15 11:46:32 +00:00
Simon Larsen
c1aadd7ce7 Comment out NavBarItem for "Dashboards" in the DashboardNavbar component 2024-11-14 18:27:06 +00:00
Simon Larsen
3b76b92fcb Refactor interface definitions and formatting for improved consistency and readability 2024-11-14 18:21:55 +00:00
Simon Larsen
fe0dc51bef Add DashboardComponentType enum and refactor dashboard component interfaces for improved type safety 2024-11-14 18:10:03 +00:00
Simon Larsen
e279da47bf Refactor dashboard component interfaces and update argument structures for improved configuration 2024-11-14 17:50:36 +00:00
Simon Larsen
5d93000484 Add new metric-related interfaces and update component props for enhanced metrics configuration 2024-11-14 15:21:26 +00:00
Simon Larsen
b826a78700 Add minimum width and height properties to dashboard components for better layout control 2024-11-14 12:47:48 +00:00
Simon Larsen
eded26d92c Add support for HEAD requests in WebsiteMonitor and update request handling 2024-11-13 16:44:42 +00:00
Simon Larsen
517d00dc9a Integrate Playwright for browser automation and add viewport handling in SyntheticMonitor 2024-11-13 16:13:46 +00:00
Simon Larsen
96752f1473 Update aggregation intervals and refactor MetricView component for improved date handling and performance 2024-11-13 13:04:44 +00:00
Simon Larsen
bbc2e306f4 Add waitUntil option to page.setContent for improved reliability in screenshot capture 2024-11-13 12:14:26 +00:00
Simon Larsen
03e063c35b Merge pull request #1746 from OneUptime/take-screenshot-on-incident-create
Take screenshot on incident create
2024-11-13 12:07:36 +00:00
Simon Larsen
4c6c1381c8 Remove unnecessary blank line in MonitorIncident class for improved code readability 2024-11-13 12:07:12 +00:00
Simon Larsen
4814451277 Remove unused imports and clean up screenshot handling in MonitorResourceUtil 2024-11-13 12:00:51 +00:00
Simon Larsen
0c7b2ead8c Refactor migration files to drop specific indexes and improve code formatting; add keys to MoreMenuItem components in UI 2024-11-13 11:22:23 +00:00
Simon Larsen
6c6b80d3c8 Remove unused index decorators from IncidentStateTimeline and MonitorStatusTimeline models, and add migrations to drop specific indexes 2024-11-12 18:21:06 +00:00
Simon Larsen
76f9537002 Refactor migration files to improve index management and clean up MonitorIncident and MonitorResource utility classes 2024-11-12 17:47:07 +00:00
Simon Larsen
c0994ff055 Remove unused index decorators from Alert and Incident models, and add new migration files to drop specific indexes 2024-11-12 17:43:23 +00:00
Simon Larsen
86694d9627 Add Playwright dependency and update Dockerfile for installation 2024-11-12 15:17:52 +00:00
Simon Larsen
aa95f89b49 Merge pull request #1745 from Hubelia/fix_typo_certificate_provisioning
Fix typo in certificate provisioning
2024-11-12 14:41:27 +00:00
RegisHubelia
628b971dc9 Fix automatically typo in certificate provisioning currently set to automatiucally 2024-11-12 08:25:03 -05:00
Simon Larsen
ae002c0d88 Add validation for start and end date in MetricView to prevent loading without valid dates 2024-11-12 11:42:25 +00:00
Simon Larsen
a98fc6f784 Add Pill component for enabled/disabled status display in Workflow views 2024-11-11 18:43:46 +00:00
Simon Larsen
5c459eede8 Refactor metric handling by removing MonitorMetricsByMinute references, adding service type to metrics, and implementing a job to delete old monitor metrics 2024-11-11 18:06:31 +00:00
Simon Larsen
d9d2b615d2 Refactor code for consistency by standardizing property names and improving formatting across various components 2024-11-11 17:24:05 +00:00
Simon Larsen
86e6bca5e1 Enhance analytics functionality by adding groupBy support in various models and components 2024-11-11 17:01:35 +00:00
Simon Larsen
7cfff4787c Remove unnecessary whitespace and improve formatting in MonitorMetricType utility 2024-11-11 10:16:48 +00:00
Simon Larsen
491a8f05bc Update monitor metric units for consistency and improve metric view legend handling 2024-11-07 18:36:27 +00:00
Simon Larsen
f67f1a64bd Add utility method to determine aggregation type by monitor metric type 2024-11-07 17:48:35 +00:00
Simon Larsen
a31ef122a3 Refactor MetricFilter component for improved readability in metric name dropdown options 2024-11-07 17:45:34 +00:00
Simon Larsen
0a82d940fd Add MetricNameAndUnit interface and implement metrics view in Monitor section 2024-11-07 17:26:31 +00:00
Simon Larsen
6ec658b9a5 Refactor MonitorMetricsElement component for improved code readability 2024-11-07 15:52:05 +00:00
Simon Larsen
4e8de2303f Refactor database service to handle date columns and update monitor metrics querying logic 2024-11-07 14:55:16 +00:00
Simon Larsen
19625d6cef Implement MonitorMetricsElement component for dynamic metric querying and visualization 2024-11-07 13:16:44 +00:00
Simon Larsen
e346b12011 Add MonitorMetrics component and MonitorMetricType enum for enhanced monitoring capabilities 2024-11-06 17:40:16 +00:00
Simon Larsen
2b11f2f2b8 Add MonitorMetricType enum and update monitor metric naming conventions 2024-11-06 16:58:45 +00:00
Simon Larsen
3fb62cb358 Update Metric model description for clarity on service ID usage 2024-11-06 15:26:57 +00:00
Simon Larsen
108dfaccf8 Remove MonitorMetricsByMinute model and related service and job files 2024-11-06 13:42:25 +00:00
Simon Larsen
eb20a3c9a2 Comment out NavBarItem for "Dashboards" in the navigation bar 2024-11-06 12:23:02 +00:00
Simon Larsen
988d828bb6 Refactor Dashboard components for improved readability and consistency in code formatting 2024-11-06 12:20:37 +00:00
Simon Larsen
7fd0000c68 Update documentation for self-hosting OneUptime with clearer endpoint examples 2024-11-06 11:41:41 +00:00
Simon Larsen
6286653dd4 Enhance DashboardBaseComponent to track component position using state and update on dashboard view changes 2024-11-05 19:42:51 +00:00
Simon Larsen
9690a5897b Refactor Dashboard components to use component IDs instead of full component objects for improved performance and clarity 2024-11-05 14:58:42 +00:00
Simon Larsen
eefac8703d Update DashboardBaseComponent to use event.pageX and event.pageY for accurate position calculations 2024-11-01 17:13:54 +00:00
Simon Larsen
077a3aad3b Refactor DashboardBaseComponent to improve coordinate calculation and ensure new positions remain within dashboard bounds 2024-11-01 16:55:17 +00:00
Simon Larsen
3ae72726b8 Fix DashboardBaseComponent position calculations to ensure new coordinates remain within bounds 2024-11-01 14:32:21 +00:00
Simon Larsen
90c0e42eb1 Refactor DashboardCanvas and DashboardBaseComponent to enhance component positioning and sizing logic 2024-11-01 14:22:13 +00:00
Simon Larsen
8877ce6d12 Refactor Dashboard size calculations and update DashboardTextComponent height 2024-11-01 13:15:31 +00:00
Simon Larsen
1af5dae991 Refactor DashboardBaseComponent to use consistent naming conventions for width and height variables 2024-11-01 10:54:31 +00:00
Simon Larsen
ee49f3e6dd Refactor GetHeightOfDashboardComponent and GetWidthOfDashboardComponent functions 2024-10-31 18:31:45 +00:00
563 changed files with 33269 additions and 4249 deletions

View File

@@ -240,7 +240,7 @@ jobs:
- name: build docker image
run: sudo docker build -f ./Probe/Dockerfile .
docker-build-ingestor:
docker-build-probe-ingest:
runs-on: ubuntu-latest
env:
CI_PIPELINE_ID: ${{github.run_number}}
@@ -253,7 +253,52 @@ jobs:
# build image probe api
- name: build docker image
run: sudo docker build -f ./Ingestor/Dockerfile .
run: sudo docker build -f ./ProbeIngest/Dockerfile .
docker-build-open-telemetry-ingest:
runs-on: ubuntu-latest
env:
CI_PIPELINE_ID: ${{github.run_number}}
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Preinstall
run: npm run prerun
# build image probe api
- name: build docker image
run: sudo docker build -f ./OpenTelemetryIngest/Dockerfile .
docker-build-incoming-request-ingest:
runs-on: ubuntu-latest
env:
CI_PIPELINE_ID: ${{github.run_number}}
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Preinstall
run: npm run prerun
# build image probe api
- name: build docker image
run: sudo docker build -f ./IncomingRequestIngest/Dockerfile .
docker-build-fluent-ingest:
runs-on: ubuntu-latest
env:
CI_PIPELINE_ID: ${{github.run_number}}
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Preinstall
run: npm run prerun
# build image probe api
- name: build docker image
run: sudo docker build -f ./FluentIngest/Dockerfile .
docker-build-status-page:
runs-on: ubuntu-latest

View File

@@ -204,7 +204,7 @@ jobs:
- run: cd Common && npm install
- run: cd Probe && npm install && npm run compile && npm run dep-check
compile-ingestor:
compile-probe-ingest:
runs-on: ubuntu-latest
env:
CI_PIPELINE_ID: ${{github.run_number}}
@@ -214,7 +214,44 @@ jobs:
with:
node-version: 18.3.0
- run: cd Common && npm install
- run: cd Ingestor && npm install && npm run compile && npm run dep-check
- run: cd ProbeIngest && npm install && npm run compile && npm run dep-check
compile-open-telemetry-ingest:
runs-on: ubuntu-latest
env:
CI_PIPELINE_ID: ${{github.run_number}}
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 18.3.0
- run: cd Common && npm install
- run: cd OpenTelemetryIngest && npm install && npm run compile && npm run dep-check
compile-incoming-request-ingest:
runs-on: ubuntu-latest
env:
CI_PIPELINE_ID: ${{github.run_number}}
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 18.3.0
- run: cd Common && npm install
- run: cd IncomingRequestIngest && npm install && npm run compile && npm run dep-check
compile-fluent-ingest:
runs-on: ubuntu-latest
env:
CI_PIPELINE_ID: ${{github.run_number}}
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 18.3.0
- run: cd Common && npm install
- run: cd FluentIngest && npm install && npm run compile && npm run dep-check
compile-status-page:

View File

@@ -553,7 +553,7 @@ jobs:
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
ingestor-docker-image-deploy:
probe-ingest-docker-image-deploy:
needs: [generate-build-number]
runs-on: ubuntu-latest
steps:
@@ -562,8 +562,8 @@ jobs:
uses: docker/metadata-action@v4
with:
images: |
oneuptime/ingestor
ghcr.io/oneuptime/ingestor
oneuptime/probe-ingest
ghcr.io/oneuptime/probe-ingest
tags: |
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
@@ -585,7 +585,7 @@ jobs:
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy ingestor.
# Build and deploy probe-ingest.
- name: Login to Docker Hub
uses: docker/login-action@v2.2.0
@@ -603,7 +603,188 @@ jobs:
- name: Build and push
uses: docker/build-push-action@v4
with:
file: ./Ingestor/Dockerfile
file: ./ProbeIngest/Dockerfile
context: .
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: |
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
open-telemetry-ingest-docker-image-deploy:
needs: [generate-build-number]
runs-on: ubuntu-latest
steps:
- name: Docker Meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
oneuptime/open-telemetry-ingest
ghcr.io/oneuptime/open-telemetry-ingest
tags: |
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
- uses: actions/setup-node@v2
with:
node-version: 18.3.0
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy open-telemetry-ingest.
- name: Login to Docker Hub
uses: docker/login-action@v2.2.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v2.2.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v4
with:
file: ./OpenTelemetryIngest/Dockerfile
context: .
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: |
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
incoming-request-ingest-docker-image-deploy:
needs: [generate-build-number]
runs-on: ubuntu-latest
steps:
- name: Docker Meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
oneuptime/incoming-request-ingest
ghcr.io/oneuptime/incoming-request-ingest
tags: |
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
- uses: actions/setup-node@v2
with:
node-version: 18.3.0
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy incoming-request-ingest.
- name: Login to Docker Hub
uses: docker/login-action@v2.2.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v2.2.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v4
with:
file: ./IncomingRequestIngest/Dockerfile
context: .
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: |
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
fluent-ingest-docker-image-deploy:
needs: [generate-build-number]
runs-on: ubuntu-latest
steps:
- name: Docker Meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
oneuptime/fluent-ingest
ghcr.io/oneuptime/fluent-ingest
tags: |
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
- uses: actions/setup-node@v2
with:
node-version: 18.3.0
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy fluent-ingest.
- name: Login to Docker Hub
uses: docker/login-action@v2.2.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v2.2.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v4
with:
file: ./FluentIngest/Dockerfile
context: .
platforms: linux/amd64,linux/arm64
push: true
@@ -1049,7 +1230,7 @@ jobs:
uses: actions/checkout@v2
- name: Preinstall
run: npm run prerun
- name: Publish Infrastructure Agent
- name: Publish NPM Packages
run: bash ./Scripts/NPM/PublishAllPackages.sh
@@ -1409,7 +1590,7 @@ jobs:
test-e2e-release-saas:
runs-on: ubuntu-latest
needs: [copilot-docker-image-deploy, docs-docker-image-deploy, api-reference-docker-image-deploy, workflow-docker-image-deploy, llm-docker-image-deploy, accounts-docker-image-deploy, admin-dashboard-docker-image-deploy, app-docker-image-deploy, dashboard-docker-image-deploy, haraka-docker-image-deploy, ingestor-docker-image-deploy, isolated-vm-docker-image-deploy, home-docker-image-deploy, worker-docker-image-deploy, otel-collector-docker-image-deploy, probe-docker-image-deploy, status-page-docker-image-deploy, test-docker-image-deploy, test-server-docker-image-deploy, publish-npm-packages, e2e-docker-image-deploy, helm-chart-deploy, generate-build-number, nginx-docker-image-deploy]
needs: [open-telemetry-ingest-docker-image-deploy, copilot-docker-image-deploy, fluent-ingest-docker-image-deploy, docs-docker-image-deploy, api-reference-docker-image-deploy, workflow-docker-image-deploy, llm-docker-image-deploy, accounts-docker-image-deploy, admin-dashboard-docker-image-deploy, app-docker-image-deploy, dashboard-docker-image-deploy, haraka-docker-image-deploy, probe-ingest-docker-image-deploy, isolated-vm-docker-image-deploy, home-docker-image-deploy, worker-docker-image-deploy, otel-collector-docker-image-deploy, probe-docker-image-deploy, status-page-docker-image-deploy, test-docker-image-deploy, test-server-docker-image-deploy, publish-npm-packages, e2e-docker-image-deploy, helm-chart-deploy, generate-build-number, nginx-docker-image-deploy, incoming-request-ingest-docker-image-deploy]
env:
CI_PIPELINE_ID: ${{github.run_number}}
steps:
@@ -1462,7 +1643,7 @@ jobs:
test-e2e-release-self-hosted:
runs-on: ubuntu-latest
# After all the jobs run
needs: [copilot-docker-image-deploy, docs-docker-image-deploy, api-reference-docker-image-deploy, workflow-docker-image-deploy, llm-docker-image-deploy, accounts-docker-image-deploy, admin-dashboard-docker-image-deploy, app-docker-image-deploy, dashboard-docker-image-deploy, haraka-docker-image-deploy, ingestor-docker-image-deploy, isolated-vm-docker-image-deploy, home-docker-image-deploy, worker-docker-image-deploy, otel-collector-docker-image-deploy, probe-docker-image-deploy, status-page-docker-image-deploy, test-docker-image-deploy, test-server-docker-image-deploy, publish-npm-packages, e2e-docker-image-deploy, helm-chart-deploy, generate-build-number, nginx-docker-image-deploy]
needs: [open-telemetry-ingest-docker-image-deploy, copilot-docker-image-deploy, incoming-request-ingest-docker-image-deploy, fluent-ingest-docker-image-deploy, docs-docker-image-deploy, api-reference-docker-image-deploy, workflow-docker-image-deploy, llm-docker-image-deploy, accounts-docker-image-deploy, admin-dashboard-docker-image-deploy, app-docker-image-deploy, dashboard-docker-image-deploy, haraka-docker-image-deploy, probe-ingest-docker-image-deploy, isolated-vm-docker-image-deploy, home-docker-image-deploy, worker-docker-image-deploy, otel-collector-docker-image-deploy, probe-docker-image-deploy, status-page-docker-image-deploy, test-docker-image-deploy, test-server-docker-image-deploy, publish-npm-packages, e2e-docker-image-deploy, helm-chart-deploy, generate-build-number, nginx-docker-image-deploy]
env:
CI_PIPELINE_ID: ${{github.run_number}}
steps:
@@ -1549,15 +1730,25 @@ jobs:
uses: actions/setup-go@v4
- name: Install GoReleaser
uses: goreleaser/goreleaser-action@v5
uses: goreleaser/goreleaser-action@v6.1.0
with:
install-only: true
- name: GoReleaser Version
run: goreleaser -v
# This tool is used to generate .rpm and .deb packages
- name: Install NFPM
run: go install github.com/goreleaser/nfpm/v2/cmd/nfpm@latest
- name: Show GoReleaser version
run: goreleaser -v
- name: Run GoReleaser
run: cd InfrastructureAgent && export GORELEASER_CURRENT_TAG=7.0.${{needs.generate-build-number.outputs.build_number}} && goreleaser release --clean --snapshot
- name: Release MSI Images
run: cd InfrastructureAgent && bash build-msi.sh 7.0.${{needs.generate-build-number.outputs.build_number}}
# Upload binaries to github release
- name: Release

View File

@@ -599,7 +599,7 @@ jobs:
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
ingestor-docker-image-deploy:
probe-ingest-docker-image-deploy:
needs: generate-build-number
runs-on: ubuntu-latest
steps:
@@ -608,8 +608,8 @@ jobs:
uses: docker/metadata-action@v4
with:
images: |
oneuptime/ingestor
ghcr.io/oneuptime/ingestor
oneuptime/probe-ingest
ghcr.io/oneuptime/probe-ingest
tags: |
type=raw,value=test,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
@@ -632,7 +632,7 @@ jobs:
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy ingestor.
# Build and deploy probe-ingest.
- name: Login to Docker Hub
uses: docker/login-action@v2.2.0
@@ -650,7 +650,190 @@ jobs:
- name: Build and push
uses: docker/build-push-action@v4
with:
file: ./Ingestor/Dockerfile
file: ./ProbeIngest/Dockerfile
context: .
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: |
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
incoming-request-ingest-docker-image-deploy:
needs: generate-build-number
runs-on: ubuntu-latest
steps:
- name: Docker Meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
oneuptime/incoming-request-ingest
ghcr.io/oneuptime/incoming-request-ingest
tags: |
type=raw,value=test,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
- uses: actions/setup-node@v2
with:
node-version: 18.3.0
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy incoming-request-ingest.
- name: Login to Docker Hub
uses: docker/login-action@v2.2.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v2.2.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v4
with:
file: ./IncomingRequestIngest/Dockerfile
context: .
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: |
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
open-telemetry-ingest-docker-image-deploy:
needs: generate-build-number
runs-on: ubuntu-latest
steps:
- name: Docker Meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
oneuptime/open-telemetry-ingest
ghcr.io/oneuptime/open-telemetry-ingest
tags: |
type=raw,value=test,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
- uses: actions/setup-node@v2
with:
node-version: 18.3.0
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy open-telemetry-ingest.
- name: Login to Docker Hub
uses: docker/login-action@v2.2.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v2.2.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v4
with:
file: ./OpenTelemetryIngest/Dockerfile
context: .
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: |
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
fluent-ingest-docker-image-deploy:
needs: generate-build-number
runs-on: ubuntu-latest
steps:
- name: Docker Meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
oneuptime/fluent-ingest
ghcr.io/oneuptime/fluent-ingest
tags: |
type=raw,value=test,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
- uses: actions/setup-node@v2
with:
node-version: 18.3.0
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy fluent-ingest.
- name: Login to Docker Hub
uses: docker/login-action@v2.2.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v2.2.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v4
with:
file: ./FluentIngest/Dockerfile
context: .
platforms: linux/amd64,linux/arm64
push: true
@@ -1343,7 +1526,7 @@ jobs:
test-helm-chart:
runs-on: ubuntu-latest
needs: [llm-docker-image-deploy, copilot-docker-image-deploy, docs-docker-image-deploy, worker-docker-image-deploy, workflow-docker-image-deploy, isolated-vm-docker-image-deploy, home-docker-image-deploy, api-reference-docker-image-deploy, test-server-docker-image-deploy, test-docker-image-deploy, ingestor-docker-image-deploy, probe-docker-image-deploy, haraka-docker-image-deploy, dashboard-docker-image-deploy, admin-dashboard-docker-image-deploy, app-docker-image-deploy, accounts-docker-image-deploy, otel-collector-docker-image-deploy, status-page-docker-image-deploy, nginx-docker-image-deploy, e2e-docker-image-deploy]
needs: [llm-docker-image-deploy, open-telemetry-ingest-docker-image-deploy, copilot-docker-image-deploy, docs-docker-image-deploy, worker-docker-image-deploy, workflow-docker-image-deploy, isolated-vm-docker-image-deploy, home-docker-image-deploy, api-reference-docker-image-deploy, test-server-docker-image-deploy, test-docker-image-deploy, probe-ingest-docker-image-deploy, probe-docker-image-deploy, haraka-docker-image-deploy, dashboard-docker-image-deploy, admin-dashboard-docker-image-deploy, app-docker-image-deploy, accounts-docker-image-deploy, otel-collector-docker-image-deploy, status-page-docker-image-deploy, nginx-docker-image-deploy, e2e-docker-image-deploy, fluent-ingest-docker-image-deploy, incoming-request-ingest-docker-image-deploy]
env:
CI_PIPELINE_ID: ${{github.run_number}}
steps:
@@ -1457,6 +1640,56 @@ jobs:
# Optional. Defaults to repository settings.
retention-days: 7
infrastructure-agent-deploy:
needs: [generate-build-number]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
- name: Set up Go
uses: actions/setup-go@v4
- name: Install GoReleaser
uses: goreleaser/goreleaser-action@v6.1.0
with:
install-only: true
- name: GoReleaser Version
run: goreleaser -v
# This tool is used to generate .rpm and .deb packages
- name: Install NFPM
run: go install github.com/goreleaser/nfpm/v2/cmd/nfpm@latest
- name: Show GoReleaser version
run: goreleaser -v
- name: Run GoReleaser
run: cd InfrastructureAgent && export GORELEASER_CURRENT_TAG=7.0.${{needs.generate-build-number.outputs.build_number}} && goreleaser release --clean --snapshot
- name: Release MSI Images
run: cd InfrastructureAgent && bash build-msi.sh 7.0.${{needs.generate-build-number.outputs.build_number}}
- name: Upload Release Binaries
uses: actions/upload-artifact@v4
# NOTE(review): comment said "Run this on failure", but the step has no `if: failure()` condition, so it runs on success too — add the condition if failure-only upload is intended
with:
# Name of the artifact to upload.
# Optional. Default is 'artifact'
name: binaries
# A file, directory or wildcard pattern that describes what to upload
# Required.
path: |
./InfrastructureAgent/dist
# Duration after which artifact will expire in days. 0 means using default retention.
# Minimum 1 day.
# Maximum 90 days unless changed from the repository settings page.
# Optional. Defaults to repository settings.
retention-days: 7

View File

@@ -0,0 +1,20 @@
name: Fluent Ingest Test
on:
pull_request:
push:
branches-ignore:
- 'hotfix-*' # excludes hotfix branches
- 'release'
jobs:
test:
runs-on: ubuntu-latest
env:
CI_PIPELINE_ID: ${{github.run_number}}
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 18.3.0
- run: cd FluentIngest && npm install && npm run test

View File

@@ -0,0 +1,21 @@
name: Incoming Request Ingest Test
on:
pull_request:
push:
branches-ignore:
- 'hotfix-*' # excludes hotfix branches
- 'release'
jobs:
test:
runs-on: ubuntu-latest
env:
CI_PIPELINE_ID: ${{github.run_number}}
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 18.3.0
- run: cd IncomingRequestIngest && npm install && npm run test

View File

@@ -0,0 +1,21 @@
name: OpenTelemetryIngest Test
on:
pull_request:
push:
branches-ignore:
- 'hotfix-*' # excludes hotfix branches
- 'release'
jobs:
test:
runs-on: ubuntu-latest
env:
CI_PIPELINE_ID: ${{github.run_number}}
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 18.3.0
- run: cd OpenTelemetryIngest && npm install && npm run test

View File

@@ -1,4 +1,4 @@
name: Ingestor Test
name: ProbeIngest Test
on:
pull_request:
@@ -17,5 +17,5 @@ jobs:
- uses: actions/setup-node@v2
with:
node-version: 18.3.0
- run: cd Ingestor && npm install && npm run test
- run: cd ProbeIngest && npm install && npm run test

View File

@@ -1 +1 @@
# Description: Copilot will run this script before we commit the changes to your repository.
# Description: Copilot will run this script before we commit the changes to your repository.

46
.vscode/launch.json vendored
View File

@@ -163,8 +163,8 @@
},
{
"address": "127.0.0.1",
"localRoot": "${workspaceFolder}/Ingestor",
"name": "Ingestor: Debug with Docker",
"localRoot": "${workspaceFolder}/ProbeIngest",
"name": "ProbeIngest: Debug with Docker",
"port": 9932,
"remoteRoot": "/usr/src/app",
"request": "attach",
@@ -175,6 +175,48 @@
"restart": true,
"autoAttachChildProcesses": true
},
{
"address": "127.0.0.1",
"localRoot": "${workspaceFolder}/IncomingRequestIngest",
"name": "IncomingRequestIngest: Debug with Docker",
"port": 9933,
"remoteRoot": "/usr/src/app",
"request": "attach",
"skipFiles": [
"<node_internals>/**"
],
"type": "node",
"restart": true,
"autoAttachChildProcesses": true
},
{
"address": "127.0.0.1",
"localRoot": "${workspaceFolder}/OpenTelemetryIngest",
"name": "OpenTelemetryIngest: Debug with Docker",
"port": 9938,
"remoteRoot": "/usr/src/app",
"request": "attach",
"skipFiles": [
"<node_internals>/**"
],
"type": "node",
"restart": true,
"autoAttachChildProcesses": true
},
{
"address": "127.0.0.1",
"localRoot": "${workspaceFolder}/FluentIngest",
"name": "Fluent Ingest: Debug with Docker",
"port": 9937,
"remoteRoot": "/usr/src/app",
"request": "attach",
"skipFiles": [
"<node_internals>/**"
],
"type": "node",
"restart": true,
"autoAttachChildProcesses": true
},
{
"address": "127.0.0.1",
"localRoot": "${workspaceFolder}/IsolatedVM",

View File

@@ -17,10 +17,11 @@ const init: PromiseVoidFunction = async (): Promise<void> => {
const statusCheck: PromiseVoidFunction = async (): Promise<void> => {
// Check the status of infrastructure components
return await InfrastructureStatus.checkStatus({
return await InfrastructureStatus.checkStatusWithRetry({
checkClickhouseStatus: false,
checkPostgresStatus: false,
checkRedisStatus: false,
retryCount: 3,
});
};

View File

@@ -179,7 +179,7 @@ const RegisterPage: () => JSX.Element = () => {
]);
if (error) {
return <ErrorMessage error={error} />;
return <ErrorMessage message={error} />;
}
if (isLoading) {

View File

@@ -41,7 +41,7 @@ const Logout: FunctionComponent = (): ReactElement => {
]}
>
{!error ? <PageLoader isVisible={true} /> : <></>}
{error ? <ErrorMessage error={error} /> : <></>}
{error ? <ErrorMessage message={error} /> : <></>}
</Page>
);
};

View File

@@ -61,7 +61,7 @@ const Settings: FunctionComponent = (): ReactElement => {
}
if (error) {
return <ErrorMessage error={error} />;
return <ErrorMessage message={error} />;
}
return (

View File

@@ -99,7 +99,7 @@ const Settings: FunctionComponent = (): ReactElement => {
},
title: "Description",
fieldType: FormFieldSchemaType.LongText,
required: true,
required: false,
placeholder: "This probe is to monitor all the internal services.",
},
@@ -170,6 +170,7 @@ const Settings: FunctionComponent = (): ReactElement => {
field: {
description: true,
},
noValueMessage: "-",
title: "Description",
type: FieldType.Text,
},

View File

@@ -10,7 +10,7 @@ import GlobalConfigAPI from "Common/Server/API/GlobalConfigAPI";
import MonitorGroupAPI from "Common/Server/API/MonitorGroupAPI";
import NotificationAPI from "Common/Server/API/NotificationAPI";
import TelemetryAPI from "Common/Server/API/TelemetryAPI";
import Ingestor from "Common/Server/API/ProbeAPI";
import ProbeAPI from "Common/Server/API/ProbeAPI";
import ProjectAPI from "Common/Server/API/ProjectAPI";
import ProjectSsoAPI from "Common/Server/API/ProjectSSO";
@@ -156,9 +156,6 @@ import MonitorGroupOwnerUserService, {
import MonitorGroupResourceService, {
Service as MonitorGroupResourceServiceType,
} from "Common/Server/Services/MonitorGroupResourceService";
import MonitorMetricsByMinuteService, {
MonitorMetricsByMinuteService as MonitorMetricsByMinuteServiceType,
} from "Common/Server/Services/MonitorMetricsByMinuteService";
import MonitorOwnerTeamService, {
Service as MonitorOwnerTeamServiceType,
} from "Common/Server/Services/MonitorOwnerTeamService";
@@ -376,7 +373,6 @@ import FeatureSet from "Common/Server/Types/FeatureSet";
import Express, { ExpressApplication } from "Common/Server/Utils/Express";
import Log from "Common/Models/AnalyticsModels/Log";
import Metric from "Common/Models/AnalyticsModels/Metric";
import MonitorMetricsByMinute from "Common/Models/AnalyticsModels/MonitorMetricsByMinute";
import Span from "Common/Models/AnalyticsModels/Span";
import ApiKey from "Common/Models/DatabaseModels/ApiKey";
import ApiKeyPermission from "Common/Models/DatabaseModels/ApiKeyPermission";
@@ -500,6 +496,22 @@ import ScheduledMaintenanceTemplateOwnerUserService, {
} from "Common/Server/Services/ScheduledMaintenanceTemplateOwnerUserService";
import TableView from "Common/Models/DatabaseModels/TableView";
import IncidentFeed from "Common/Models/DatabaseModels/IncidentFeed";
import AlertFeed from "Common/Models/DatabaseModels/AlertFeed";
import ScheduledMaintenanceFeed from "Common/Models/DatabaseModels/ScheduledMaintenanceFeed";
import IncidentFeedService, {
Service as IncidentFeedServiceType,
} from "Common/Server/Services/IncidentFeedService";
import AlertFeedService, {
Service as AlertFeedServiceType,
} from "Common/Server/Services/AlertFeedService";
import ScheduledMaintenanceFeedService, {
Service as ScheduledMaintenanceFeedServiceType,
} from "Common/Server/Services/ScheduledMaintenanceFeedService";
const BaseAPIFeatureSet: FeatureSet = {
init: async (): Promise<void> => {
const app: ExpressApplication = Express.getExpressApp();
@@ -530,6 +542,30 @@ const BaseAPIFeatureSet: FeatureSet = {
).getRouter(),
);
app.use(
`/${APP_NAME.toLocaleLowerCase()}`,
new BaseAPI<IncidentFeed, IncidentFeedServiceType>(
IncidentFeed,
IncidentFeedService,
).getRouter(),
);
app.use(
`/${APP_NAME.toLocaleLowerCase()}`,
new BaseAPI<AlertFeed, AlertFeedServiceType>(
AlertFeed,
AlertFeedService,
).getRouter(),
);
app.use(
`/${APP_NAME.toLocaleLowerCase()}`,
new BaseAPI<
ScheduledMaintenanceFeed,
ScheduledMaintenanceFeedServiceType
>(ScheduledMaintenanceFeed, ScheduledMaintenanceFeedService).getRouter(),
);
app.use(
`/${APP_NAME.toLocaleLowerCase()}`,
new BaseAPI<AlertNoteTemplate, AlertNoteTemplateServiceType>(
@@ -673,14 +709,6 @@ const BaseAPIFeatureSet: FeatureSet = {
).getRouter(),
);
app.use(
`/${APP_NAME.toLocaleLowerCase()}`,
new BaseAnalyticsAPI<
MonitorMetricsByMinute,
MonitorMetricsByMinuteServiceType
>(MonitorMetricsByMinute, MonitorMetricsByMinuteService).getRouter(),
);
app.use(
`/${APP_NAME.toLocaleLowerCase()}`,
new BaseAPI<TelemetryIngestionKey, TelemetryIngestionKeyServiceType>(
@@ -1371,7 +1399,7 @@ const BaseAPIFeatureSet: FeatureSet = {
);
app.use(`/${APP_NAME.toLocaleLowerCase()}`, new UserEmailAPI().getRouter());
app.use(`/${APP_NAME.toLocaleLowerCase()}`, new UserSMSAPI().getRouter());
app.use(`/${APP_NAME.toLocaleLowerCase()}`, new Ingestor().getRouter());
app.use(`/${APP_NAME.toLocaleLowerCase()}`, new ProbeAPI().getRouter());
app.use(
`/${APP_NAME.toLocaleLowerCase()}`,

View File

@@ -45,106 +45,112 @@ const router: ExpressRouter = Express.getRouter();
router.get(
"/service-provider-login",
async (req: ExpressRequest, res: ExpressResponse): Promise<void> => {
if (!req.query["email"]) {
return Response.sendErrorResponse(
req,
res,
new BadRequestException("Email is required"),
);
}
try {
if (!req.query["email"]) {
return Response.sendErrorResponse(
req,
res,
new BadRequestException("Email is required"),
);
}
const email: Email = new Email(req.query["email"] as string);
const email: Email = new Email(req.query["email"] as string);
if (!email) {
return Response.sendErrorResponse(
req,
res,
new BadRequestException("Email is required"),
);
}
if (!email) {
return Response.sendErrorResponse(
req,
res,
new BadRequestException("Email is required"),
);
}
// get sso config for this user.
// get sso config for this user.
const user: User | null = await UserService.findOneBy({
query: { email: email },
select: {
_id: true,
},
props: {
isRoot: true,
},
});
if (!user) {
return Response.sendErrorResponse(
req,
res,
new BadRequestException("No SSO config found for this user"),
);
}
const userId: ObjectID = user.id!;
if (!userId) {
return Response.sendErrorResponse(
req,
res,
new BadRequestException("No SSO config found for this user"),
);
}
const projectUserBelongsTo: Array<ObjectID> = (
await TeamMemberService.findBy({
query: { userId: userId },
const user: User | null = await UserService.findOneBy({
query: { email: email },
select: {
projectId: true,
_id: true,
},
limit: LIMIT_PER_PROJECT,
skip: 0,
props: {
isRoot: true,
},
})
).map((teamMember: TeamMember) => {
return teamMember.projectId!;
});
});
if (projectUserBelongsTo.length === 0) {
return Response.sendErrorResponse(
if (!user) {
return Response.sendErrorResponse(
req,
res,
new BadRequestException("No SSO config found for this user"),
);
}
const userId: ObjectID = user.id!;
if (!userId) {
return Response.sendErrorResponse(
req,
res,
new BadRequestException("No SSO config found for this user"),
);
}
const projectUserBelongsTo: Array<ObjectID> = (
await TeamMemberService.findBy({
query: { userId: userId },
select: {
projectId: true,
},
limit: LIMIT_PER_PROJECT,
skip: 0,
props: {
isRoot: true,
},
})
).map((teamMember: TeamMember) => {
return teamMember.projectId!;
});
if (projectUserBelongsTo.length === 0) {
return Response.sendErrorResponse(
req,
res,
new BadRequestException("No SSO config found for this user"),
);
}
const projectSSOList: Array<ProjectSSO> = await ProjectSSOService.findBy({
query: {
projectId: QueryHelper.any(projectUserBelongsTo),
isEnabled: true,
},
limit: LIMIT_PER_PROJECT,
skip: 0,
select: {
name: true,
description: true,
_id: true,
projectId: true,
project: {
name: true,
} as Select<Project>,
},
props: {
isRoot: true,
},
});
return Response.sendEntityArrayResponse(
req,
res,
new BadRequestException("No SSO config found for this user"),
projectSSOList,
projectSSOList.length,
ProjectSSO,
);
} catch (err) {
logger.error(err);
Response.sendErrorResponse(req, res, err as Exception);
}
const projectSSOList: Array<ProjectSSO> = await ProjectSSOService.findBy({
query: {
projectId: QueryHelper.any(projectUserBelongsTo),
isEnabled: true,
},
limit: LIMIT_PER_PROJECT,
skip: 0,
select: {
name: true,
description: true,
_id: true,
projectId: true,
project: {
name: true,
} as Select<Project>,
},
props: {
isRoot: true,
},
});
return Response.sendEntityArrayResponse(
req,
res,
projectSSOList,
projectSSOList.length,
ProjectSSO,
);
},
);
@@ -153,7 +159,7 @@ router.get(
async (
req: ExpressRequest,
res: ExpressResponse,
next: NextFunction,
_next: NextFunction,
): Promise<void> => {
try {
if (!req.params["projectId"]) {
@@ -227,7 +233,9 @@ router.get(
return Response.redirect(req, res, samlRequestUrl);
} catch (err) {
return next(err);
logger.error(err);
Response.sendErrorResponse(req, res, err as Exception);
}
},
);
@@ -522,7 +530,8 @@ const loginUserWithSso: LoginUserWithSsoFunction = async (
);
} catch (err) {
logger.error(err);
Response.sendErrorResponse(req, res, new ServerException());
Response.sendErrorResponse(req, res, err as Exception);
}
};

View File

@@ -0,0 +1,17 @@
{{> Start this}}
{{> CustomLogo this}}
{{> EmailTitle title=(concat statusPageName " - Please confirm your subscription" ) }}
{{> InfoBlock info="You will be the first to hear from us when there are any incidents, announcements or scheduled maintenance events."}}
{{> ButtonBlock buttonUrl=confirmationUrl buttonText="Confirm Subscription"}}
{{> InfoBlock info="You can also view the status page by visiting this link:"}}
{{> InfoBlock info=statusPageUrl}}
{{> UnsubscribeBlock this}}
{{> VerticalSpace this}}
{{> End this}}

View File

@@ -3,16 +3,15 @@
{{> CustomLogo this}}
{{> EmailTitle title=(concat "You have been subscribed to status page - " statusPageName) }}
{{> InfoBlock info=(concat "You have been subscribed to status page - " statusPageName)}}
{{> InfoBlock info="You will be the first to hear from us when there are any incidents, announcements or scheduled maintenance events."}}
{{> ButtonBlock buttonUrl=statusPageUrl buttonText="Go to Status Page"}}
{{> InfoBlock info="You can also view the status page by visiting these link:"}}
{{> InfoBlock info="You can also view the status page by visiting this link:"}}
{{> InfoBlock info=statusPageUrl}}
{{> UnsubscribeBlock this}}
{{> VerticalSpace this}}
{{> End this}}
{{> End this}}

View File

@@ -1,7 +1,7 @@
{{> Start this}}
{{> CustomLogo this}}
{{> EmailTitle title=(concat "Incident: " incidentTitle) }}
{{> EmailTitle title=emailTitle }}
{{> InfoBlock info="Incident state has changed. Here are the details: "}}

View File

@@ -24,10 +24,42 @@ const init: PromiseVoidFunction = async (): Promise<void> => {
const statusCheck: PromiseVoidFunction = async (): Promise<void> => {
// Check the status of infrastructure components
return await InfrastructureStatus.checkStatus({
return await InfrastructureStatus.checkStatusWithRetry({
checkClickhouseStatus: true,
checkPostgresStatus: true,
checkRedisStatus: true,
retryCount: 3,
});
};
const globalCacheCheck: PromiseVoidFunction = async (): Promise<void> => {
// Check the status of cache
return await InfrastructureStatus.checkStatusWithRetry({
checkClickhouseStatus: false,
checkPostgresStatus: false,
checkRedisStatus: true,
retryCount: 3,
});
};
const analyticsDatabaseCheck: PromiseVoidFunction =
async (): Promise<void> => {
// Check the status of analytics database
return await InfrastructureStatus.checkStatusWithRetry({
checkClickhouseStatus: true,
checkPostgresStatus: false,
checkRedisStatus: false,
retryCount: 3,
});
};
const databaseCheck: PromiseVoidFunction = async (): Promise<void> => {
// Check the status of database
return await InfrastructureStatus.checkStatusWithRetry({
checkClickhouseStatus: false,
checkPostgresStatus: true,
checkRedisStatus: false,
retryCount: 3,
});
};
@@ -37,6 +69,9 @@ const init: PromiseVoidFunction = async (): Promise<void> => {
statusOptions: {
liveCheck: statusCheck,
readyCheck: statusCheck,
globalCacheCheck: globalCacheCheck,
analyticsDatabaseCheck: analyticsDatabaseCheck,
databaseCheck: databaseCheck,
},
});

View File

@@ -1,7 +1,6 @@
import AnalyticsBaseModel from "./AnalyticsBaseModel/AnalyticsBaseModel";
import Log from "./Log";
import Metric from "./Metric";
import MonitorMetricsByMinute from "./MonitorMetricsByMinute";
import Span from "./Span";
import TelemetryAttribute from "./TelemetryAttribute";
import ExceptionInstance from "./ExceptionInstance";
@@ -10,7 +9,6 @@ const AnalyticsModels: Array<{ new (): AnalyticsBaseModel }> = [
Log,
Span,
Metric,
MonitorMetricsByMinute,
TelemetryAttribute,
ExceptionInstance,
];

View File

@@ -19,6 +19,13 @@ export enum MetricPointType {
ExponentialHistogram = "ExponentialHistogram",
}
export enum ServiceType {
OpenTelemetry = "OpenTelemetry",
Monitor = "Monitor",
Alert = "Alert",
Incident = "Incident",
}
export default class Metric extends AnalyticsBaseModel {
public constructor() {
super({
@@ -78,10 +85,11 @@ export default class Metric extends AnalyticsBaseModel {
},
}),
// this can also be the monitor id or the telemetry service id.
new AnalyticsTableColumn({
key: "serviceId",
title: "Service ID",
description: "ID of the Service which created the log",
description: "ID of the Service which created the Metric",
required: true,
type: TableColumnType.ObjectID,
accessControl: {
@@ -101,6 +109,30 @@ export default class Metric extends AnalyticsBaseModel {
},
}),
// this can also be the monitor id or the telemetry service id.
new AnalyticsTableColumn({
key: "serviceType",
title: "Service Type",
description: "Type of the service that this telemetry belongs to",
required: false,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadTelemetryServiceLog,
],
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateTelemetryServiceLog,
],
update: [],
},
}),
// add name and description
new AnalyticsTableColumn({
@@ -539,6 +571,10 @@ export default class Metric extends AnalyticsBaseModel {
return this.getColumnValue("serviceId") as ObjectID | undefined;
}
public get serviceType(): ServiceType | undefined {
return this.getColumnValue("serviceType") as ServiceType | undefined;
}
public get name(): string | undefined {
return this.getColumnValue("name") as string | undefined;
}
@@ -595,6 +631,10 @@ export default class Metric extends AnalyticsBaseModel {
this.setColumnValue("serviceId", v);
}
public set serviceType(v: ServiceType | undefined) {
this.setColumnValue("serviceType", v);
}
public get time(): Date | undefined {
return this.getColumnValue("time") as Date | undefined;
}

View File

@@ -1,177 +0,0 @@
import AnalyticsBaseModel from "./AnalyticsBaseModel/AnalyticsBaseModel";
import Route from "../../Types/API/Route";
import AnalyticsTableEngine from "../../Types/AnalyticsDatabase/AnalyticsTableEngine";
import AnalyticsTableColumn from "../../Types/AnalyticsDatabase/TableColumn";
import TableColumnType from "../../Types/AnalyticsDatabase/TableColumnType";
import BrowserType from "../../Types/BrowserType";
import { JSONObject } from "../../Types/JSON";
import { CheckOn } from "../../Types/Monitor/CriteriaFilter";
import ObjectID from "../../Types/ObjectID";
import Permission from "../../Types/Permission";
import ScreenSizeType from "../../Types/ScreenSizeType";
export interface MonitorMetricsMiscData {
diskPath?: string;
probeId?: string;
browserType?: BrowserType;
screenSizeType?: ScreenSizeType;
}
export default class MonitorMetricsByMinute extends AnalyticsBaseModel {
public constructor() {
super({
tableName: "MonitorMetrics",
tableEngine: AnalyticsTableEngine.MergeTree,
singularName: "Monitor Metric",
pluralName: "Monitor Metrics",
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadProjectMonitor,
],
create: [],
update: [],
delete: [],
},
crudApiPath: new Route("/monitor-metrics"),
tableColumns: [
new AnalyticsTableColumn({
key: "projectId",
title: "Project ID",
description: "ID of project",
required: true,
type: TableColumnType.ObjectID,
isTenantId: true,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadProjectMonitor,
],
create: [],
update: [],
},
}),
new AnalyticsTableColumn({
key: "monitorId",
title: "Monitor ID",
description: "ID of the Monitor which this metric belongs to",
required: true,
type: TableColumnType.ObjectID,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadProjectMonitor,
],
create: [],
update: [],
},
}),
new AnalyticsTableColumn({
key: "metricType",
title: "Metric Type",
description: "Type of metric",
required: true,
type: TableColumnType.Text,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadProjectMonitor,
],
create: [],
update: [],
},
}),
new AnalyticsTableColumn({
key: "metricValue",
title: "Metric Value",
description: "Value of the metric",
required: true,
type: TableColumnType.Number,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadProjectMonitor,
],
create: [],
update: [],
},
}),
new AnalyticsTableColumn({
key: "miscData",
title: "Misc Data",
description: "Misc data for the metric (if any)",
required: false,
type: TableColumnType.JSON,
accessControl: {
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadProjectMonitor,
],
create: [],
update: [],
},
}),
],
sortKeys: ["projectId", "monitorId", "createdAt"],
primaryKeys: ["projectId", "monitorId"],
});
}
public get projectId(): ObjectID | undefined {
return this.getColumnValue("projectId") as ObjectID | undefined;
}
public set projectId(v: ObjectID | undefined) {
this.setColumnValue("projectId", v);
}
public get monitorId(): ObjectID | undefined {
return this.getColumnValue("monitorId") as ObjectID | undefined;
}
public set monitorId(v: ObjectID | undefined) {
this.setColumnValue("monitorId", v);
}
public get metricType(): CheckOn | undefined {
return this.getColumnValue("metricType") as CheckOn | undefined;
}
public set metricType(v: CheckOn | undefined) {
this.setColumnValue("metricType", v);
}
public get metricValue(): number | undefined {
return this.getColumnValue("metricValue") as number | undefined;
}
public set metricValue(v: number | undefined) {
this.setColumnValue("metricValue", v);
}
public get miscData(): MonitorMetricsMiscData | undefined {
return this.getColumnValue("miscData") as
| MonitorMetricsMiscData
| undefined;
}
public set miscData(v: MonitorMetricsMiscData | undefined) {
this.setColumnValue("miscData", v as JSONObject);
}
}

View File

@@ -801,7 +801,6 @@ export default class Alert extends BaseModel {
],
update: [],
})
@Index()
@TableColumn({
type: TableColumnType.Markdown,
required: false,

View File

@@ -0,0 +1,518 @@
import Alert from "./Alert";
import Project from "./Project";
import User from "./User";
import BaseModel from "./DatabaseBaseModel/DatabaseBaseModel";
import Route from "../../Types/API/Route";
import ColumnAccessControl from "../../Types/Database/AccessControl/ColumnAccessControl";
import TableAccessControl from "../../Types/Database/AccessControl/TableAccessControl";
import CanAccessIfCanReadOn from "../../Types/Database/CanAccessIfCanReadOn";
import ColumnType from "../../Types/Database/ColumnType";
import CrudApiEndpoint from "../../Types/Database/CrudApiEndpoint";
import EnableDocumentation from "../../Types/Database/EnableDocumentation";
import EnableWorkflow from "../../Types/Database/EnableWorkflow";
import TableColumn from "../../Types/Database/TableColumn";
import TableColumnType from "../../Types/Database/TableColumnType";
import TableMetadata from "../../Types/Database/TableMetadata";
import TenantColumn from "../../Types/Database/TenantColumn";
import IconProp from "../../Types/Icon/IconProp";
import ObjectID from "../../Types/ObjectID";
import Permission from "../../Types/Permission";
import { Column, Entity, Index, JoinColumn, ManyToOne } from "typeorm";
import ColumnLength from "../../Types/Database/ColumnLength";
import Color from "../../Types/Color";
export enum AlertFeedEventType {
PublicNote = "PublicNote",
SubscriberEmailSent = "SubscriberEmailSent",
OwnerEmailSent = "OwnerEmailSent",
AlertCreated = "AlertCreated",
AlertAcknowledged = "AlertAcknowledged",
AlertResolved = "AlertResolved",
PrivateNote = "PrivateNote",
}
@EnableDocumentation()
@CanAccessIfCanReadOn("alert")
@TenantColumn("projectId")
@TableAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateAlertFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadAlertFeed,
],
delete: [],
update: [],
})
@EnableWorkflow({
create: true,
delete: true,
update: true,
read: true,
})
@CrudApiEndpoint(new Route("/alert-feed"))
@Entity({
name: "AlertFeed",
})
@TableMetadata({
tableName: "AlertFeed",
singularName: "Alert Feed",
pluralName: "Alert Feeds",
icon: IconProp.List,
tableDescription:
"Log of the entire alert state change. This is a log of all the alert state changes, public notes, more etc.",
})
export default class AlertFeed extends BaseModel {
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateAlertFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadAlertFeed,
],
update: [],
})
@TableColumn({
manyToOneRelationColumn: "projectId",
type: TableColumnType.Entity,
modelType: Project,
title: "Project",
description: "Relation to Project Resource in which this object belongs",
})
@ManyToOne(
() => {
return Project;
},
{
eager: false,
nullable: true,
onDelete: "CASCADE",
orphanedRowAction: "nullify",
},
)
@JoinColumn({ name: "projectId" })
public project?: Project = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateAlertFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadAlertFeed,
],
update: [],
})
@Index()
@TableColumn({
type: TableColumnType.ObjectID,
required: true,
canReadOnRelationQuery: true,
title: "Project ID",
description: "ID of your OneUptime Project in which this object belongs",
})
@Column({
type: ColumnType.ObjectID,
nullable: false,
transformer: ObjectID.getDatabaseTransformer(),
})
public projectId?: ObjectID = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateAlertFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadAlertFeed,
],
update: [],
})
@TableColumn({
manyToOneRelationColumn: "alertId",
type: TableColumnType.Entity,
modelType: Alert,
title: "Alert",
description: "Relation to Alert in which this resource belongs",
})
@ManyToOne(
() => {
return Alert;
},
{
eager: false,
nullable: true,
onDelete: "CASCADE",
orphanedRowAction: "nullify",
},
)
@JoinColumn({ name: "alertId" })
public alert?: Alert = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateAlertFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadAlertFeed,
],
update: [],
})
@Index()
@TableColumn({
type: TableColumnType.ObjectID,
required: true,
title: "Alert ID",
description: "Relation to Alert ID in which this resource belongs",
})
@Column({
type: ColumnType.ObjectID,
nullable: false,
transformer: ObjectID.getDatabaseTransformer(),
})
public alertId?: ObjectID = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateAlertFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadAlertFeed,
],
update: [],
})
@TableColumn({
manyToOneRelationColumn: "createdByUserId",
type: TableColumnType.Entity,
modelType: User,
title: "Created by User",
description:
"Relation to User who created this object (if this object was created by a User)",
})
@ManyToOne(
() => {
return User;
},
{
eager: false,
nullable: true,
onDelete: "SET NULL",
orphanedRowAction: "nullify",
},
)
@JoinColumn({ name: "createdByUserId" })
public createdByUser?: User = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateAlertFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadAlertFeed,
],
update: [],
})
@TableColumn({
type: TableColumnType.ObjectID,
title: "Created by User ID",
description:
"User ID who created this object (if this object was created by a User)",
})
@Column({
type: ColumnType.ObjectID,
nullable: true,
transformer: ObjectID.getDatabaseTransformer(),
})
public createdByUserId?: ObjectID = undefined;
@ColumnAccessControl({
create: [],
read: [],
update: [],
})
@TableColumn({
manyToOneRelationColumn: "deletedByUserId",
type: TableColumnType.Entity,
title: "Deleted by User",
description:
"Relation to User who deleted this object (if this object was deleted by a User)",
})
@ManyToOne(
() => {
return User;
},
{
cascade: false,
eager: false,
nullable: true,
onDelete: "SET NULL",
orphanedRowAction: "nullify",
},
)
@JoinColumn({ name: "deletedByUserId" })
public deletedByUser?: User = undefined;
@ColumnAccessControl({
create: [],
read: [],
update: [],
})
@TableColumn({
type: TableColumnType.ObjectID,
title: "Deleted by User ID",
description:
"User ID who deleted this object (if this object was deleted by a User)",
})
@Column({
type: ColumnType.ObjectID,
nullable: true,
transformer: ObjectID.getDatabaseTransformer(),
})
public deletedByUserId?: ObjectID = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateAlertFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadAlertFeed,
],
update: [],
})
@TableColumn({
type: TableColumnType.Markdown,
required: true,
title: "Log (in Markdown)",
description: "Log of the entire alert state change in Markdown",
})
@Column({
type: ColumnType.Markdown,
nullable: false,
unique: false,
})
public feedInfoInMarkdown?: string = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateAlertFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadAlertFeed,
],
update: [],
})
@TableColumn({
type: TableColumnType.Markdown,
required: false,
title: "More Information (in Markdown)",
description: "More information in Markdown",
})
@Column({
type: ColumnType.Markdown,
nullable: true,
unique: false,
})
public moreInformationInMarkdown?: string = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateAlertFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadAlertFeed,
],
update: [],
})
@TableColumn({
type: TableColumnType.ShortText,
required: true,
title: "Alert Feed Event",
description: "Alert Feed Event",
})
@Column({
type: ColumnType.ShortText,
nullable: false,
unique: false,
})
public alertFeedEventType?: AlertFeedEventType = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateAlertFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadAlertFeed,
],
update: [],
})
@TableColumn({
type: TableColumnType.Color,
required: true,
title: "Color",
description: "Display color for the alert log",
})
@Column({
type: ColumnType.Color,
length: ColumnLength.Color,
nullable: false,
unique: false,
transformer: Color.getDatabaseTransformer(),
})
public displayColor?: Color = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateAlertFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadAlertFeed,
],
update: [],
})
@TableColumn({
manyToOneRelationColumn: "userId",
type: TableColumnType.Entity,
modelType: User,
title: "User",
description:
"Relation to User who this feed belongs to (if this feed belongs to a User)",
})
@ManyToOne(
() => {
return User;
},
{
eager: false,
nullable: true,
onDelete: "SET NULL",
orphanedRowAction: "nullify",
},
)
@JoinColumn({ name: "userId" })
public user?: User = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateAlertFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadAlertFeed,
],
update: [],
})
@TableColumn({
type: TableColumnType.ObjectID,
title: "User ID",
description:
"User who this feed belongs to (if this feed belongs to a User)",
})
@Column({
type: ColumnType.ObjectID,
nullable: true,
transformer: ObjectID.getDatabaseTransformer(),
})
public userId?: ObjectID = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateAlertFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadAlertFeed,
],
update: [],
})
@TableColumn({
title: "Feed Posted At",
description: "Date and time when the feed was posted",
type: TableColumnType.Date,
})
@Column({
type: ColumnType.Date,
nullable: true,
unique: false,
})
public postedAt?: Date = undefined;
}

View File

@@ -853,9 +853,13 @@ export default class Incident extends BaseModel {
Permission.ProjectMember,
Permission.ReadProjectIncident,
],
update: [],
update: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.EditProjectIncident,
],
})
@Index()
@TableColumn({
type: TableColumnType.Markdown,
required: false,
@@ -1087,4 +1091,33 @@ export default class Incident extends BaseModel {
nullable: true,
})
public telemetryQuery?: TelemetryQuery = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateProjectIncident,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadProjectIncident,
],
update: [],
})
@Index()
@TableColumn({
isDefaultValueColumn: false,
required: false,
type: TableColumnType.Number,
title: "Incident Number",
description: "Incident Number",
})
@Column({
type: ColumnType.Number,
nullable: true,
})
public incidentNumber?: number = undefined;
}

View File

@@ -0,0 +1,526 @@
import Incident from "./Incident";
import Project from "./Project";
import User from "./User";
import BaseModel from "./DatabaseBaseModel/DatabaseBaseModel";
import Route from "../../Types/API/Route";
import ColumnAccessControl from "../../Types/Database/AccessControl/ColumnAccessControl";
import TableAccessControl from "../../Types/Database/AccessControl/TableAccessControl";
import CanAccessIfCanReadOn from "../../Types/Database/CanAccessIfCanReadOn";
import ColumnType from "../../Types/Database/ColumnType";
import CrudApiEndpoint from "../../Types/Database/CrudApiEndpoint";
import EnableDocumentation from "../../Types/Database/EnableDocumentation";
import EnableWorkflow from "../../Types/Database/EnableWorkflow";
import TableColumn from "../../Types/Database/TableColumn";
import TableColumnType from "../../Types/Database/TableColumnType";
import TableMetadata from "../../Types/Database/TableMetadata";
import TenantColumn from "../../Types/Database/TenantColumn";
import IconProp from "../../Types/Icon/IconProp";
import ObjectID from "../../Types/ObjectID";
import Permission from "../../Types/Permission";
import { Column, Entity, Index, JoinColumn, ManyToOne } from "typeorm";
import Color from "../../Types/Color";
import ColumnLength from "../../Types/Database/ColumnLength";
export enum IncidentFeedEventType {
PublicNote = "PublicNote",
SubscriberNotificationSent = "SubscriberNotificationSent",
OwnerNotificationSent = "OwnerNotificationSent",
OwnerUserAdded = "OwnerUserAdded",
OwnerTeamAdded = "OwnerTeamAdded",
IncidentCreated = "IncidentCreated",
IncidentStateChanged = "IncidentStateChanged",
PrivateNote = "PrivateNote",
IncidentUpdated = "IncidentUpdated",
RootCause = "RootCause",
RemediationNotes = "RemediationNotes",
OwnerUserRemoved = "OwnerUserRemoved",
OwnerTeamRemoved = "OwnerTeamRemoved",
OnCallPolicy = "OnCallPolicy",
OnCallNotification = "OnCallNotification",
}
@EnableDocumentation()
@CanAccessIfCanReadOn("incident")
@TenantColumn("projectId")
@TableAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateIncidentFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadIncidentFeed,
],
delete: [],
update: [],
})
@EnableWorkflow({
create: true,
delete: true,
update: true,
read: true,
})
@CrudApiEndpoint(new Route("/incident-feed"))
@Entity({
name: "IncidentFeed",
})
@TableMetadata({
tableName: "IncidentFeed",
singularName: "Incident Feed",
pluralName: "Incident Feeds",
icon: IconProp.List,
tableDescription:
"Log of the entire incident state change. This is a log of all the incident state changes, public notes, more etc.",
})
export default class IncidentFeed extends BaseModel {
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateIncidentFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadIncidentFeed,
],
update: [],
})
@TableColumn({
manyToOneRelationColumn: "projectId",
type: TableColumnType.Entity,
modelType: Project,
title: "Project",
description: "Relation to Project Resource in which this object belongs",
})
@ManyToOne(
() => {
return Project;
},
{
eager: false,
nullable: true,
onDelete: "CASCADE",
orphanedRowAction: "nullify",
},
)
@JoinColumn({ name: "projectId" })
public project?: Project = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateIncidentFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadIncidentFeed,
],
update: [],
})
@Index()
@TableColumn({
type: TableColumnType.ObjectID,
required: true,
canReadOnRelationQuery: true,
title: "Project ID",
description: "ID of your OneUptime Project in which this object belongs",
})
@Column({
type: ColumnType.ObjectID,
nullable: false,
transformer: ObjectID.getDatabaseTransformer(),
})
public projectId?: ObjectID = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateIncidentFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadIncidentFeed,
],
update: [],
})
@TableColumn({
manyToOneRelationColumn: "incidentId",
type: TableColumnType.Entity,
modelType: Incident,
title: "Incident",
description: "Relation to Incident in which this resource belongs",
})
@ManyToOne(
() => {
return Incident;
},
{
eager: false,
nullable: true,
onDelete: "CASCADE",
orphanedRowAction: "nullify",
},
)
@JoinColumn({ name: "incidentId" })
public incident?: Incident = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateIncidentFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadIncidentFeed,
],
update: [],
})
@Index()
@TableColumn({
type: TableColumnType.ObjectID,
required: true,
title: "Incident ID",
description: "Relation to Incident ID in which this resource belongs",
})
@Column({
type: ColumnType.ObjectID,
nullable: false,
transformer: ObjectID.getDatabaseTransformer(),
})
public incidentId?: ObjectID = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateIncidentFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadIncidentFeed,
],
update: [],
})
@TableColumn({
manyToOneRelationColumn: "createdByUserId",
type: TableColumnType.Entity,
modelType: User,
title: "Created by User",
description:
"Relation to User who created this object (if this object was created by a User)",
})
@ManyToOne(
() => {
return User;
},
{
eager: false,
nullable: true,
onDelete: "SET NULL",
orphanedRowAction: "nullify",
},
)
@JoinColumn({ name: "createdByUserId" })
public createdByUser?: User = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateIncidentFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadIncidentFeed,
],
update: [],
})
@TableColumn({
type: TableColumnType.ObjectID,
title: "Created by User ID",
description:
"User ID who created this object (if this object was created by a User)",
})
@Column({
type: ColumnType.ObjectID,
nullable: true,
transformer: ObjectID.getDatabaseTransformer(),
})
public createdByUserId?: ObjectID = undefined;
@ColumnAccessControl({
create: [],
read: [],
update: [],
})
@TableColumn({
manyToOneRelationColumn: "deletedByUserId",
type: TableColumnType.Entity,
title: "Deleted by User",
description:
"Relation to User who deleted this object (if this object was deleted by a User)",
})
@ManyToOne(
() => {
return User;
},
{
cascade: false,
eager: false,
nullable: true,
onDelete: "SET NULL",
orphanedRowAction: "nullify",
},
)
@JoinColumn({ name: "deletedByUserId" })
public deletedByUser?: User = undefined;
@ColumnAccessControl({
create: [],
read: [],
update: [],
})
@TableColumn({
type: TableColumnType.ObjectID,
title: "Deleted by User ID",
description:
"User ID who deleted this object (if this object was deleted by a User)",
})
@Column({
type: ColumnType.ObjectID,
nullable: true,
transformer: ObjectID.getDatabaseTransformer(),
})
public deletedByUserId?: ObjectID = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateIncidentFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadIncidentFeed,
],
update: [],
})
@TableColumn({
type: TableColumnType.Markdown,
required: true,
title: "Log (in Markdown)",
description: "Log of the entire incident state change in Markdown",
})
@Column({
type: ColumnType.Markdown,
nullable: false,
unique: false,
})
public feedInfoInMarkdown?: string = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateIncidentFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadIncidentFeed,
],
update: [],
})
@TableColumn({
type: TableColumnType.Markdown,
required: false,
title: "More Information (in Markdown)",
description: "More information in Markdown",
})
@Column({
type: ColumnType.Markdown,
nullable: true,
unique: false,
})
public moreInformationInMarkdown?: string = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateIncidentFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadIncidentFeed,
],
update: [],
})
@TableColumn({
type: TableColumnType.ShortText,
required: true,
title: "Incident Feed Event",
description: "Incident Feed Event",
})
@Column({
type: ColumnType.ShortText,
nullable: false,
unique: false,
})
public incidentFeedEventType?: IncidentFeedEventType = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateIncidentFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadIncidentFeed,
],
update: [],
})
@TableColumn({
type: TableColumnType.Color,
required: true,
title: "Color",
description: "Display color for the incident log",
})
@Column({
type: ColumnType.Color,
length: ColumnLength.Color,
nullable: false,
unique: false,
transformer: Color.getDatabaseTransformer(),
})
public displayColor?: Color = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateScheduledMaintenanceFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadScheduledMaintenanceFeed,
],
update: [],
})
@TableColumn({
manyToOneRelationColumn: "userId",
type: TableColumnType.Entity,
modelType: User,
title: "User",
description:
"Relation to User who this feed belongs to (if this feed belongs to a User)",
})
@ManyToOne(
() => {
return User;
},
{
eager: false,
nullable: true,
onDelete: "SET NULL",
orphanedRowAction: "nullify",
},
)
@JoinColumn({ name: "userId" })
public user?: User = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateScheduledMaintenanceFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadScheduledMaintenanceFeed,
],
update: [],
})
@TableColumn({
type: TableColumnType.ObjectID,
title: "User ID",
description:
"User who this feed belongs to (if this feed belongs to a User)",
})
@Column({
type: ColumnType.ObjectID,
nullable: true,
transformer: ObjectID.getDatabaseTransformer(),
})
public userId?: ObjectID = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateScheduledMaintenanceFeed,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadScheduledMaintenanceFeed,
],
update: [],
})
@TableColumn({
title: "Feed Posted At",
description: "Date and time when the feed was posted",
type: TableColumnType.Date,
})
@Column({
type: ColumnType.Date,
nullable: true,
unique: false,
})
public postedAt?: Date = undefined;
}

View File

@@ -507,7 +507,6 @@ export default class IncidentStateTimeline extends BaseModel {
],
update: [],
})
@Index()
@TableColumn({
type: TableColumnType.Markdown,
required: false,

View File

@@ -20,6 +20,7 @@ import GreenlockCertificate from "./GreenlockCertificate";
import GreenlockChallenge from "./GreenlockChallenge";
// Incidents
import Incident from "./Incident";
import IncidentFeed from "./IncidentFeed";
import IncidentCustomField from "./IncidentCustomField";
import IncidentInternalNote from "./IncidentInternalNote";
import IncidentNoteTemplate from "./IncidentNoteTemplate";
@@ -142,6 +143,7 @@ import ScheduledMaintenanceTemplateOwnerTeam from "./ScheduledMaintenanceTemplat
import ScheduledMaintenanceTemplateOwnerUser from "./ScheduledMaintenanceTemplateOwnerUser";
import BaseModel from "./DatabaseBaseModel/DatabaseBaseModel";
import AlertState from "./AlertState";
import Alert from "./Alert";
import AlertCustomField from "./AlertCustomField";
@@ -151,10 +153,13 @@ import AlertOwnerTeam from "./AlertOwnerTeam";
import AlertOwnerUser from "./AlertOwnerUser";
import AlertSeverity from "./AlertSeverity";
import AlertNoteTemplate from "./AlertNoteTemplate";
import AlertFeed from "./AlertFeed";
import TableView from "./TableView";
import Dashboard from "./Dashboard";
import MonitorTest from "./MonitorTest";
import ScheduledMaintenanceFeed from "./ScheduledMaintenanceFeed";
const AllModelTypes: Array<{
new (): BaseModel;
@@ -187,6 +192,7 @@ const AllModelTypes: Array<{
IncidentState,
Incident,
IncidentFeed,
IncidentCustomField,
IncidentStateTimeline,
IncidentInternalNote,
@@ -201,6 +207,7 @@ const AllModelTypes: Array<{
AlertState,
Alert,
AlertFeed,
AlertCustomField,
AlertStateTimeline,
AlertInternalNote,
@@ -232,6 +239,7 @@ const AllModelTypes: Array<{
ScheduledMaintenancePublicNote,
ScheduledMaintenanceInternalNote,
ScheduledMaintenanceCustomField,
ScheduledMaintenanceFeed,
BillingPaymentMethods,
BillingInvoice,

View File

@@ -455,7 +455,6 @@ export default class MonitorStatusTimeline extends BaseModel {
],
update: [],
})
@Index()
@TableColumn({
type: TableColumnType.Markdown,
required: false,

View File

@@ -0,0 +1,522 @@
import ScheduledMaintenance from "./ScheduledMaintenance";
import Project from "./Project";
import User from "./User";
import BaseModel from "./DatabaseBaseModel/DatabaseBaseModel";
import Route from "../../Types/API/Route";
import ColumnAccessControl from "../../Types/Database/AccessControl/ColumnAccessControl";
import TableAccessControl from "../../Types/Database/AccessControl/TableAccessControl";
import CanAccessIfCanReadOn from "../../Types/Database/CanAccessIfCanReadOn";
import ColumnType from "../../Types/Database/ColumnType";
import CrudApiEndpoint from "../../Types/Database/CrudApiEndpoint";
import EnableDocumentation from "../../Types/Database/EnableDocumentation";
import EnableWorkflow from "../../Types/Database/EnableWorkflow";
import TableColumn from "../../Types/Database/TableColumn";
import TableColumnType from "../../Types/Database/TableColumnType";
import TableMetadata from "../../Types/Database/TableMetadata";
import TenantColumn from "../../Types/Database/TenantColumn";
import IconProp from "../../Types/Icon/IconProp";
import ObjectID from "../../Types/ObjectID";
import Permission from "../../Types/Permission";
import { Column, Entity, Index, JoinColumn, ManyToOne } from "typeorm";
import ColumnLength from "../../Types/Database/ColumnLength";
import Color from "../../Types/Color";
// Kinds of events that can appear in a scheduled-maintenance activity feed.
// Values are persisted as short text in the ScheduledMaintenanceFeed table,
// so they must never be renamed without a data migration.
export enum ScheduledMaintenanceFeedEventType {
  PublicNote = "PublicNote",
  SubscriberEmailSent = "SubscriberEmailSent",
  OwnerEmailSent = "OwnerEmailSent",
  ScheduledMaintenanceCreated = "ScheduledMaintenanceCreated",
  // NOTE(review): "Acknowledged"/"Resolved" mirror the incident feed event
  // names — confirm they correspond to actual scheduled-maintenance states.
  ScheduledMaintenanceAcknowledged = "ScheduledMaintenanceAcknowledged",
  ScheduledMaintenanceResolved = "ScheduledMaintenanceResolved",
  PrivateNote = "PrivateNote",
}
/**
 * Append-only activity feed for a scheduled maintenance event: state changes,
 * public/private notes, subscriber/owner notifications, etc. Entries are
 * created by services and never updated (every column's `update` ACL is empty).
 *
 * FIX: the `displayColor` column previously used `Permission.CreateIncidentFeed`
 * and `Permission.ReadIncidentFeed` — a copy-paste from the IncidentFeed model.
 * It now uses the ScheduledMaintenanceFeed permissions like every other column.
 */
@EnableDocumentation()
@CanAccessIfCanReadOn("scheduledMaintenance")
@TenantColumn("projectId")
@TableAccessControl({
  create: [
    Permission.ProjectOwner,
    Permission.ProjectAdmin,
    Permission.ProjectMember,
    Permission.CreateScheduledMaintenanceFeed,
  ],
  read: [
    Permission.ProjectOwner,
    Permission.ProjectAdmin,
    Permission.ProjectMember,
    Permission.ReadScheduledMaintenanceFeed,
  ],
  delete: [],
  update: [],
})
@EnableWorkflow({
  create: true,
  delete: true,
  update: true,
  read: true,
})
@CrudApiEndpoint(new Route("/scheduled-maintenance-feed"))
@Entity({
  name: "ScheduledMaintenanceFeed",
})
@TableMetadata({
  tableName: "ScheduledMaintenanceFeed",
  singularName: "Scheduled Maintenance Feed",
  pluralName: "Scheduled Maintenance Feed",
  icon: IconProp.List,
  tableDescription:
    "Log of the entire scheduled maintenance state change. This is a log of all the scheduled maintenance state changes, public notes, more etc.",
})
export default class ScheduledMaintenanceFeed extends BaseModel {
  // Tenant relation. Resolved via "projectId" below.
  @ColumnAccessControl({
    create: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      Permission.CreateScheduledMaintenanceFeed,
    ],
    read: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      Permission.ReadScheduledMaintenanceFeed,
    ],
    update: [],
  })
  @TableColumn({
    manyToOneRelationColumn: "projectId",
    type: TableColumnType.Entity,
    modelType: Project,
    title: "Project",
    description: "Relation to Project Resource in which this object belongs",
  })
  @ManyToOne(
    () => {
      return Project;
    },
    {
      eager: false,
      nullable: true,
      onDelete: "CASCADE", // deleting the project removes its feed entries
      orphanedRowAction: "nullify",
    },
  )
  @JoinColumn({ name: "projectId" })
  public project?: Project = undefined;

  // Tenant column (see @TenantColumn above). Required at the DB level.
  @ColumnAccessControl({
    create: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      Permission.CreateScheduledMaintenanceFeed,
    ],
    read: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      Permission.ReadScheduledMaintenanceFeed,
    ],
    update: [],
  })
  @Index()
  @TableColumn({
    type: TableColumnType.ObjectID,
    required: true,
    canReadOnRelationQuery: true,
    title: "Project ID",
    description: "ID of your OneUptime Project in which this object belongs",
  })
  @Column({
    type: ColumnType.ObjectID,
    nullable: false,
    transformer: ObjectID.getDatabaseTransformer(),
  })
  public projectId?: ObjectID = undefined;

  // Parent scheduled maintenance event this feed entry describes.
  @ColumnAccessControl({
    create: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      Permission.CreateScheduledMaintenanceFeed,
    ],
    read: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      Permission.ReadScheduledMaintenanceFeed,
    ],
    update: [],
  })
  @TableColumn({
    manyToOneRelationColumn: "scheduledMaintenanceId",
    type: TableColumnType.Entity,
    modelType: ScheduledMaintenance,
    title: "ScheduledMaintenance",
    description:
      "Relation to ScheduledMaintenance in which this resource belongs",
  })
  @ManyToOne(
    () => {
      return ScheduledMaintenance;
    },
    {
      eager: false,
      nullable: true,
      onDelete: "CASCADE", // feed entries do not outlive their event
      orphanedRowAction: "nullify",
    },
  )
  @JoinColumn({ name: "scheduledMaintenanceId" })
  public scheduledMaintenance?: ScheduledMaintenance = undefined;

  // FK column for the relation above; indexed because feeds are always
  // queried per scheduled-maintenance event.
  @ColumnAccessControl({
    create: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      Permission.CreateScheduledMaintenanceFeed,
    ],
    read: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      Permission.ReadScheduledMaintenanceFeed,
    ],
    update: [],
  })
  @Index()
  @TableColumn({
    type: TableColumnType.ObjectID,
    required: true,
    title: "ScheduledMaintenance ID",
    description:
      "Relation to ScheduledMaintenance ID in which this resource belongs",
  })
  @Column({
    type: ColumnType.ObjectID,
    nullable: false,
    transformer: ObjectID.getDatabaseTransformer(),
  })
  public scheduledMaintenanceId?: ObjectID = undefined;

  // Audit relation: the user who created this entry (if any).
  @ColumnAccessControl({
    create: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      Permission.CreateScheduledMaintenanceFeed,
    ],
    read: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      Permission.ReadScheduledMaintenanceFeed,
    ],
    update: [],
  })
  @TableColumn({
    manyToOneRelationColumn: "createdByUserId",
    type: TableColumnType.Entity,
    modelType: User,
    title: "Created by User",
    description:
      "Relation to User who created this object (if this object was created by a User)",
  })
  @ManyToOne(
    () => {
      return User;
    },
    {
      eager: false,
      nullable: true,
      onDelete: "SET NULL", // keep the feed entry if the user account goes away
      orphanedRowAction: "nullify",
    },
  )
  @JoinColumn({ name: "createdByUserId" })
  public createdByUser?: User = undefined;

  // FK column for the audit relation above.
  @ColumnAccessControl({
    create: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      Permission.CreateScheduledMaintenanceFeed,
    ],
    read: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      Permission.ReadScheduledMaintenanceFeed,
    ],
    update: [],
  })
  @TableColumn({
    type: TableColumnType.ObjectID,
    title: "Created by User ID",
    description:
      "User ID who created this object (if this object was created by a User)",
  })
  @Column({
    type: ColumnType.ObjectID,
    nullable: true,
    transformer: ObjectID.getDatabaseTransformer(),
  })
  public createdByUserId?: ObjectID = undefined;

  // Soft-delete audit relation. Not exposed through the API (empty ACLs).
  @ColumnAccessControl({
    create: [],
    read: [],
    update: [],
  })
  @TableColumn({
    manyToOneRelationColumn: "deletedByUserId",
    type: TableColumnType.Entity,
    title: "Deleted by User",
    description:
      "Relation to User who deleted this object (if this object was deleted by a User)",
  })
  @ManyToOne(
    () => {
      return User;
    },
    {
      cascade: false,
      eager: false,
      nullable: true,
      onDelete: "SET NULL",
      orphanedRowAction: "nullify",
    },
  )
  @JoinColumn({ name: "deletedByUserId" })
  public deletedByUser?: User = undefined;

  // FK column for the soft-delete audit relation above.
  @ColumnAccessControl({
    create: [],
    read: [],
    update: [],
  })
  @TableColumn({
    type: TableColumnType.ObjectID,
    title: "Deleted by User ID",
    description:
      "User ID who deleted this object (if this object was deleted by a User)",
  })
  @Column({
    type: ColumnType.ObjectID,
    nullable: true,
    transformer: ObjectID.getDatabaseTransformer(),
  })
  public deletedByUserId?: ObjectID = undefined;

  // The feed entry itself, rendered as Markdown. Required.
  @ColumnAccessControl({
    create: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      Permission.CreateScheduledMaintenanceFeed,
    ],
    read: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      Permission.ReadScheduledMaintenanceFeed,
    ],
    update: [],
  })
  @TableColumn({
    type: TableColumnType.Markdown,
    required: true,
    title: "Log (in Markdown)",
    description:
      "Log of the entire scheduledMaintenance state change in Markdown",
  })
  @Column({
    type: ColumnType.Markdown,
    nullable: false,
    unique: false,
  })
  public feedInfoInMarkdown?: string = undefined;

  // Optional expandable detail section for the feed entry.
  @ColumnAccessControl({
    create: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      Permission.CreateScheduledMaintenanceFeed,
    ],
    read: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      Permission.ReadScheduledMaintenanceFeed,
    ],
    update: [],
  })
  @TableColumn({
    type: TableColumnType.Markdown,
    required: false,
    title: "More Information (in Markdown)",
    description: "More information in Markdown",
  })
  @Column({
    type: ColumnType.Markdown,
    nullable: true,
    unique: false,
  })
  public moreInformationInMarkdown?: string = undefined;

  // Stored as short text; values come from ScheduledMaintenanceFeedEventType.
  @ColumnAccessControl({
    create: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      Permission.CreateScheduledMaintenanceFeed,
    ],
    read: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      Permission.ReadScheduledMaintenanceFeed,
    ],
    update: [],
  })
  @TableColumn({
    type: TableColumnType.ShortText,
    required: true,
    title: "ScheduledMaintenance Log Event",
    description: "ScheduledMaintenance Log Event",
  })
  @Column({
    type: ColumnType.ShortText,
    nullable: false,
    unique: false,
  })
  public scheduledMaintenanceFeedEventType?: ScheduledMaintenanceFeedEventType =
    undefined;

  // Color used when rendering this entry in the feed UI.
  @ColumnAccessControl({
    create: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      // FIX: was Permission.CreateIncidentFeed (copy-paste from IncidentFeed).
      Permission.CreateScheduledMaintenanceFeed,
    ],
    read: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      // FIX: was Permission.ReadIncidentFeed (copy-paste from IncidentFeed).
      Permission.ReadScheduledMaintenanceFeed,
    ],
    update: [],
  })
  @TableColumn({
    type: TableColumnType.Color,
    required: true,
    title: "Color",
    description: "Display color for this log",
  })
  @Column({
    type: ColumnType.Color,
    length: ColumnLength.Color,
    nullable: false,
    unique: false,
    transformer: Color.getDatabaseTransformer(),
  })
  public displayColor?: Color = undefined;

  // User this feed entry is about/attributed to (distinct from createdByUser).
  @ColumnAccessControl({
    create: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      Permission.CreateScheduledMaintenanceFeed,
    ],
    read: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      Permission.ReadScheduledMaintenanceFeed,
    ],
    update: [],
  })
  @TableColumn({
    manyToOneRelationColumn: "userId",
    type: TableColumnType.Entity,
    modelType: User,
    title: "User",
    description:
      "Relation to User who this feed belongs to (if this feed belongs to a User)",
  })
  @ManyToOne(
    () => {
      return User;
    },
    {
      eager: false,
      nullable: true,
      onDelete: "SET NULL",
      orphanedRowAction: "nullify",
    },
  )
  @JoinColumn({ name: "userId" })
  public user?: User = undefined;

  // FK column for the "user" relation above.
  @ColumnAccessControl({
    create: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      Permission.CreateScheduledMaintenanceFeed,
    ],
    read: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      Permission.ReadScheduledMaintenanceFeed,
    ],
    update: [],
  })
  @TableColumn({
    type: TableColumnType.ObjectID,
    title: "User ID",
    description:
      "User who this feed belongs to (if this feed belongs to a User)",
  })
  @Column({
    type: ColumnType.ObjectID,
    nullable: true,
    transformer: ObjectID.getDatabaseTransformer(),
  })
  public userId?: ObjectID = undefined;

  // Display timestamp for the feed timeline; nullable — presumably callers
  // fall back to createdAt when unset (TODO confirm in feed service).
  @ColumnAccessControl({
    create: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      Permission.CreateScheduledMaintenanceFeed,
    ],
    read: [
      Permission.ProjectOwner,
      Permission.ProjectAdmin,
      Permission.ProjectMember,
      Permission.ReadScheduledMaintenanceFeed,
    ],
    update: [],
  })
  @TableColumn({
    title: "Feed Posted At",
    description: "Date and time when the feed was posted",
    type: TableColumnType.Date,
  })
  @Column({
    type: ColumnType.Date,
    nullable: true,
    unique: false,
  })
  public postedAt?: Date = undefined;
}

View File

@@ -789,7 +789,12 @@ export default class ScheduledMaintenanceTemplate extends BaseModel {
Permission.ProjectMember,
Permission.ReadScheduledMaintenanceTemplate,
],
update: [],
update: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.EditScheduledMaintenanceNoteTemplate,
],
})
@TableColumn({
isDefaultValueColumn: false,
@@ -816,7 +821,12 @@ export default class ScheduledMaintenanceTemplate extends BaseModel {
Permission.ProjectMember,
Permission.ReadScheduledMaintenanceTemplate,
],
update: [],
update: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.EditScheduledMaintenanceNoteTemplate,
],
})
@TableColumn({
isDefaultValueColumn: false,
@@ -845,7 +855,12 @@ export default class ScheduledMaintenanceTemplate extends BaseModel {
Permission.ProjectMember,
Permission.ReadScheduledMaintenanceTemplate,
],
update: [],
update: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.EditScheduledMaintenanceNoteTemplate,
],
})
@TableColumn({
isDefaultValueColumn: true,
@@ -873,7 +888,12 @@ export default class ScheduledMaintenanceTemplate extends BaseModel {
Permission.ProjectMember,
Permission.ReadScheduledMaintenanceTemplate,
],
update: [],
update: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.EditScheduledMaintenanceNoteTemplate,
],
})
@TableColumn({
isDefaultValueColumn: true,
@@ -902,7 +922,12 @@ export default class ScheduledMaintenanceTemplate extends BaseModel {
Permission.ProjectMember,
Permission.ReadScheduledMaintenanceTemplate,
],
update: [],
update: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.EditScheduledMaintenanceNoteTemplate,
],
})
@TableColumn({
isDefaultValueColumn: true,

View File

@@ -434,6 +434,63 @@ export default class StatusPageSubscriber extends BaseModel {
})
public deletedByUserId?: ObjectID = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateStatusPageSubscriber,
Permission.Public,
],
read: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.ReadStatusPageSubscriber,
],
update: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.EditStatusPageSubscriber,
],
})
@TableColumn({
isDefaultValueColumn: true,
type: TableColumnType.Boolean,
title: "Is Subscription Confirmed",
description:
"Has subscriber confirmed their subscription? (for example, by clicking on a confirmation link in an email)",
})
@Column({
type: ColumnType.Boolean,
default: false,
})
public isSubscriptionConfirmed?: boolean = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,
Permission.ProjectAdmin,
Permission.ProjectMember,
Permission.CreateStatusPageSubscriber,
],
read: [],
update: [],
})
@TableColumn({
isDefaultValueColumn: false,
type: TableColumnType.ShortText,
title: "Subscription Confirmation Token",
description:
"Token used to confirm subscription. This is a random token that is sent to the subscriber's email address to confirm their subscription.",
})
@Column({
type: ColumnType.ShortText,
nullable: true,
})
public subscriptionConfirmationToken?: string = undefined;
@ColumnAccessControl({
create: [
Permission.ProjectOwner,

View File

@@ -221,14 +221,14 @@ export default class TelemetryException extends DatabaseBaseModel {
})
@TableColumn({
required: false,
type: TableColumnType.LongText,
type: TableColumnType.VeryLongText,
canReadOnRelationQuery: false,
title: "Exception Message",
description: "Exception message that was thrown by the telemetry service",
})
@Column({
nullable: true,
type: ColumnType.LongText,
type: ColumnType.VeryLongText,
})
public message?: string = undefined;
@@ -252,7 +252,7 @@ export default class TelemetryException extends DatabaseBaseModel {
})
@TableColumn({
required: false,
type: TableColumnType.LongText,
type: TableColumnType.VeryLongText,
canReadOnRelationQuery: false,
title: "Stack Trace",
description:
@@ -260,7 +260,7 @@ export default class TelemetryException extends DatabaseBaseModel {
})
@Column({
nullable: true,
type: ColumnType.LongText,
type: ColumnType.VeryLongText,
})
public stackTrace?: string = undefined;
@@ -284,7 +284,7 @@ export default class TelemetryException extends DatabaseBaseModel {
})
@TableColumn({
required: false,
type: TableColumnType.LongText,
type: TableColumnType.VeryLongText,
canReadOnRelationQuery: false,
title: "Exception Type",
description:
@@ -292,7 +292,7 @@ export default class TelemetryException extends DatabaseBaseModel {
})
@Column({
nullable: true,
type: ColumnType.LongText,
type: ColumnType.VeryLongText,
})
public exceptionType?: string = undefined;

View File

@@ -291,6 +291,17 @@ export default class BaseAnalyticsAPI<
) as any;
}
let groupBy: GroupBy<AnalyticsDataModel> | null =
req.body["groupBy"] || null;
if (groupBy && Object.keys(groupBy).length > 0) {
groupBy = JSONFunctions.deserialize(groupBy as JSONObject) as any;
}
if (groupBy && Object.keys(groupBy).length === 0) {
groupBy = null;
}
if (!aggregateBy) {
throw new BadRequestException("AggregateBy is required");
}

View File

@@ -13,7 +13,7 @@ import LIMIT_MAX from "Common/Types/Database/LimitMax";
import PositiveNumber from "Common/Types/PositiveNumber";
import Probe from "Common/Models/DatabaseModels/Probe";
export default class Ingestor extends BaseAPI<Probe, ProbeServiceType> {
export default class ProbeAPI extends BaseAPI<Probe, ProbeServiceType> {
public constructor() {
super(Probe, ProbeService);

View File

@@ -1,3 +1,4 @@
import BadRequestException from "../../Types/Exception/BadRequestException";
import LocalCache from "../Infrastructure/LocalCache";
import Express, {
ExpressRequest,
@@ -13,6 +14,9 @@ import ServerException from "Common/Types/Exception/ServerException";
export interface StatusAPIOptions {
readyCheck: () => Promise<void>;
liveCheck: () => Promise<void>;
globalCacheCheck?: (() => Promise<void>) | undefined;
analyticsDatabaseCheck?: (() => Promise<void>) | undefined;
databaseCheck?: (() => Promise<void>) | undefined;
}
export default class StatusAPI {
@@ -103,7 +107,7 @@ export default class StatusAPI {
async (req: ExpressRequest, res: ExpressResponse) => {
try {
logger.debug("Live check");
await options.readyCheck();
await options.liveCheck();
logger.info("Live check: ok");
stausLiveSuccess.add(1);
@@ -123,6 +127,94 @@ export default class StatusAPI {
},
);
// Global cache check
router.get(
"/status/global-cache",
async (req: ExpressRequest, res: ExpressResponse) => {
try {
logger.debug("Global cache check");
if (options.globalCacheCheck) {
await options.globalCacheCheck();
} else {
throw new BadRequestException("Global cache check not implemented");
}
logger.info("Global cache check: ok");
Response.sendJsonObjectResponse(req, res, {
status: "ok",
});
} catch (e) {
Response.sendErrorResponse(
req,
res,
e instanceof Exception
? e
: new ServerException("Global cache is not ready"),
);
}
},
);
// Analytics database check
router.get(
"/status/analytics-database",
async (req: ExpressRequest, res: ExpressResponse) => {
try {
logger.debug("Analytics database check");
if (options.analyticsDatabaseCheck) {
await options.analyticsDatabaseCheck();
} else {
throw new BadRequestException(
"Analytics database check not implemented",
);
}
logger.info("Analytics database check: ok");
Response.sendJsonObjectResponse(req, res, {
status: "ok",
});
} catch (e) {
Response.sendErrorResponse(
req,
res,
e instanceof Exception
? e
: new ServerException("Analytics database is not ready"),
);
}
},
);
// Database check
router.get(
"/status/database",
async (req: ExpressRequest, res: ExpressResponse) => {
try {
logger.debug("Database check");
if (options.databaseCheck) {
await options.databaseCheck();
} else {
throw new BadRequestException("Database check not implemented");
}
logger.info("Database check: ok");
Response.sendJsonObjectResponse(req, res, {
status: "ok",
});
} catch (e) {
Response.sendErrorResponse(
req,
res,
e instanceof Exception
? e
: new ServerException("Database is not ready"),
);
}
},
);
return router;
}
}

View File

@@ -82,6 +82,66 @@ export default class StatusPageAPI extends BaseAPI<
public constructor() {
super(StatusPage, StatusPageService);
// confirm subscription api
this.router.get(
`${new this.entityType()
.getCrudApiPath()
?.toString()}/confirm-subscription/:statusPageSubscriberId`,
async (req: ExpressRequest, res: ExpressResponse) => {
const token: string = req.query["verification-token"] as string;
const statusPageSubscriberId: ObjectID = new ObjectID(
req.params["statusPageSubscriberId"] as string,
);
const subscriber: StatusPageSubscriber | null =
await StatusPageSubscriberService.findOneBy({
query: {
_id: statusPageSubscriberId,
subscriptionConfirmationToken: token,
},
select: {
isSubscriptionConfirmed: true,
},
props: {
isRoot: true,
},
});
if (!subscriber) {
return Response.sendErrorResponse(
req,
res,
new NotFoundException(
"Subscriber not found or confirmation token is invalid",
),
);
}
// check if subscription confirmed already.
if (subscriber.isSubscriptionConfirmed) {
return Response.sendEmptySuccessResponse(req, res);
}
await StatusPageSubscriberService.updateOneById({
id: statusPageSubscriberId,
data: {
isSubscriptionConfirmed: true,
},
props: {
isRoot: true,
},
});
await StatusPageSubscriberService.sendYouHaveSubscribedEmail({
subscriberId: statusPageSubscriberId,
});
return Response.sendEmptySuccessResponse(req, res);
},
);
// CNAME verification api
this.router.get(
`${new this.entityType()
@@ -1033,7 +1093,18 @@ export default class StatusPageAPI extends BaseAPI<
},
});
const overallStatus: MonitorStatus | null =
this.getOverallMonitorStatus(
statusPageResources,
monitorStatuses,
monitorGroupCurrentStatuses,
);
const response: JSONObject = {
overallStatus: overallStatus
? BaseModel.toJSON(overallStatus, MonitorStatus)
: null,
scheduledMaintenanceEventsPublicNotes: BaseModel.toJSONArray(
scheduledMaintenanceEventsPublicNotes,
ScheduledMaintenancePublicNote,
@@ -2224,4 +2295,54 @@ export default class StatusPageAPI extends BaseAPI<
return response;
}
public getOverallMonitorStatus(
statusPageResources: Array<StatusPageResource>,
monitorStatuses: Array<MonitorStatus>,
monitorGroupCurrentStatuses: Dictionary<ObjectID>,
): MonitorStatus | null {
let currentStatus: MonitorStatus | null =
monitorStatuses.length > 0 && monitorStatuses[0]
? monitorStatuses[0]
: null;
const dict: Dictionary<number> = {};
for (const resource of statusPageResources) {
if (resource.monitor?.currentMonitorStatusId) {
if (
!Object.keys(dict).includes(
resource.monitor?.currentMonitorStatusId.toString() || "",
)
) {
dict[resource.monitor?.currentMonitorStatusId?.toString()] = 1;
} else {
dict[resource.monitor!.currentMonitorStatusId!.toString()]!++;
}
}
}
// check status of monitor groups.
for (const groupId in monitorGroupCurrentStatuses) {
const statusId: ObjectID | undefined =
monitorGroupCurrentStatuses[groupId];
if (statusId) {
if (!Object.keys(dict).includes(statusId.toString() || "")) {
dict[statusId.toString()] = 1;
} else {
dict[statusId.toString()]!++;
}
}
}
for (const monitorStatus of monitorStatuses) {
if (monitorStatus._id && dict[monitorStatus._id]) {
currentStatus = monitorStatus;
}
}
return currentStatus;
}
}

View File

@@ -18,6 +18,7 @@ export enum ConfigLogLevel {
WARN = "WARN",
ERROR = "ERROR",
DEBUG = "DEBUG",
OFF = "OFF",
}
export const getAllEnvVars: () => JSONObject = (): JSONObject => {
@@ -88,9 +89,27 @@ export const AppApiHostname: Hostname = Hostname.fromString(
}`,
);
export const IngestorHostname: Hostname = Hostname.fromString(
`${process.env["SERVER_INGESTOR_HOSTNAME"] || "localhost"}:${
process.env["INGESTOR_PORT"] || 80
export const ProbeIngestHostname: Hostname = Hostname.fromString(
`${process.env["SERVER_PROBE_INGEST_HOSTNAME"] || "localhost"}:${
process.env["PROBE_INGEST_PORT"] || 80
}`,
);
export const OpenTelemetryIngestHostname: Hostname = Hostname.fromString(
`${process.env["SERVER_OPEN_TELEMETRY_INGEST_HOSTNAME"] || "localhost"}:${
process.env["OPEN_TELEMETRY_INGEST_PORT"] || 80
}`,
);
export const IncomingRequestIngestHostname: Hostname = Hostname.fromString(
`${process.env["SERVER_INCOMING_REQUEST_INGEST_HOSTNAME"] || "localhost"}:${
process.env["INCOMING_REQUEST_INGEST_PORT"] || 80
}`,
);
export const FluentIngestHostname: Hostname = Hostname.fromString(
`${process.env["SERVER_FLUENT_INGEST_HOSTNAME"] || "localhost"}:${
process.env["FLUENT_INGEST_PORT"] || 80
}`,
);
@@ -162,6 +181,10 @@ export const ShouldRedisTlsEnable: boolean = Boolean(
RedisTlsCa || (RedisTlsCert && RedisTlsKey),
);
export const RedisIPFamily: number = process.env["REDIS_IP_FAMILY"]
? Number(process.env["REDIS_IP_FAMILY"])
: 4;
export const IsProduction: boolean =
process.env["ENVIRONMENT"] === "production";
@@ -179,6 +202,9 @@ export const AnalyticsHost: string = process.env["ANALYTICS_HOST"] || "";
export const DisableAutomaticIncidentCreation: boolean =
process.env["DISABLE_AUTOMATIC_INCIDENT_CREATION"] === "true";
export const DisableAutomaticAlertCreation: boolean =
process.env["DISABLE_AUTOMATIC_ALERT_CREATION"] === "true";
export const ClickhouseHost: Hostname = Hostname.fromString(
process.env["CLICKHOUSE_HOST"] || "clickhouse",
);

View File

@@ -1,3 +1,4 @@
import { NodeClickHouseClientConfigOptions } from "@clickhouse/client/dist/config";
import {
ClickHouseIsHostHttps,
ClickhouseDatabase,
@@ -10,14 +11,17 @@ import {
ClickhouseUsername,
ShouldClickhouseSslEnable,
} from "../EnvironmentConfig";
import { NodeClickHouseClientConfigOptions } from "@clickhouse/client/dist/client";
import Hostname from "../../Types/API/Hostname";
export type ClickHouseClientConfigOptions = NodeClickHouseClientConfigOptions;
const hostProtocol: string = ClickHouseIsHostHttps ? "https" : "http";
const clickhouseHost: Hostname = ClickhouseHost || new Hostname("clickhouse");
const clickhousePort: string = (ClickhousePort || 8123).toString();
const options: ClickHouseClientConfigOptions = {
host: `${hostProtocol}://${ClickhouseHost.toString()}:${ClickhousePort.toNumber()}`,
url: `${hostProtocol}://${clickhouseHost.toString()}:${clickhousePort}`,
username: ClickhouseUsername,
password: ClickhousePassword,
database: ClickhouseDatabase,

View File

@@ -4,12 +4,16 @@ import {
dataSourceOptions,
testDataSourceOptions,
} from "./ClickhouseConfig";
import { ClickHouseClient, PingResult, createClient } from "@clickhouse/client";
import { PingResult, createClient, ClickHouseClient } from "@clickhouse/client";
import DatabaseNotConnectedException from "Common/Types/Exception/DatabaseNotConnectedException";
import Sleep from "Common/Types/Sleep";
import Stream from "stream";
import API from "../../Utils/API";
import URL from "../../Types/API/URL";
import HTTPErrorResponse from "../../Types/API/HTTPErrorResponse";
import HTTPResponse from "../../Types/API/HTTPResponse";
import { JSONObject } from "../../Types/JSON";
export type ClickhouseClient = ClickHouseClient<Stream.Readable>;
export type ClickhouseClient = ClickHouseClient;
export default class ClickhouseDatabase {
private dataSource!: ClickhouseClient | null;
@@ -101,7 +105,23 @@ export default class ClickhouseDatabase {
public async checkConnnectionStatus(): Promise<boolean> {
// Ping clickhouse to check if the connection is still alive
try {
const result: PingResult | undefined = await this.getDataSource()?.ping();
logger.debug(
"Checking Clickhouse Connection Status - pinging clickhouse",
);
const dbUrl: string | undefined = this.getDatasourceOptions().url as
| string
| undefined;
if (!dbUrl) {
throw new DatabaseNotConnectedException("Clickhouse URL not found");
}
const result: HTTPErrorResponse | HTTPResponse<JSONObject> =
await API.get(URL.fromString(dbUrl.toString()));
logger.debug("Clickhouse Connection Status Result");
logger.debug(result);
if (!result) {
throw new DatabaseNotConnectedException(
@@ -109,13 +129,24 @@ export default class ClickhouseDatabase {
);
}
if (result?.success === false) {
if (result instanceof HTTPErrorResponse) {
throw new DatabaseNotConnectedException(
"Clickhouse Database is not connected",
);
}
return true;
if (
result.data &&
((result.data as JSONObject)["data"] as string) &&
((result.data as JSONObject)["data"] as string).toString().trim() ===
"Ok."
) {
return true;
}
throw new DatabaseNotConnectedException(
"Clickhouse Database is not connected",
);
} catch (err) {
logger.error("Clickhouse Connection Lost");
logger.error(err);

View File

@@ -0,0 +1,17 @@
import { MigrationInterface, QueryRunner } from "typeorm";
export class MigrationName1731433043136 implements MigrationInterface {
  public name = "MigrationName1731433043136";

  /** Drops the index on Incident."rootCause" (matches removal of @Index from the model). */
  public async up(queryRunner: QueryRunner): Promise<void> {
    const dropIndexSql: string = `DROP INDEX "public"."IDX_5218e92f700d91afe6a8db79cb"`;
    await queryRunner.query(dropIndexSql);
  }

  /** Recreates the index exactly as it existed before this migration ran. */
  public async down(queryRunner: QueryRunner): Promise<void> {
    const createIndexSql: string = `CREATE INDEX "IDX_5218e92f700d91afe6a8db79cb" ON "Incident" ("rootCause") `;
    await queryRunner.query(createIndexSql);
  }
}

View File

@@ -0,0 +1,17 @@
import { MigrationInterface, QueryRunner } from "typeorm";
export class MigrationName1731433309124 implements MigrationInterface {
  public name = "MigrationName1731433309124";

  /** Drops the index on Alert."rootCause" (matches removal of @Index from the model). */
  public async up(queryRunner: QueryRunner): Promise<void> {
    const dropIndexSql: string = `DROP INDEX "public"."IDX_fc40ea6a9ad55f29bca4f4a15d"`;
    await queryRunner.query(dropIndexSql);
  }

  /** Recreates the index exactly as it existed before this migration ran. */
  public async down(queryRunner: QueryRunner): Promise<void> {
    const createIndexSql: string = `CREATE INDEX "IDX_fc40ea6a9ad55f29bca4f4a15d" ON "Alert" ("rootCause") `;
    await queryRunner.query(createIndexSql);
  }
}

View File

@@ -0,0 +1,17 @@
import { MigrationInterface, QueryRunner } from "typeorm";
export class MigrationName1731435267537 implements MigrationInterface {
  public name = "MigrationName1731435267537";

  /** Drops the index on MonitorStatusTimeline."rootCause" (matches removal of @Index from the model). */
  public async up(queryRunner: QueryRunner): Promise<void> {
    const dropIndexSql: string = `DROP INDEX "public"."IDX_01ac1d1ef9e72aeb6dac6575dd"`;
    await queryRunner.query(dropIndexSql);
  }

  /** Recreates the index exactly as it existed before this migration ran. */
  public async down(queryRunner: QueryRunner): Promise<void> {
    const createIndexSql: string = `CREATE INDEX "IDX_01ac1d1ef9e72aeb6dac6575dd" ON "MonitorStatusTimeline" ("rootCause") `;
    await queryRunner.query(createIndexSql);
  }
}

View File

@@ -0,0 +1,17 @@
import { MigrationInterface, QueryRunner } from "typeorm";
export class MigrationName1731435514287 implements MigrationInterface {
  public name = "MigrationName1731435514287";

  /** Drops the index on IncidentStateTimeline."rootCause" (matches removal of @Index from the model). */
  public async up(queryRunner: QueryRunner): Promise<void> {
    const dropIndexSql: string = `DROP INDEX "public"."IDX_7db6b1a8fbbc9eb44c2e7f5047"`;
    await queryRunner.query(dropIndexSql);
  }

  /** Recreates the index exactly as it existed before this migration ran. */
  public async down(queryRunner: QueryRunner): Promise<void> {
    const createIndexSql: string = `CREATE INDEX "IDX_7db6b1a8fbbc9eb44c2e7f5047" ON "IncidentStateTimeline" ("rootCause") `;
    await queryRunner.query(createIndexSql);
  }
}

View File

@@ -0,0 +1,30 @@
import { MigrationInterface, QueryRunner } from "typeorm";
export class MigrationName1732553444010 implements MigrationInterface {
  public name = "MigrationName1732553444010";

  // TelemetryException columns widened from varchar to text (model moved from
  // LongText to VeryLongText). Same set is altered in both directions.
  private static readonly widenedColumns: ReadonlyArray<string> = [
    "message",
    "stackTrace",
    "exceptionType",
  ];

  /** Widen each column to unbounded text. */
  public async up(queryRunner: QueryRunner): Promise<void> {
    for (const columnName of MigrationName1732553444010.widenedColumns) {
      await queryRunner.query(
        `ALTER TABLE "TelemetryException" ALTER COLUMN "${columnName}" TYPE text`,
      );
    }
  }

  /** Revert each column back to varchar. */
  public async down(queryRunner: QueryRunner): Promise<void> {
    for (const columnName of MigrationName1732553444010.widenedColumns) {
      await queryRunner.query(
        `ALTER TABLE "TelemetryException" ALTER COLUMN "${columnName}" TYPE varchar`,
      );
    }
  }
}

View File

@@ -0,0 +1,23 @@
import { MigrationInterface, QueryRunner } from "typeorm";

export class MigrationName1734435866602 implements MigrationInterface {
  public name = "MigrationName1734435866602";

  /**
   * Adds subscription-confirmation tracking to StatusPageSubscriber:
   * a confirmed flag (defaulting to false) and a nullable token column.
   */
  public async up(queryRunner: QueryRunner): Promise<void> {
    const addColumnStatements: string[] = [
      `ALTER TABLE "StatusPageSubscriber" ADD "isSubscriptionConfirmed" boolean NOT NULL DEFAULT false`,
      `ALTER TABLE "StatusPageSubscriber" ADD "subscriptionConfirmationToken" character varying`,
    ];
    for (const statement of addColumnStatements) {
      await queryRunner.query(statement);
    }
  }

  /** Drops the two columns added by up(), in reverse order. */
  public async down(queryRunner: QueryRunner): Promise<void> {
    const dropColumnStatements: string[] = [
      `ALTER TABLE "StatusPageSubscriber" DROP COLUMN "subscriptionConfirmationToken"`,
      `ALTER TABLE "StatusPageSubscriber" DROP COLUMN "isSubscriptionConfirmed"`,
    ];
    for (const statement of dropColumnStatements) {
      await queryRunner.query(statement);
    }
  }
}

View File

@@ -0,0 +1,51 @@
import { MigrationInterface, QueryRunner } from "typeorm";

export class MigrationName1736364478985 implements MigrationInterface {
  public name = "MigrationName1736364478985";

  /**
   * Creates the IncidentLog table (markdown log entries per incident),
   * its projectId/incidentId indexes, and foreign keys to Project,
   * Incident, and the creating/deleting User. Statements run in order:
   * the table must exist before its indexes and constraints.
   */
  public async up(queryRunner: QueryRunner): Promise<void> {
    const statements: string[] = [
      `CREATE TABLE "IncidentLog" ("_id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updatedAt" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "deletedAt" TIMESTAMP WITH TIME ZONE, "version" integer NOT NULL, "projectId" uuid NOT NULL, "incidentId" uuid NOT NULL, "createdByUserId" uuid, "deletedByUserId" uuid, "logInMarkdown" text NOT NULL, "moreInformationInMarkdown" text NOT NULL, "incidentLogEvent" character varying NOT NULL, CONSTRAINT "PK_947cb9f32cf204561d10d64adeb" PRIMARY KEY ("_id"))`,
      `CREATE INDEX "IDX_855797e41af7d35b18a7f3f97b" ON "IncidentLog" ("projectId") `,
      `CREATE INDEX "IDX_1eff2f3d075754ef9c16e8b962" ON "IncidentLog" ("incidentId") `,
      `ALTER TABLE "IncidentLog" ADD CONSTRAINT "FK_855797e41af7d35b18a7f3f97bd" FOREIGN KEY ("projectId") REFERENCES "Project"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`,
      `ALTER TABLE "IncidentLog" ADD CONSTRAINT "FK_1eff2f3d075754ef9c16e8b962c" FOREIGN KEY ("incidentId") REFERENCES "Incident"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`,
      `ALTER TABLE "IncidentLog" ADD CONSTRAINT "FK_da6bb8bf63b18a7ddc35cc2901a" FOREIGN KEY ("createdByUserId") REFERENCES "User"("_id") ON DELETE SET NULL ON UPDATE NO ACTION`,
      `ALTER TABLE "IncidentLog" ADD CONSTRAINT "FK_bb1b8b83ffdfc702088b74f2e16" FOREIGN KEY ("deletedByUserId") REFERENCES "User"("_id") ON DELETE SET NULL ON UPDATE NO ACTION`,
    ];
    for (const statement of statements) {
      await queryRunner.query(statement);
    }
  }

  /** Tears down IncidentLog: constraints, then indexes, then the table. */
  public async down(queryRunner: QueryRunner): Promise<void> {
    const statements: string[] = [
      `ALTER TABLE "IncidentLog" DROP CONSTRAINT "FK_bb1b8b83ffdfc702088b74f2e16"`,
      `ALTER TABLE "IncidentLog" DROP CONSTRAINT "FK_da6bb8bf63b18a7ddc35cc2901a"`,
      `ALTER TABLE "IncidentLog" DROP CONSTRAINT "FK_1eff2f3d075754ef9c16e8b962c"`,
      `ALTER TABLE "IncidentLog" DROP CONSTRAINT "FK_855797e41af7d35b18a7f3f97bd"`,
      `DROP INDEX "public"."IDX_1eff2f3d075754ef9c16e8b962"`,
      `DROP INDEX "public"."IDX_855797e41af7d35b18a7f3f97b"`,
      `DROP TABLE "IncidentLog"`,
    ];
    for (const statement of statements) {
      await queryRunner.query(statement);
    }
  }
}

View File

@@ -0,0 +1,63 @@
import { MigrationInterface, QueryRunner } from "typeorm";

export class MigrationName1736364957990 implements MigrationInterface {
  public name = "MigrationName1736364957990";

  /**
   * Creates the AlertLog table (markdown log entries per alert) with its
   * indexes and foreign keys. Also re-syncs the JSON column defaults on
   * OnCallDutyPolicyScheduleLayer — NOTE(review): those two statements look
   * unrelated to AlertLog (generator re-sync of whitespace in the default
   * literal); confirm before removing.
   */
  public async up(queryRunner: QueryRunner): Promise<void> {
    const statements: string[] = [
      `CREATE TABLE "AlertLog" ("_id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updatedAt" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "deletedAt" TIMESTAMP WITH TIME ZONE, "version" integer NOT NULL, "projectId" uuid NOT NULL, "alertId" uuid NOT NULL, "createdByUserId" uuid, "deletedByUserId" uuid, "logInMarkdown" text NOT NULL, "moreInformationInMarkdown" text NOT NULL, "alertLogEvent" character varying NOT NULL, CONSTRAINT "PK_500826238fa54528b0026f55d47" PRIMARY KEY ("_id"))`,
      `CREATE INDEX "IDX_d5d56f9ed2c4c72745372a1ac6" ON "AlertLog" ("projectId") `,
      `CREATE INDEX "IDX_52bbabed66e4e728441d49478f" ON "AlertLog" ("alertId") `,
      `ALTER TABLE "OnCallDutyPolicyScheduleLayer" ALTER COLUMN "rotation" SET DEFAULT '{"_type":"Recurring","value":{"intervalType":"Day","intervalCount":{"_type":"PositiveNumber","value":1}}}'`,
      `ALTER TABLE "OnCallDutyPolicyScheduleLayer" ALTER COLUMN "restrictionTimes" SET DEFAULT '{"_type":"RestrictionTimes","value":{"restictionType":"None","dayRestrictionTimes":null,"weeklyRestrictionTimes":[]}}'`,
      `ALTER TABLE "AlertLog" ADD CONSTRAINT "FK_d5d56f9ed2c4c72745372a1ac6f" FOREIGN KEY ("projectId") REFERENCES "Project"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`,
      `ALTER TABLE "AlertLog" ADD CONSTRAINT "FK_52bbabed66e4e728441d49478f8" FOREIGN KEY ("alertId") REFERENCES "Alert"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`,
      `ALTER TABLE "AlertLog" ADD CONSTRAINT "FK_f5f832aad105579e95a09e1ddd0" FOREIGN KEY ("createdByUserId") REFERENCES "User"("_id") ON DELETE SET NULL ON UPDATE NO ACTION`,
      `ALTER TABLE "AlertLog" ADD CONSTRAINT "FK_7ca9046915f6de6e7a199588d26" FOREIGN KEY ("deletedByUserId") REFERENCES "User"("_id") ON DELETE SET NULL ON UPDATE NO ACTION`,
    ];
    for (const statement of statements) {
      await queryRunner.query(statement);
    }
  }

  /** Reverses up(): constraints, previous (spaced) defaults, indexes, table. */
  public async down(queryRunner: QueryRunner): Promise<void> {
    const statements: string[] = [
      `ALTER TABLE "AlertLog" DROP CONSTRAINT "FK_7ca9046915f6de6e7a199588d26"`,
      `ALTER TABLE "AlertLog" DROP CONSTRAINT "FK_f5f832aad105579e95a09e1ddd0"`,
      `ALTER TABLE "AlertLog" DROP CONSTRAINT "FK_52bbabed66e4e728441d49478f8"`,
      `ALTER TABLE "AlertLog" DROP CONSTRAINT "FK_d5d56f9ed2c4c72745372a1ac6f"`,
      `ALTER TABLE "OnCallDutyPolicyScheduleLayer" ALTER COLUMN "restrictionTimes" SET DEFAULT '{"_type": "RestrictionTimes", "value": {"restictionType": "None", "dayRestrictionTimes": null, "weeklyRestrictionTimes": []}}'`,
      `ALTER TABLE "OnCallDutyPolicyScheduleLayer" ALTER COLUMN "rotation" SET DEFAULT '{"_type": "Recurring", "value": {"intervalType": "Day", "intervalCount": {"_type": "PositiveNumber", "value": 1}}}'`,
      `DROP INDEX "public"."IDX_52bbabed66e4e728441d49478f"`,
      `DROP INDEX "public"."IDX_d5d56f9ed2c4c72745372a1ac6"`,
      `DROP TABLE "AlertLog"`,
    ];
    for (const statement of statements) {
      await queryRunner.query(statement);
    }
  }
}

View File

@@ -0,0 +1,51 @@
import { MigrationInterface, QueryRunner } from "typeorm";

export class MigrationName1736365532085 implements MigrationInterface {
  public name = "MigrationName1736365532085";

  /**
   * Creates the ScheduledMaintenanceLog table (markdown log entries per
   * scheduled maintenance), its indexes, and foreign keys to Project,
   * ScheduledMaintenance and User.
   */
  public async up(queryRunner: QueryRunner): Promise<void> {
    const statements: string[] = [
      `CREATE TABLE "ScheduledMaintenanceLog" ("_id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updatedAt" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "deletedAt" TIMESTAMP WITH TIME ZONE, "version" integer NOT NULL, "projectId" uuid NOT NULL, "scheduledMaintenanceId" uuid NOT NULL, "createdByUserId" uuid, "deletedByUserId" uuid, "logInMarkdown" text NOT NULL, "moreInformationInMarkdown" text NOT NULL, "scheduledMaintenanceLogEvent" character varying NOT NULL, CONSTRAINT "PK_27b89f28bf48418fabba9a1ea14" PRIMARY KEY ("_id"))`,
      `CREATE INDEX "IDX_9239de1ee33f9505c30f255a99" ON "ScheduledMaintenanceLog" ("projectId") `,
      `CREATE INDEX "IDX_58e403ba261dfa94addb5f04d3" ON "ScheduledMaintenanceLog" ("scheduledMaintenanceId") `,
      `ALTER TABLE "ScheduledMaintenanceLog" ADD CONSTRAINT "FK_9239de1ee33f9505c30f255a994" FOREIGN KEY ("projectId") REFERENCES "Project"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`,
      `ALTER TABLE "ScheduledMaintenanceLog" ADD CONSTRAINT "FK_58e403ba261dfa94addb5f04d36" FOREIGN KEY ("scheduledMaintenanceId") REFERENCES "ScheduledMaintenance"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`,
      `ALTER TABLE "ScheduledMaintenanceLog" ADD CONSTRAINT "FK_9152528e4f7f59adaba3e9bc41f" FOREIGN KEY ("createdByUserId") REFERENCES "User"("_id") ON DELETE SET NULL ON UPDATE NO ACTION`,
      `ALTER TABLE "ScheduledMaintenanceLog" ADD CONSTRAINT "FK_a957f435d1504f41808f20a2c45" FOREIGN KEY ("deletedByUserId") REFERENCES "User"("_id") ON DELETE SET NULL ON UPDATE NO ACTION`,
    ];
    for (const statement of statements) {
      await queryRunner.query(statement);
    }
  }

  /** Reverses up(): constraints, then indexes, then the table. */
  public async down(queryRunner: QueryRunner): Promise<void> {
    const statements: string[] = [
      `ALTER TABLE "ScheduledMaintenanceLog" DROP CONSTRAINT "FK_a957f435d1504f41808f20a2c45"`,
      `ALTER TABLE "ScheduledMaintenanceLog" DROP CONSTRAINT "FK_9152528e4f7f59adaba3e9bc41f"`,
      `ALTER TABLE "ScheduledMaintenanceLog" DROP CONSTRAINT "FK_58e403ba261dfa94addb5f04d36"`,
      `ALTER TABLE "ScheduledMaintenanceLog" DROP CONSTRAINT "FK_9239de1ee33f9505c30f255a994"`,
      `DROP INDEX "public"."IDX_58e403ba261dfa94addb5f04d3"`,
      `DROP INDEX "public"."IDX_9239de1ee33f9505c30f255a99"`,
      `DROP TABLE "ScheduledMaintenanceLog"`,
    ];
    for (const statement of statements) {
      await queryRunner.query(statement);
    }
  }
}

View File

@@ -0,0 +1,23 @@
import { MigrationInterface, QueryRunner } from "typeorm";

export class MigrationName1736675947746 implements MigrationInterface {
  public name = "MigrationName1736675947746";

  /** Adds a nullable Incident."incidentNumber" column and indexes it. */
  public async up(queryRunner: QueryRunner): Promise<void> {
    const statements: string[] = [
      `ALTER TABLE "Incident" ADD "incidentNumber" integer`,
      `CREATE INDEX "IDX_0eca9ce7d12a4c472386dfc781" ON "Incident" ("incidentNumber") `,
    ];
    for (const statement of statements) {
      await queryRunner.query(statement);
    }
  }

  /** Drops the index first, then the column itself. */
  public async down(queryRunner: QueryRunner): Promise<void> {
    const statements: string[] = [
      `DROP INDEX "public"."IDX_0eca9ce7d12a4c472386dfc781"`,
      `ALTER TABLE "Incident" DROP COLUMN "incidentNumber"`,
    ];
    for (const statement of statements) {
      await queryRunner.query(statement);
    }
  }
}

View File

@@ -0,0 +1,29 @@
import { MigrationInterface, QueryRunner } from "typeorm";

export class MigrationName1736703138918 implements MigrationInterface {
  public name = "MigrationName1736703138918";

  /** Adds a required severity column to each of the three *Log tables. */
  public async up(queryRunner: QueryRunner): Promise<void> {
    const addSeverityStatements: string[] = [
      `ALTER TABLE "IncidentLog" ADD "incidentLogSeverity" character varying NOT NULL`,
      `ALTER TABLE "AlertLog" ADD "alertLogSeverity" character varying NOT NULL`,
      `ALTER TABLE "ScheduledMaintenanceLog" ADD "scheduledMaintenanceLogSeverity" character varying NOT NULL`,
    ];
    for (const statement of addSeverityStatements) {
      await queryRunner.query(statement);
    }
  }

  /** Removes the severity columns, in reverse order of up(). */
  public async down(queryRunner: QueryRunner): Promise<void> {
    const dropSeverityStatements: string[] = [
      `ALTER TABLE "ScheduledMaintenanceLog" DROP COLUMN "scheduledMaintenanceLogSeverity"`,
      `ALTER TABLE "AlertLog" DROP COLUMN "alertLogSeverity"`,
      `ALTER TABLE "IncidentLog" DROP COLUMN "incidentLogSeverity"`,
    ];
    for (const statement of dropSeverityStatements) {
      await queryRunner.query(statement);
    }
  }
}

View File

@@ -0,0 +1,137 @@
import { MigrationInterface, QueryRunner } from "typeorm";
// Adds the per-resource activity "feed" tables: IncidentFeed, AlertFeed and
// ScheduledMaintenanceFeed. Each table stores markdown feed entries with a
// display color, indexed by project and parent resource, with foreign keys
// to Project, the parent record, and the creating/deleting User. Kept as
// generated: statement order matters (tables and indexes before FKs).
export class MigrationName1736780194077 implements MigrationInterface {
public name = "MigrationName1736780194077";
public async up(queryRunner: QueryRunner): Promise<void> {
// IncidentFeed table plus its projectId/incidentId indexes.
await queryRunner.query(
`CREATE TABLE "IncidentFeed" ("_id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updatedAt" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "deletedAt" TIMESTAMP WITH TIME ZONE, "version" integer NOT NULL, "projectId" uuid NOT NULL, "incidentId" uuid NOT NULL, "createdByUserId" uuid, "deletedByUserId" uuid, "feedInfoInMarkdown" text NOT NULL, "moreInformationInMarkdown" text NOT NULL, "incidentFeedEventType" character varying NOT NULL, "displayColor" character varying(7) NOT NULL, CONSTRAINT "PK_8188c79d1ed22013205ff324dea" PRIMARY KEY ("_id"))`,
);
await queryRunner.query(
`CREATE INDEX "IDX_32ae47fa45018ecdb7f28c6468" ON "IncidentFeed" ("projectId") `,
);
await queryRunner.query(
`CREATE INDEX "IDX_cf4aea7310bb855873fc40f244" ON "IncidentFeed" ("incidentId") `,
);
// AlertFeed table plus its projectId/alertId indexes.
await queryRunner.query(
`CREATE TABLE "AlertFeed" ("_id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updatedAt" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "deletedAt" TIMESTAMP WITH TIME ZONE, "version" integer NOT NULL, "projectId" uuid NOT NULL, "alertId" uuid NOT NULL, "createdByUserId" uuid, "deletedByUserId" uuid, "feedInfoInMarkdown" text NOT NULL, "moreInformationInMarkdown" text NOT NULL, "alertFeedEventType" character varying NOT NULL, "displayColor" character varying(7) NOT NULL, CONSTRAINT "PK_d5f629abd40a51d58a35423b361" PRIMARY KEY ("_id"))`,
);
await queryRunner.query(
`CREATE INDEX "IDX_f430519f21c327c14c12e4f106" ON "AlertFeed" ("projectId") `,
);
await queryRunner.query(
`CREATE INDEX "IDX_f74177b6675d92243cc0794bd3" ON "AlertFeed" ("alertId") `,
);
// ScheduledMaintenanceFeed table plus its indexes.
await queryRunner.query(
`CREATE TABLE "ScheduledMaintenanceFeed" ("_id" uuid NOT NULL DEFAULT uuid_generate_v4(), "createdAt" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updatedAt" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "deletedAt" TIMESTAMP WITH TIME ZONE, "version" integer NOT NULL, "projectId" uuid NOT NULL, "scheduledMaintenanceId" uuid NOT NULL, "createdByUserId" uuid, "deletedByUserId" uuid, "feedInfoInMarkdown" text NOT NULL, "moreInformationInMarkdown" text NOT NULL, "scheduledMaintenanceFeedEventType" character varying NOT NULL, "displayColor" character varying(7) NOT NULL, CONSTRAINT "PK_ced33ccb5551624e432b2df6513" PRIMARY KEY ("_id"))`,
);
await queryRunner.query(
`CREATE INDEX "IDX_416c6ded7f17b15e9a83114740" ON "ScheduledMaintenanceFeed" ("projectId") `,
);
await queryRunner.query(
`CREATE INDEX "IDX_ce3b353bbd3e1695c0ffb2d235" ON "ScheduledMaintenanceFeed" ("scheduledMaintenanceId") `,
);
// Foreign keys for IncidentFeed: project/incident cascade on delete,
// user references null out when the user is deleted.
await queryRunner.query(
`ALTER TABLE "IncidentFeed" ADD CONSTRAINT "FK_32ae47fa45018ecdb7f28c64685" FOREIGN KEY ("projectId") REFERENCES "Project"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`,
);
await queryRunner.query(
`ALTER TABLE "IncidentFeed" ADD CONSTRAINT "FK_cf4aea7310bb855873fc40f2441" FOREIGN KEY ("incidentId") REFERENCES "Incident"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`,
);
await queryRunner.query(
`ALTER TABLE "IncidentFeed" ADD CONSTRAINT "FK_4458fd00d52521ae4333e74ddbd" FOREIGN KEY ("createdByUserId") REFERENCES "User"("_id") ON DELETE SET NULL ON UPDATE NO ACTION`,
);
await queryRunner.query(
`ALTER TABLE "IncidentFeed" ADD CONSTRAINT "FK_f1ee9faba64e96f91925247aae3" FOREIGN KEY ("deletedByUserId") REFERENCES "User"("_id") ON DELETE SET NULL ON UPDATE NO ACTION`,
);
// Foreign keys for AlertFeed (same pattern as IncidentFeed).
await queryRunner.query(
`ALTER TABLE "AlertFeed" ADD CONSTRAINT "FK_f430519f21c327c14c12e4f1063" FOREIGN KEY ("projectId") REFERENCES "Project"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`,
);
await queryRunner.query(
`ALTER TABLE "AlertFeed" ADD CONSTRAINT "FK_f74177b6675d92243cc0794bd3f" FOREIGN KEY ("alertId") REFERENCES "Alert"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`,
);
await queryRunner.query(
`ALTER TABLE "AlertFeed" ADD CONSTRAINT "FK_2eda7dbbc78de28f653812b5e3d" FOREIGN KEY ("createdByUserId") REFERENCES "User"("_id") ON DELETE SET NULL ON UPDATE NO ACTION`,
);
await queryRunner.query(
`ALTER TABLE "AlertFeed" ADD CONSTRAINT "FK_f0e72673c38f18ed84f0e94a5a1" FOREIGN KEY ("deletedByUserId") REFERENCES "User"("_id") ON DELETE SET NULL ON UPDATE NO ACTION`,
);
// Foreign keys for ScheduledMaintenanceFeed (same pattern).
await queryRunner.query(
`ALTER TABLE "ScheduledMaintenanceFeed" ADD CONSTRAINT "FK_416c6ded7f17b15e9a831147403" FOREIGN KEY ("projectId") REFERENCES "Project"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`,
);
await queryRunner.query(
`ALTER TABLE "ScheduledMaintenanceFeed" ADD CONSTRAINT "FK_ce3b353bbd3e1695c0ffb2d2354" FOREIGN KEY ("scheduledMaintenanceId") REFERENCES "ScheduledMaintenance"("_id") ON DELETE CASCADE ON UPDATE NO ACTION`,
);
await queryRunner.query(
`ALTER TABLE "ScheduledMaintenanceFeed" ADD CONSTRAINT "FK_fc34cf1a5eb488310bbe7c6a46a" FOREIGN KEY ("createdByUserId") REFERENCES "User"("_id") ON DELETE SET NULL ON UPDATE NO ACTION`,
);
await queryRunner.query(
`ALTER TABLE "ScheduledMaintenanceFeed" ADD CONSTRAINT "FK_8374052884c5d75f5018c1dc908" FOREIGN KEY ("deletedByUserId") REFERENCES "User"("_id") ON DELETE SET NULL ON UPDATE NO ACTION`,
);
}
public async down(queryRunner: QueryRunner): Promise<void> {
// Drop all foreign keys first (reverse creation order), then indexes,
// then the tables themselves.
await queryRunner.query(
`ALTER TABLE "ScheduledMaintenanceFeed" DROP CONSTRAINT "FK_8374052884c5d75f5018c1dc908"`,
);
await queryRunner.query(
`ALTER TABLE "ScheduledMaintenanceFeed" DROP CONSTRAINT "FK_fc34cf1a5eb488310bbe7c6a46a"`,
);
await queryRunner.query(
`ALTER TABLE "ScheduledMaintenanceFeed" DROP CONSTRAINT "FK_ce3b353bbd3e1695c0ffb2d2354"`,
);
await queryRunner.query(
`ALTER TABLE "ScheduledMaintenanceFeed" DROP CONSTRAINT "FK_416c6ded7f17b15e9a831147403"`,
);
await queryRunner.query(
`ALTER TABLE "AlertFeed" DROP CONSTRAINT "FK_f0e72673c38f18ed84f0e94a5a1"`,
);
await queryRunner.query(
`ALTER TABLE "AlertFeed" DROP CONSTRAINT "FK_2eda7dbbc78de28f653812b5e3d"`,
);
await queryRunner.query(
`ALTER TABLE "AlertFeed" DROP CONSTRAINT "FK_f74177b6675d92243cc0794bd3f"`,
);
await queryRunner.query(
`ALTER TABLE "AlertFeed" DROP CONSTRAINT "FK_f430519f21c327c14c12e4f1063"`,
);
await queryRunner.query(
`ALTER TABLE "IncidentFeed" DROP CONSTRAINT "FK_f1ee9faba64e96f91925247aae3"`,
);
await queryRunner.query(
`ALTER TABLE "IncidentFeed" DROP CONSTRAINT "FK_4458fd00d52521ae4333e74ddbd"`,
);
await queryRunner.query(
`ALTER TABLE "IncidentFeed" DROP CONSTRAINT "FK_cf4aea7310bb855873fc40f2441"`,
);
await queryRunner.query(
`ALTER TABLE "IncidentFeed" DROP CONSTRAINT "FK_32ae47fa45018ecdb7f28c64685"`,
);
// NOTE(review): these two default re-syncs on OnCallDutyPolicyScheduleLayer
// appear unrelated to the feed tables (generator whitespace re-sync of the
// JSON default literals) - confirm before changing.
await queryRunner.query(
`ALTER TABLE "OnCallDutyPolicyScheduleLayer" ALTER COLUMN "restrictionTimes" SET DEFAULT '{"_type": "RestrictionTimes", "value": {"restictionType": "None", "dayRestrictionTimes": null, "weeklyRestrictionTimes": []}}'`,
);
await queryRunner.query(
`ALTER TABLE "OnCallDutyPolicyScheduleLayer" ALTER COLUMN "rotation" SET DEFAULT '{"_type": "Recurring", "value": {"intervalType": "Day", "intervalCount": {"_type": "PositiveNumber", "value": 1}}}'`,
);
// ScheduledMaintenanceFeed: indexes, then table.
await queryRunner.query(
`DROP INDEX "public"."IDX_ce3b353bbd3e1695c0ffb2d235"`,
);
await queryRunner.query(
`DROP INDEX "public"."IDX_416c6ded7f17b15e9a83114740"`,
);
await queryRunner.query(`DROP TABLE "ScheduledMaintenanceFeed"`);
// AlertFeed: indexes, then table.
await queryRunner.query(
`DROP INDEX "public"."IDX_f74177b6675d92243cc0794bd3"`,
);
await queryRunner.query(
`DROP INDEX "public"."IDX_f430519f21c327c14c12e4f106"`,
);
await queryRunner.query(`DROP TABLE "AlertFeed"`);
// IncidentFeed: indexes, then table.
await queryRunner.query(
`DROP INDEX "public"."IDX_cf4aea7310bb855873fc40f244"`,
);
await queryRunner.query(
`DROP INDEX "public"."IDX_32ae47fa45018ecdb7f28c6468"`,
);
await queryRunner.query(`DROP TABLE "IncidentFeed"`);
}
}

View File

@@ -0,0 +1,16 @@
import { MigrationInterface, QueryRunner } from "typeorm";

export class MigrationName1736780194078 implements MigrationInterface {
  public name = "MigrationName1736780194078";

  /**
   * Drops the superseded IncidentLog, AlertLog and ScheduledMaintenanceLog
   * tables. Uses IF EXISTS so the migration is idempotent and does not fail
   * on a database where one of the tables was already removed.
   */
  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`DROP TABLE IF EXISTS "IncidentLog"`);
    await queryRunner.query(`DROP TABLE IF EXISTS "AlertLog"`);
    await queryRunner.query(`DROP TABLE IF EXISTS "ScheduledMaintenanceLog"`);
  }

  /** Irreversible: the dropped tables (and their data) are not restored. */
  public async down(_queryRunner: QueryRunner): Promise<void> {
    // do nothing.
  }
}

View File

@@ -0,0 +1,29 @@
import { MigrationInterface, QueryRunner } from "typeorm";

export class MigrationName1736787495707 implements MigrationInterface {
  public name = "MigrationName1736787495707";

  // Feed tables whose "moreInformationInMarkdown" column becomes nullable.
  private static readonly feedTables: string[] = [
    "IncidentFeed",
    "AlertFeed",
    "ScheduledMaintenanceFeed",
  ];

  /** Makes moreInformationInMarkdown nullable on every feed table. */
  public async up(queryRunner: QueryRunner): Promise<void> {
    for (const table of MigrationName1736787495707.feedTables) {
      await queryRunner.query(
        `ALTER TABLE "${table}" ALTER COLUMN "moreInformationInMarkdown" DROP NOT NULL`,
      );
    }
  }

  /** Restores NOT NULL, walking the tables in reverse order of up(). */
  public async down(queryRunner: QueryRunner): Promise<void> {
    const reversedTables: string[] = [
      ...MigrationName1736787495707.feedTables,
    ].reverse();
    for (const table of reversedTables) {
      await queryRunner.query(
        `ALTER TABLE "${table}" ALTER COLUMN "moreInformationInMarkdown" SET NOT NULL`,
      );
    }
  }
}

View File

@@ -0,0 +1,83 @@
import { MigrationInterface, QueryRunner } from "typeorm";

export class MigrationName1736787985322 implements MigrationInterface {
  public name = "MigrationName1736787985322";

  // Every (table, column) pair holding a color value whose varchar length
  // is changed by this migration, in the order the statements must run.
  private static readonly colorColumns: Array<[string, string]> = [
    ["Label", "color"],
    ["IncidentSeverity", "color"],
    ["IncidentState", "color"],
    ["MonitorStatus", "color"],
    ["IncidentFeed", "displayColor"],
    ["AlertSeverity", "color"],
    ["AlertState", "color"],
    ["ScheduledMaintenanceState", "color"],
    ["StatusPage", "defaultBarColor"],
    ["StatusPageHistoryChartBarColorRule", "barColor"],
    ["AlertFeed", "displayColor"],
    ["ScheduledMaintenanceFeed", "displayColor"],
  ];

  // Re-types each color column to character varying(maxLength), in order.
  private static async resizeColorColumns(
    queryRunner: QueryRunner,
    maxLength: number,
  ): Promise<void> {
    for (const [table, column] of MigrationName1736787985322.colorColumns) {
      await queryRunner.query(
        `ALTER TABLE "${table}" ALTER COLUMN "${column}" TYPE character varying(${maxLength})`,
      );
    }
  }

  /** Widens all color columns from varchar(7) to varchar(10). */
  public async up(queryRunner: QueryRunner): Promise<void> {
    await MigrationName1736787985322.resizeColorColumns(queryRunner, 10);
  }

  /** Shrinks the same columns back to varchar(7). */
  public async down(queryRunner: QueryRunner): Promise<void> {
    await MigrationName1736787985322.resizeColorColumns(queryRunner, 7);
  }
}

View File

@@ -0,0 +1,39 @@
import { MigrationInterface, QueryRunner } from "typeorm";

export class MigrationName1736788706141 implements MigrationInterface {
  public name = "MigrationName1736788706141";

  /**
   * Adds a nullable "userId" column to each feed table, then wires each
   * one to "User" with ON DELETE SET NULL foreign keys.
   */
  public async up(queryRunner: QueryRunner): Promise<void> {
    const statements: string[] = [
      `ALTER TABLE "IncidentFeed" ADD "userId" uuid`,
      `ALTER TABLE "AlertFeed" ADD "userId" uuid`,
      `ALTER TABLE "ScheduledMaintenanceFeed" ADD "userId" uuid`,
      `ALTER TABLE "IncidentFeed" ADD CONSTRAINT "FK_010577090e59583da93c867f541" FOREIGN KEY ("userId") REFERENCES "User"("_id") ON DELETE SET NULL ON UPDATE NO ACTION`,
      `ALTER TABLE "AlertFeed" ADD CONSTRAINT "FK_97b19fbc90b6105614cc0cba300" FOREIGN KEY ("userId") REFERENCES "User"("_id") ON DELETE SET NULL ON UPDATE NO ACTION`,
      `ALTER TABLE "ScheduledMaintenanceFeed" ADD CONSTRAINT "FK_541c2b40579cbf342c8850ced2b" FOREIGN KEY ("userId") REFERENCES "User"("_id") ON DELETE SET NULL ON UPDATE NO ACTION`,
    ];
    for (const statement of statements) {
      await queryRunner.query(statement);
    }
  }

  /** Drops the foreign keys first, then the userId columns. */
  public async down(queryRunner: QueryRunner): Promise<void> {
    const statements: string[] = [
      `ALTER TABLE "ScheduledMaintenanceFeed" DROP CONSTRAINT "FK_541c2b40579cbf342c8850ced2b"`,
      `ALTER TABLE "AlertFeed" DROP CONSTRAINT "FK_97b19fbc90b6105614cc0cba300"`,
      `ALTER TABLE "IncidentFeed" DROP CONSTRAINT "FK_010577090e59583da93c867f541"`,
      `ALTER TABLE "ScheduledMaintenanceFeed" DROP COLUMN "userId"`,
      `ALTER TABLE "AlertFeed" DROP COLUMN "userId"`,
      `ALTER TABLE "IncidentFeed" DROP COLUMN "userId"`,
    ];
    for (const statement of statements) {
      await queryRunner.query(statement);
    }
  }
}

View File

@@ -0,0 +1,27 @@
import { MigrationInterface, QueryRunner } from "typeorm";

export class MigrationName1736856662868 implements MigrationInterface {
  public name = "MigrationName1736856662868";

  /** Adds a nullable "postedAt" timestamp column to each feed table. */
  public async up(queryRunner: QueryRunner): Promise<void> {
    const addColumnStatements: string[] = [
      `ALTER TABLE "IncidentFeed" ADD "postedAt" TIMESTAMP WITH TIME ZONE`,
      `ALTER TABLE "AlertFeed" ADD "postedAt" TIMESTAMP WITH TIME ZONE`,
      `ALTER TABLE "ScheduledMaintenanceFeed" ADD "postedAt" TIMESTAMP WITH TIME ZONE`,
    ];
    for (const statement of addColumnStatements) {
      await queryRunner.query(statement);
    }
  }

  /** Removes the "postedAt" columns, in reverse order of up(). */
  public async down(queryRunner: QueryRunner): Promise<void> {
    const dropColumnStatements: string[] = [
      `ALTER TABLE "ScheduledMaintenanceFeed" DROP COLUMN "postedAt"`,
      `ALTER TABLE "AlertFeed" DROP COLUMN "postedAt"`,
      `ALTER TABLE "IncidentFeed" DROP COLUMN "postedAt"`,
    ];
    for (const statement of dropColumnStatements) {
      await queryRunner.query(statement);
    }
  }
}

View File

@@ -78,6 +78,23 @@ import { MigrationName1729682875503 } from "./1729682875503-MigrationName";
import { MigrationName1730117995642 } from "./1730117995642-MigrationName";
import { MigrationName1730209089495 } from "./1730209089495-MigrationName";
import { MigrationName1730223198692 } from "./1730223198692-MigrationName";
import { MigrationName1731433043136 } from "./1731433043136-MigrationName";
import { MigrationName1731433309124 } from "./1731433309124-MigrationName";
import { MigrationName1731435267537 } from "./1731435267537-MigrationName";
import { MigrationName1731435514287 } from "./1731435514287-MigrationName";
import { MigrationName1732553444010 } from "./1732553444010-MigrationName";
import { MigrationName1734435866602 } from "./1734435866602-MigrationName";
import { MigrationName1736364478985 } from "./1736364478985-MigrationName";
import { MigrationName1736364957990 } from "./1736364957990-MigrationName";
import { MigrationName1736365532085 } from "./1736365532085-MigrationName";
import { MigrationName1736675947746 } from "./1736675947746-MigrationName";
import { MigrationName1736703138918 } from "./1736703138918-MigrationName";
import { MigrationName1736780194077 } from "./1736780194077-MigrationName";
import { MigrationName1736780194078 } from "./1736780194078-MigrationName";
import { MigrationName1736787495707 } from "./1736787495707-MigrationName";
import { MigrationName1736787985322 } from "./1736787985322-MigrationName";
import { MigrationName1736788706141 } from "./1736788706141-MigrationName";
import { MigrationName1736856662868 } from "./1736856662868-MigrationName";
export default [
InitialMigration,
@@ -160,4 +177,21 @@ export default [
MigrationName1730117995642,
MigrationName1730209089495,
MigrationName1730223198692,
MigrationName1731433043136,
MigrationName1731433309124,
MigrationName1731435267537,
MigrationName1731435514287,
MigrationName1732553444010,
MigrationName1734435866602,
MigrationName1736364478985,
MigrationName1736364957990,
MigrationName1736365532085,
MigrationName1736675947746,
MigrationName1736703138918,
MigrationName1736780194077,
MigrationName1736780194078,
MigrationName1736787495707,
MigrationName1736787985322,
MigrationName1736788706141,
MigrationName1736856662868,
];

View File

@@ -1,6 +1,7 @@
import {
RedisDb,
RedisHostname,
RedisIPFamily,
RedisPassword,
RedisPort,
RedisTlsCa,
@@ -40,6 +41,7 @@ export default abstract class Redis {
password: RedisPassword,
db: RedisDb,
enableTLSForSentinelMode: RedisTlsSentinelMode,
family: RedisIPFamily,
lazyConnect: true,
};

View File

@@ -1,5 +1,5 @@
import Redis, { ClientType } from "./Redis";
import { Mutex } from "redis-semaphore";
import { Mutex, LockOptions } from "redis-semaphore";
export type SemaphoreMutex = Mutex;
@@ -8,7 +8,8 @@ export default class Semaphore {
public static async lock(data: {
key: string;
namespace: string;
lockTimeout?: number;
lockTimeout?: number | undefined;
acquireTimeout?: number | undefined;
}): Promise<SemaphoreMutex> {
if (!data.lockTimeout) {
data.lockTimeout = 5000;
@@ -22,12 +23,20 @@ export default class Semaphore {
throw new Error("Redis client is not connected");
}
const lockOptions: LockOptions = {};
if (data.lockTimeout) {
lockOptions.lockTimeout = data.lockTimeout;
}
if (data.acquireTimeout) {
lockOptions.acquireTimeout = data.acquireTimeout;
}
const mutex: SemaphoreMutex = new Mutex(
client,
data.namespace + "-" + key,
{
lockTimeout: data.lockTimeout,
},
lockOptions,
);
await mutex.acquire();

View File

@@ -1,4 +1,6 @@
// This class checks the status of all the datasources.
import Sleep from "../../Types/Sleep";
import logger from "../Utils/Logger";
import { ClickhouseAppInstance } from "./ClickhouseDatabase";
import PostgresAppInstance from "./PostgresDatabase";
import Redis from "./Redis";
@@ -10,22 +12,58 @@ export default class InfrastructureStatus {
checkPostgresStatus: boolean;
checkClickhouseStatus: boolean;
}): Promise<void> {
logger.debug("Checking infrastructure status");
if (data.checkRedisStatus) {
logger.debug("Checking Redis status");
if (!(await Redis.checkConnnectionStatus())) {
logger.debug("Redis is not connected");
throw new DatabaseNotConnectedException("Redis is not connected");
}
logger.debug("Redis is connected");
}
if (data.checkPostgresStatus) {
logger.debug("Checking Postgres status");
if (!(await PostgresAppInstance.checkConnnectionStatus())) {
logger.debug("Postgres is not connected");
throw new DatabaseNotConnectedException("Postgres is not connected");
}
logger.debug("Postgres is connected");
}
if (data.checkClickhouseStatus) {
logger.debug("Checking Clickhouse status");
if (!(await ClickhouseAppInstance.checkConnnectionStatus())) {
logger.debug("Clickhouse is not connected");
throw new DatabaseNotConnectedException("Clickhouse is not connected");
}
logger.debug("Clickhouse is connected");
}
}
/**
 * Runs checkStatus() repeatedly until it succeeds or the given number of
 * attempts is exhausted. Each failed attempt is logged and followed by a
 * one-second pause before retrying.
 *
 * NOTE(review): if every attempt fails, this resolves without throwing —
 * callers cannot distinguish success from exhausted retries. Confirm this
 * best-effort behavior is intentional.
 */
public static async checkStatusWithRetry(data: {
  retryCount: number;
  checkRedisStatus: boolean;
  checkPostgresStatus: boolean;
  checkClickhouseStatus: boolean;
}): Promise<void> {
  for (let attempt: number = 0; attempt < data.retryCount; attempt++) {
    try {
      await this.checkStatus({
        checkRedisStatus: data.checkRedisStatus,
        checkPostgresStatus: data.checkPostgresStatus,
        checkClickhouseStatus: data.checkClickhouseStatus,
      });

      // All requested datasources are reachable — stop retrying.
      return;
    } catch (err) {
      logger.error("Error checking infrastructure status");
      logger.error(err);
      await Sleep.sleep(1000);
    }
  }
}
}

View File

@@ -7,31 +7,35 @@ import {
import Response from "../Utils/Response";
import Dictionary from "Common/Types/Dictionary";
import BadDataException from "Common/Types/Exception/BadDataException";
import ObjectID from "Common/Types/ObjectID";
export default class ClusterKeyAuthorization {
public static getClusterKeyHeaders(): Dictionary<string> {
return {
clusterkey: ONEUPTIME_SECRET.toString(),
clusterkey: ClusterKeyAuthorization.getClusterKey(),
};
}
/**
 * Returns the cluster key secret in URI-encoded form. Encoding is applied
 * because cluster key secrets frequently contain special characters.
 */
public static getClusterKey(): string {
  const rawSecret: string = ONEUPTIME_SECRET.toString();
  return encodeURIComponent(rawSecret);
}
public static async isAuthorizedServiceMiddleware(
req: ExpressRequest,
res: ExpressResponse,
next: NextFunction,
): Promise<void> {
let clusterKey: ObjectID;
let clusterKey: string;
if (req.params && req.params["clusterKey"]) {
clusterKey = new ObjectID(req.params["clusterKey"]);
clusterKey = req.params["clusterKey"];
} else if (req.query && req.query["clusterKey"]) {
clusterKey = new ObjectID(req.query["clusterKey"] as string);
clusterKey = req.query["clusterKey"] as string;
} else if (req.headers && req.headers["clusterkey"]) {
// Header keys are automatically transformed to lowercase
clusterKey = new ObjectID(req.headers["clusterkey"] as string);
clusterKey = req.headers["clusterkey"] as string;
} else if (req.body && req.body.clusterKey) {
clusterKey = new ObjectID(req.body.clusterKey);
clusterKey = req.body.clusterKey;
} else {
return Response.sendErrorResponse(
req,
@@ -41,7 +45,7 @@ export default class ClusterKeyAuthorization {
}
const isAuthorized: boolean =
clusterKey.toString() === ONEUPTIME_SECRET.toString();
clusterKey.toString() === ClusterKeyAuthorization.getClusterKey();
if (!isAuthorized) {
return Response.sendErrorResponse(

View File

@@ -1,14 +1,13 @@
import { ProbeExpressRequest } from "../Types/Request";
import BadRequestException from "Common/Types/Exception/BadRequestException";
import ProductType from "Common/Types/MeteredPlan/ProductType";
import ObjectID from "Common/Types/ObjectID";
import BadRequestException from "../../Types/Exception/BadRequestException";
import ProductType from "../../Types/MeteredPlan/ProductType";
import ObjectID from "../../Types/ObjectID";
import {
ExpressRequest,
ExpressResponse,
NextFunction,
} from "Common/Server/Utils/Express";
import TelemetryIngestionKeyService from "Common/Server/Services/TelemetryIngestionKeyService";
import TelemetryIngestionKey from "Common/Models/DatabaseModels/TelemetryIngestionKey";
} from "../../Server/Utils/Express";
import TelemetryIngestionKeyService from "../../Server/Services/TelemetryIngestionKeyService";
import TelemetryIngestionKey from "../../Models/DatabaseModels/TelemetryIngestionKey";
export interface TelemetryRequest extends ExpressRequest {
projectId: ObjectID; // Project ID
@@ -17,7 +16,7 @@ export interface TelemetryRequest extends ExpressRequest {
export default class TelemetryIngest {
public static async isAuthorizedServiceMiddleware(
req: ProbeExpressRequest,
req: ExpressRequest,
_res: ExpressResponse,
next: NextFunction,
): Promise<void> {

View File

@@ -0,0 +1,81 @@
import { Blue500 } from "../../Types/BrandColors";
import Color from "../../Types/Color";
import OneUptimeDate from "../../Types/Date";
import BadDataException from "../../Types/Exception/BadDataException";
import ObjectID from "../../Types/ObjectID";
import { IsBillingEnabled } from "../EnvironmentConfig";
import DatabaseService from "./DatabaseService";
import Model, {
AlertFeedEventType,
} from "Common/Models/DatabaseModels/AlertFeed";
export class Service extends DatabaseService<Model> {
  public constructor() {
    super(Model);
    if (IsBillingEnabled) {
      // On billed installations, cap retention of alert feed items at 120 days.
      this.hardDeleteItemsOlderThanInDays("createdAt", 120);
    }
  }

  /**
   * Creates a single alert feed entry.
   *
   * @param data.alertId - alert the entry belongs to (required).
   * @param data.feedInfoInMarkdown - entry body in markdown (required).
   * @param data.alertFeedEventType - event category for the entry (required).
   * @param data.projectId - owning project (required).
   * @param data.moreInformationInMarkdown - optional expanded details.
   * @param data.displayColor - optional accent color; defaults to Blue500.
   * @param data.userId - optional acting user for attribution.
   * @param data.postedAt - optional timestamp; defaults to "now".
   * @returns the created feed item.
   * @throws BadDataException when a required field is missing.
   */
  public async createAlertFeed(data: {
    alertId: ObjectID;
    feedInfoInMarkdown: string;
    alertFeedEventType: AlertFeedEventType;
    projectId: ObjectID;
    moreInformationInMarkdown?: string | undefined;
    displayColor?: Color | undefined;
    userId?: ObjectID | undefined;
    postedAt?: Date | undefined;
  }): Promise<Model> {
    if (!data.alertId) {
      throw new BadDataException("Alert ID is required");
    }

    if (!data.feedInfoInMarkdown) {
      // Fixed copy-paste error message (was "Log in markdown is required").
      throw new BadDataException("Feed info in markdown is required");
    }

    if (!data.alertFeedEventType) {
      // Fixed copy-paste error message (was "Alert log event is required").
      throw new BadDataException("Alert feed event type is required");
    }

    if (!data.projectId) {
      throw new BadDataException("Project ID is required");
    }

    const alertFeed: Model = new Model();

    alertFeed.displayColor = data.displayColor || Blue500;
    alertFeed.alertId = data.alertId;
    alertFeed.feedInfoInMarkdown = data.feedInfoInMarkdown;
    alertFeed.alertFeedEventType = data.alertFeedEventType;
    alertFeed.projectId = data.projectId;

    // BUG FIX: a caller-supplied postedAt was previously dropped — the field
    // was only assigned when postedAt was absent.
    alertFeed.postedAt = data.postedAt || OneUptimeDate.getCurrentDate();

    if (data.userId) {
      alertFeed.userId = data.userId;
    }

    if (data.moreInformationInMarkdown) {
      alertFeed.moreInformationInMarkdown = data.moreInformationInMarkdown;
    }

    return await this.create({
      data: alertFeed,
      props: {
        isRoot: true,
      },
    });
  }
}

export default new Service();

View File

@@ -28,6 +28,16 @@ import AlertState from "Common/Models/DatabaseModels/AlertState";
import AlertStateTimeline from "Common/Models/DatabaseModels/AlertStateTimeline";
import User from "Common/Models/DatabaseModels/User";
import { IsBillingEnabled } from "../EnvironmentConfig";
import TelemetryType from "../../Types/Telemetry/TelemetryType";
import logger from "../Utils/Logger";
import TelemetryUtil from "../Utils/Telemetry/Telemetry";
import MetricService from "./MetricService";
import OneUptimeDate from "../../Types/Date";
import Metric, {
MetricPointType,
ServiceType,
} from "../../Models/AnalyticsModels/Metric";
import AlertMetricType from "../../Types/Alerts/AlertMetricType";
export class Service extends DatabaseService<Model> {
public constructor() {
@@ -564,5 +574,265 @@ export class Service extends DatabaseService<Model> {
props: props || {},
});
}
/**
 * Rebuilds all analytics metrics for one alert — AlertCount,
 * TimeToAcknowledge, TimeToResolve, and AlertDuration — from its state
 * timeline. Existing metrics for this alert are deleted first, so the
 * operation is idempotent and safe to re-run.
 *
 * @param data.alertId - the alert whose metrics should be recomputed.
 * @throws BadDataException when the alert or its project cannot be found.
 */
public async refreshAlertMetrics(data: { alertId: ObjectID }): Promise<void> {
  const alert: Model | null = await this.findOneById({
    id: data.alertId,
    select: {
      projectId: true,
      monitor: {
        _id: true,
        name: true,
      },
      alertSeverity: {
        name: true,
        _id: true,
      },
    },
    props: {
      isRoot: true,
    },
  });

  if (!alert) {
    throw new BadDataException("Alert not found");
  }

  if (!alert.projectId) {
    // Fixed typo in the message (was "Incient Project ID not found").
    throw new BadDataException("Alert Project ID not found");
  }

  // Full state history of the alert, oldest first.
  const alertStateTimelines: Array<AlertStateTimeline> =
    await AlertStateTimelineService.findBy({
      query: {
        alertId: data.alertId,
      },
      select: {
        projectId: true,
        alertStateId: true,
        alertState: {
          isAcknowledgedState: true,
          isResolvedState: true,
        },
        startsAt: true,
        endsAt: true,
      },
      sort: {
        startsAt: SortOrder.Ascending,
      },
      skip: 0,
      limit: LIMIT_PER_PROJECT,
      props: {
        isRoot: true,
      },
    });

  const firstAlertStateTimeline: AlertStateTimeline | undefined =
    alertStateTimelines[0];

  // This is a refresh: drop any metrics previously recorded for the alert.
  await MetricService.deleteBy({
    query: {
      serviceId: data.alertId,
    },
    props: {
      isRoot: true,
    },
  });

  const itemsToSave: Array<Metric> = [];

  // Best-known start of the alert; fall back to createdAt, then "now".
  const alertStartsAt: Date =
    firstAlertStateTimeline?.startsAt ||
    alert.createdAt ||
    OneUptimeDate.getCurrentDate();

  // Attributes are identical for every metric of this alert; build them once.
  // Also a robustness fix: the previous code used non-null assertions like
  // monitor?.id!.toString(), which throws when the id is undefined.
  const metricAttributes: Record<string, string> = {
    alertId: data.alertId.toString(),
    projectId: alert.projectId.toString(),
    monitorId: alert.monitor?.id?.toString() || "",
    monitorName: alert.monitor?.name?.toString() || "",
    alertSeverityId: alert.alertSeverity?.id?.toString() || "",
    alertSeverityName: alert.alertSeverity?.name?.toString() || "",
  };

  // 1) AlertCount — one data point marking the alert's creation.
  const alertCountMetric: Metric = new Metric();
  alertCountMetric.projectId = alert.projectId;
  alertCountMetric.serviceId = alert.id!;
  alertCountMetric.serviceType = ServiceType.Alert;
  alertCountMetric.name = AlertMetricType.AlertCount;
  alertCountMetric.description = "Number of alerts created";
  alertCountMetric.value = 1;
  alertCountMetric.unit = "";
  alertCountMetric.attributes = { ...metricAttributes };
  alertCountMetric.time = alertStartsAt;
  alertCountMetric.timeUnixNano = OneUptimeDate.toUnixNano(
    alertCountMetric.time,
  );
  alertCountMetric.metricPointType = MetricPointType.Sum;

  itemsToSave.push(alertCountMetric);

  // 2) TimeToAcknowledge — seconds from alert start to the first
  //    acknowledged state, if the alert was ever acknowledged.
  //    (The previous some() + find() double scan is collapsed to one find().)
  const ackAlertStateTimeline: AlertStateTimeline | undefined =
    alertStateTimelines.find((timeline: AlertStateTimeline) => {
      return timeline.alertState?.isAcknowledgedState;
    });

  if (ackAlertStateTimeline) {
    const timeToAcknowledgeMetric: Metric = new Metric();
    timeToAcknowledgeMetric.projectId = alert.projectId;
    timeToAcknowledgeMetric.serviceId = alert.id!;
    timeToAcknowledgeMetric.serviceType = ServiceType.Alert;
    timeToAcknowledgeMetric.name = AlertMetricType.TimeToAcknowledge;
    timeToAcknowledgeMetric.description =
      "Time taken to acknowledge the alert";
    timeToAcknowledgeMetric.value = OneUptimeDate.getDifferenceInSeconds(
      ackAlertStateTimeline.startsAt || OneUptimeDate.getCurrentDate(),
      alertStartsAt,
    );
    timeToAcknowledgeMetric.unit = "seconds";
    timeToAcknowledgeMetric.attributes = { ...metricAttributes };
    timeToAcknowledgeMetric.time =
      ackAlertStateTimeline.startsAt ||
      alert.createdAt ||
      OneUptimeDate.getCurrentDate();
    timeToAcknowledgeMetric.timeUnixNano = OneUptimeDate.toUnixNano(
      timeToAcknowledgeMetric.time,
    );
    timeToAcknowledgeMetric.metricPointType = MetricPointType.Sum;

    itemsToSave.push(timeToAcknowledgeMetric);
  }

  // 3) TimeToResolve — analogous, for the first resolved state.
  const resolvedAlertStateTimeline: AlertStateTimeline | undefined =
    alertStateTimelines.find((timeline: AlertStateTimeline) => {
      return timeline.alertState?.isResolvedState;
    });

  if (resolvedAlertStateTimeline) {
    const timeToResolveMetric: Metric = new Metric();
    timeToResolveMetric.projectId = alert.projectId;
    timeToResolveMetric.serviceId = alert.id!;
    timeToResolveMetric.serviceType = ServiceType.Alert;
    timeToResolveMetric.name = AlertMetricType.TimeToResolve;
    timeToResolveMetric.description = "Time taken to resolve the alert";
    timeToResolveMetric.value = OneUptimeDate.getDifferenceInSeconds(
      resolvedAlertStateTimeline.startsAt || OneUptimeDate.getCurrentDate(),
      alertStartsAt,
    );
    timeToResolveMetric.unit = "seconds";
    timeToResolveMetric.attributes = { ...metricAttributes };
    timeToResolveMetric.time =
      resolvedAlertStateTimeline.startsAt ||
      alert.createdAt ||
      OneUptimeDate.getCurrentDate();
    timeToResolveMetric.timeUnixNano = OneUptimeDate.toUnixNano(
      timeToResolveMetric.time,
    );
    timeToResolveMetric.metricPointType = MetricPointType.Sum;

    itemsToSave.push(timeToResolveMetric);
  }

  // 4) AlertDuration — seconds between the alert's start and its latest
  //    state transition.
  const lastAlertStateTimeline: AlertStateTimeline | undefined =
    alertStateTimelines[alertStateTimelines.length - 1];

  if (lastAlertStateTimeline) {
    const alertEndsAt: Date =
      lastAlertStateTimeline.startsAt || OneUptimeDate.getCurrentDate();

    const alertDurationMetric: Metric = new Metric();
    alertDurationMetric.projectId = alert.projectId;
    alertDurationMetric.serviceId = alert.id!;
    alertDurationMetric.serviceType = ServiceType.Alert;
    alertDurationMetric.name = AlertMetricType.AlertDuration;
    alertDurationMetric.description = "Duration of the alert";
    alertDurationMetric.value = OneUptimeDate.getDifferenceInSeconds(
      alertEndsAt,
      alertStartsAt,
    );
    alertDurationMetric.unit = "seconds";
    alertDurationMetric.attributes = { ...metricAttributes };
    alertDurationMetric.time =
      lastAlertStateTimeline.startsAt ||
      alert.createdAt ||
      OneUptimeDate.getCurrentDate();
    alertDurationMetric.timeUnixNano = OneUptimeDate.toUnixNano(
      alertDurationMetric.time,
    );
    alertDurationMetric.metricPointType = MetricPointType.Sum;

    // BUG FIX: this metric was built but never added to itemsToSave, so
    // AlertDuration was silently dropped on every refresh.
    itemsToSave.push(alertDurationMetric);
  }

  await MetricService.createMany({
    items: itemsToSave,
    props: {
      isRoot: true,
    },
  });

  // Re-index attribute keys for telemetry queries. Fire-and-forget; failures
  // are logged but do not fail the refresh.
  // Fixed "monitorNames" -> "monitorName" to match the attribute key
  // actually written above.
  TelemetryUtil.indexAttributes({
    attributes: ["monitorId", "monitorName", "projectId", "alertId"],
    projectId: alert.projectId,
    telemetryType: TelemetryType.Metric,
  }).catch((err: Error) => {
    logger.error(err);
  });
}
}
export default new Service();

View File

@@ -18,6 +18,7 @@ import { IsBillingEnabled } from "../EnvironmentConfig";
import { JSONObject } from "../../Types/JSON";
import AlertInternalNote from "../../Models/DatabaseModels/AlertInternalNote";
import AlertInternalNoteService from "./AlertInternalNoteService";
import logger from "../Utils/Logger";
export class Service extends DatabaseService<AlertStateTimeline> {
public constructor() {
@@ -173,6 +174,15 @@ export class Service extends DatabaseService<AlertStateTimeline> {
props: onCreate.createBy.props,
});
AlertService.refreshAlertMetrics({
alertId: createdItem.alertId,
}).catch((error: Error) => {
logger.error(
"Error while refreshing alert metrics after alert state timeline creation",
);
logger.error(error);
});
return createdItem;
}

View File

@@ -259,6 +259,11 @@ export default class AnalyticsDatabaseService<
// convert date column from string to date.
const groupByColumnName: keyof TBaseModel | undefined =
aggregateBy.groupBy && Object.keys(aggregateBy.groupBy).length > 0
? (Object.keys(aggregateBy.groupBy)[0] as keyof TBaseModel)
: undefined;
for (const item of items) {
if (
!(item as JSONObject)[
@@ -268,6 +273,21 @@ export default class AnalyticsDatabaseService<
continue;
}
// if value is of type string then convert it to number.
if (
typeof (item as JSONObject)[
aggregateBy.aggregateColumnName as string
] === Typeof.String
) {
(item as JSONObject)[aggregateBy.aggregateColumnName as string] =
Number.parseFloat(
(item as JSONObject)[
aggregateBy.aggregateColumnName as string
] as string,
);
}
const aggregatedModel: AggregatedModel = {
timestamp: OneUptimeDate.fromString(
(item as JSONObject)[
@@ -277,6 +297,9 @@ export default class AnalyticsDatabaseService<
value: (item as JSONObject)[
aggregateBy.aggregateColumnName as string
] as number,
[groupByColumnName as string]: (item as JSONObject)[
groupByColumnName as string
],
};
aggregatedItems.push(aggregatedModel);
@@ -360,6 +383,9 @@ export default class AnalyticsDatabaseService<
findStatement.statement,
);
logger.debug(`${this.model.tableName} Find Statement executed`);
logger.debug(findStatement.statement);
const strResult: string = await StreamUtil.convertStreamToText(
dbResult.stream,
);
@@ -389,6 +415,10 @@ export default class AnalyticsDatabaseService<
strResult: string,
columns: string[],
): JSONObject[] {
if (!strResult || !strResult.trim()) {
return [];
}
const jsonItems: Array<JSONObject> = [];
const rows: Array<string> = strResult.split("\n");
@@ -457,7 +487,7 @@ export default class AnalyticsDatabaseService<
count()
FROM ${databaseName}.${this.model.tableName}
WHERE TRUE `.append(whereStatement);
if (countBy.groupBy && Object.keys(countBy.groupBy).length > 0) {
statement.append(
@@ -519,9 +549,11 @@ export default class AnalyticsDatabaseService<
statement.append(SQL` FROM ${databaseName}.${this.model.tableName}`);
statement.append(SQL` WHERE TRUE `).append(whereStatement);
statement.append(SQL` GROUP BY `).append(`${aggregateBy.aggregationTimestampColumnName.toString()}`);
statement.append(SQL` GROUP BY `).append(`${aggregateBy.aggregationTimestampColumnName.toString()}`);
if (aggregateBy.groupBy && Object.keys(aggregateBy.groupBy).length > 0) {
statement.append(SQL` , `).append(this.statementGenerator.toGroupByStatement(aggregateBy.groupBy));
}
statement.append(SQL` ORDER BY `).append(sortStatement);
@@ -538,7 +570,7 @@ export default class AnalyticsDatabaseService<
}}
`);
logger.debug(`${this.model.tableName} Aggregate Statement`);
logger.debug(statement);
@@ -604,7 +636,7 @@ export default class AnalyticsDatabaseService<
}}
`);
logger.debug(`${this.model.tableName} Find Statement`);
logger.debug(statement);
@@ -626,7 +658,7 @@ export default class AnalyticsDatabaseService<
const statement: Statement = SQL`
ALTER TABLE ${databaseName}.${this.model.tableName}
DELETE WHERE TRUE `.append(whereStatement);
logger.debug(`${this.model.tableName} Delete Statement`);
logger.debug(statement);
@@ -676,7 +708,13 @@ export default class AnalyticsDatabaseService<
(select as any)[tenantColumnName] = true;
}
await this.execute(this.toDeleteStatement(beforeDeleteBy));
const deleteStatement: Statement = this.toDeleteStatement(beforeDeleteBy);
await this.execute(deleteStatement);
logger.debug(`${this.model.tableName} Delete Statement executed`);
logger.debug(deleteStatement);
} catch (error) {
await this.onDeleteError(error as Exception);
throw this.getException(error as Exception);
@@ -727,9 +765,17 @@ export default class AnalyticsDatabaseService<
(select as any)[tenantColumnName] = true;
}
await this.execute(
this.statementGenerator.toUpdateStatement(beforeUpdateBy),
const statement: Statement = this.statementGenerator.toUpdateStatement(
beforeUpdateBy,
);
await this.execute(
statement,
);
logger.debug(`${this.model.tableName} Update Statement executed`);
logger.debug(statement);
} catch (error) {
await this.onUpdateError(error as Exception);
throw this.getException(error as Exception);
@@ -766,7 +812,7 @@ export default class AnalyticsDatabaseService<
: {
query: statement, // TODO remove and only accept Statements
},
);
) as ExecResult<Stream>;
}
protected async onUpdateSuccess(
@@ -914,6 +960,10 @@ export default class AnalyticsDatabaseService<
await this.execute(insertStatement);
logger.debug(`${this.model.tableName} Create Statement executed`);
logger.debug(insertStatement);
if (!createBy.props.ignoreHooks) {
for (let i: number = 0; i < items.length; i++) {
if (!items[i]) {

View File

@@ -437,10 +437,10 @@ class DatabaseService<TBaseModel extends BaseModel> extends BaseService {
const columns: Columns = this.model.getTableColumns();
for (const columnName of columns.columns) {
if (this.model.isEntityColumn(columnName)) {
const tableColumnMetadata: TableColumnMetadata =
this.model.getTableColumnMetadata(columnName);
const tableColumnMetadata: TableColumnMetadata =
this.model.getTableColumnMetadata(columnName);
if (this.model.isEntityColumn(columnName)) {
const columnValue: JSONValue = (data as any)[columnName];
if (
@@ -507,6 +507,14 @@ class DatabaseService<TBaseModel extends BaseModel> extends BaseService {
(data as any)[columnName] = columnValue.toString();
}
}
// if its a Date column and if date is null then set it to null.
if (
(data as any)[columnName] === "" &&
tableColumnMetadata.type === TableColumnType.Date
) {
(data as any)[columnName] = null;
}
}
// check createByUserId.
@@ -1384,6 +1392,9 @@ class DatabaseService<TBaseModel extends BaseModel> extends BaseService {
...data,
} as any;
logger.debug("Updated Item");
logger.debug(JSON.stringify(updatedItem, null, 2));
await this.getRepository().save(updatedItem);
// hit workflow.

View File

@@ -0,0 +1,89 @@
import { Blue500 } from "../../Types/BrandColors";
import Color from "../../Types/Color";
import OneUptimeDate from "../../Types/Date";
import BadDataException from "../../Types/Exception/BadDataException";
import ObjectID from "../../Types/ObjectID";
import { IsBillingEnabled } from "../EnvironmentConfig";
import logger from "../Utils/Logger";
import DatabaseService from "./DatabaseService";
import IncidentFeed, {
IncidentFeedEventType,
} from "Common/Models/DatabaseModels/IncidentFeed";
export class Service extends DatabaseService<IncidentFeed> {
  public constructor() {
    super(IncidentFeed);
    if (IsBillingEnabled) {
      // On billed installations, cap retention of feed items at 120 days.
      this.hardDeleteItemsOlderThanInDays("createdAt", 120);
    }
  }

  /**
   * Creates a single incident feed entry.
   *
   * @param data.incidentId - incident the entry belongs to (required).
   * @param data.feedInfoInMarkdown - entry body in markdown (required).
   * @param data.incidentFeedEventType - event category (required).
   * @param data.projectId - owning project (required).
   * @param data.moreInformationInMarkdown - optional expanded details.
   * @param data.displayColor - optional accent color; defaults to Blue500.
   * @param data.userId - optional acting user for attribution.
   * @param data.postedAt - optional timestamp; defaults to "now".
   * @returns the created feed item.
   * @throws BadDataException when a required field is missing.
   */
  public async createIncidentFeed(data: {
    incidentId: ObjectID;
    feedInfoInMarkdown: string;
    incidentFeedEventType: IncidentFeedEventType;
    projectId: ObjectID;
    moreInformationInMarkdown?: string | undefined;
    displayColor?: Color | undefined;
    userId?: ObjectID | undefined;
    postedAt?: Date | undefined;
  }): Promise<IncidentFeed> {
    logger.debug("IncidentFeedService.createIncidentFeed");
    logger.debug(data);

    if (!data.incidentId) {
      throw new BadDataException("Incident ID is required");
    }

    if (!data.feedInfoInMarkdown) {
      // Fixed copy-paste error message (was "Log in markdown is required").
      throw new BadDataException("Feed info in markdown is required");
    }

    if (!data.incidentFeedEventType) {
      // Fixed copy-paste error message (was "Incident log event is required").
      throw new BadDataException("Incident feed event type is required");
    }

    if (!data.projectId) {
      throw new BadDataException("Project ID is required");
    }

    const incidentFeed: IncidentFeed = new IncidentFeed();

    incidentFeed.displayColor = data.displayColor || Blue500;
    incidentFeed.incidentId = data.incidentId;
    incidentFeed.feedInfoInMarkdown = data.feedInfoInMarkdown;
    incidentFeed.incidentFeedEventType = data.incidentFeedEventType;
    incidentFeed.projectId = data.projectId;

    // BUG FIX: a caller-supplied postedAt was previously dropped — the field
    // was only assigned when postedAt was absent.
    incidentFeed.postedAt = data.postedAt || OneUptimeDate.getCurrentDate();

    if (data.userId) {
      incidentFeed.userId = data.userId;
    }

    if (data.moreInformationInMarkdown) {
      incidentFeed.moreInformationInMarkdown = data.moreInformationInMarkdown;
    }

    const createdIncidentFeed: IncidentFeed = await this.create({
      data: incidentFeed,
      props: {
        isRoot: true,
      },
    });

    logger.debug("Incident Feed created");
    logger.debug(createdIncidentFeed);

    return createdIncidentFeed;
  }
}

export default new Service();

View File

@@ -1,10 +1,38 @@
import ObjectID from "../../Types/ObjectID";
import DatabaseService from "./DatabaseService";
import Model from "Common/Models/DatabaseModels/IncidentInternalNote";
import { OnCreate } from "../Types/Database/Hooks";
import IncidentFeedService from "./IncidentFeedService";
import { IncidentFeedEventType } from "../../Models/DatabaseModels/IncidentFeed";
import { Blue500 } from "../../Types/BrandColors";
export class Service extends DatabaseService<Model> {
  public constructor() {
    super(Model);
  }

  /**
   * After an internal note is saved, records a matching "private note"
   * entry on the incident feed, attributed to the note's author when known.
   */
  public override async onCreateSuccess(
    _onCreate: OnCreate<Model>,
    createdItem: Model,
  ): Promise<Model> {
    const noteAuthorId: ObjectID | null | undefined =
      createdItem.createdByUserId || createdItem.createdByUser?.id;

    const feedMarkdown: string = `**Posted Internal / Private Note**
${createdItem.note}
`;

    await IncidentFeedService.createIncidentFeed({
      incidentId: createdItem.incidentId!,
      projectId: createdItem.projectId!,
      incidentFeedEventType: IncidentFeedEventType.PrivateNote,
      displayColor: Blue500,
      userId: noteAuthorId || undefined,
      feedInfoInMarkdown: feedMarkdown,
    });

    return createdItem;
  }
}

export default new Service();

View File

@@ -1,10 +1,122 @@
import ObjectID from "../../Types/ObjectID";
import { OnCreate, OnDelete } from "../Types/Database/Hooks";
import DatabaseService from "./DatabaseService";
import Model from "Common/Models/DatabaseModels/IncidentOwnerTeam";
import IncidentFeedService from "./IncidentFeedService";
import { IncidentFeedEventType } from "../../Models/DatabaseModels/IncidentFeed";
import { Gray500, Red500 } from "../../Types/BrandColors";
import TeamService from "./TeamService";
import Team from "../../Models/DatabaseModels/Team";
import DeleteBy from "../Types/Database/DeleteBy";
export class Service extends DatabaseService<Model> {
public constructor() {
super(Model);
}
protected override async onBeforeDelete(
deleteBy: DeleteBy<Model>,
): Promise<OnDelete<Model>> {
const itemsToDelete: Model[] = await this.findBy({
query: deleteBy.query,
limit: deleteBy.limit,
skip: deleteBy.skip,
props: {
isRoot: true,
},
select: {
incidentId: true,
projectId: true,
teamId: true,
},
});
return {
carryForward: {
itemsToDelete: itemsToDelete,
},
deleteBy: deleteBy,
};
}
protected override async onDeleteSuccess(
onDelete: OnDelete<Model>,
_itemIdsBeforeDelete: Array<ObjectID>,
): Promise<OnDelete<Model>> {
const deleteByUserId: ObjectID | undefined =
onDelete.deleteBy.deletedByUser?.id || onDelete.deleteBy.props.userId;
const itemsToDelete: Model[] = onDelete.carryForward.itemsToDelete;
for (const item of itemsToDelete) {
const incidentId: ObjectID | undefined = item.incidentId;
const projectId: ObjectID | undefined = item.projectId;
const teamId: ObjectID | undefined = item.teamId;
if (incidentId && teamId && projectId) {
const team: Team | null = await TeamService.findOneById({
id: teamId,
select: {
name: true,
},
props: {
isRoot: true,
},
});
if (team && team.name) {
await IncidentFeedService.createIncidentFeed({
incidentId: incidentId,
projectId: projectId,
incidentFeedEventType: IncidentFeedEventType.OwnerTeamRemoved,
displayColor: Red500,
feedInfoInMarkdown: `**Team ${team.name}** was removed from the incident as the owner.`,
userId: deleteByUserId || undefined,
});
}
}
}
return onDelete;
}
public override async onCreateSuccess(
onCreate: OnCreate<Model>,
createdItem: Model,
): Promise<Model> {
// add incident feed.
const incidentId: ObjectID | undefined = createdItem.incidentId;
const projectId: ObjectID | undefined = createdItem.projectId;
const teamId: ObjectID | undefined = createdItem.teamId;
const createdByUserId: ObjectID | undefined =
createdItem.createdByUserId || onCreate.createBy.props.userId;
if (incidentId && teamId && projectId) {
const team: Team | null = await TeamService.findOneById({
id: teamId,
select: {
name: true,
},
props: {
isRoot: true,
},
});
if (team && team.name) {
await IncidentFeedService.createIncidentFeed({
incidentId: incidentId,
projectId: projectId,
incidentFeedEventType: IncidentFeedEventType.OwnerTeamAdded,
displayColor: Gray500,
feedInfoInMarkdown: `**Team ${team.name}** was added to the incident as the owner.`,
userId: createdByUserId || undefined,
});
}
}
return createdItem;
}
}
export default new Service();

View File

@@ -1,10 +1,124 @@
import ObjectID from "../../Types/ObjectID";
import DatabaseService from "./DatabaseService";
import Model from "Common/Models/DatabaseModels/IncidentOwnerUser";
import IncidentFeedService from "./IncidentFeedService";
import { IncidentFeedEventType } from "../../Models/DatabaseModels/IncidentFeed";
import { Gray500, Red500 } from "../../Types/BrandColors";
import User from "../../Models/DatabaseModels/User";
import UserService from "./UserService";
import { OnCreate, OnDelete } from "../Types/Database/Hooks";
import DeleteBy from "../Types/Database/DeleteBy";
export class Service extends DatabaseService<Model> {
public constructor() {
super(Model);
}
protected override async onBeforeDelete(
deleteBy: DeleteBy<Model>,
): Promise<OnDelete<Model>> {
const itemsToDelete: Model[] = await this.findBy({
query: deleteBy.query,
limit: deleteBy.limit,
skip: deleteBy.skip,
props: {
isRoot: true,
},
select: {
incidentId: true,
projectId: true,
userId: true,
},
});
return {
carryForward: {
itemsToDelete: itemsToDelete,
},
deleteBy: deleteBy,
};
}
protected override async onDeleteSuccess(
onDelete: OnDelete<Model>,
_itemIdsBeforeDelete: Array<ObjectID>,
): Promise<OnDelete<Model>> {
const deleteByUserId: ObjectID | undefined =
onDelete.deleteBy.deletedByUser?.id || onDelete.deleteBy.props.userId;
const itemsToDelete: Model[] = onDelete.carryForward.itemsToDelete;
for (const item of itemsToDelete) {
const incidentId: ObjectID | undefined = item.incidentId;
const projectId: ObjectID | undefined = item.projectId;
const userId: ObjectID | undefined = item.userId;
if (incidentId && userId && projectId) {
const user: User | null = await UserService.findOneById({
id: userId,
select: {
name: true,
email: true,
},
props: {
isRoot: true,
},
});
if (user && user.name) {
await IncidentFeedService.createIncidentFeed({
incidentId: incidentId,
projectId: projectId,
incidentFeedEventType: IncidentFeedEventType.OwnerUserRemoved,
displayColor: Red500,
feedInfoInMarkdown: `**${user.name.toString()}** (${user.email?.toString()}) was removed from the incident as the owner.`,
userId: deleteByUserId || undefined,
});
}
}
}
return onDelete;
}
public override async onCreateSuccess(
onCreate: OnCreate<Model>,
createdItem: Model,
): Promise<Model> {
// add incident feed.
const incidentId: ObjectID | undefined = createdItem.incidentId;
const projectId: ObjectID | undefined = createdItem.projectId;
const userId: ObjectID | undefined = createdItem.userId;
const createdByUserId: ObjectID | undefined =
createdItem.createdByUserId || onCreate.createBy.props.userId;
if (incidentId && userId && projectId) {
const user: User | null = await UserService.findOneById({
id: userId,
select: {
name: true,
email: true,
},
props: {
isRoot: true,
},
});
if (user && user.name) {
await IncidentFeedService.createIncidentFeed({
incidentId: incidentId,
projectId: projectId,
incidentFeedEventType: IncidentFeedEventType.OwnerUserAdded,
displayColor: Gray500,
feedInfoInMarkdown: `**${user.name.toString()}** (${user.email?.toString()}) was added to the incident as the owner.`,
userId: createdByUserId || undefined,
});
}
}
return createdItem;
}
}
export default new Service();

View File

@@ -3,6 +3,10 @@ import { OnCreate } from "../Types/Database/Hooks";
import DatabaseService from "./DatabaseService";
import OneUptimeDate from "../../Types/Date";
import Model from "Common/Models/DatabaseModels/IncidentPublicNote";
import IncidentFeedService from "./IncidentFeedService";
import { IncidentFeedEventType } from "../../Models/DatabaseModels/IncidentFeed";
import { Indigo500 } from "../../Types/BrandColors";
import ObjectID from "../../Types/ObjectID";
export class Service extends DatabaseService<Model> {
public constructor() {
@@ -21,6 +25,28 @@ export class Service extends DatabaseService<Model> {
carryForward: null,
};
}
/**
 * After a public (status-page) note is saved, mirrors it onto the incident
 * feed so it appears in the incident timeline, attributed to the note's
 * author when known.
 */
public override async onCreateSuccess(
  _onCreate: OnCreate<Model>,
  createdItem: Model,
): Promise<Model> {
  const noteAuthorId: ObjectID | null | undefined =
    createdItem.createdByUserId || createdItem.createdByUser?.id;

  const feedMarkdown: string = `**Posted public note for this incident on status page**
${createdItem.note}
`;

  await IncidentFeedService.createIncidentFeed({
    incidentId: createdItem.incidentId!,
    projectId: createdItem.projectId!,
    incidentFeedEventType: IncidentFeedEventType.PublicNote,
    displayColor: Indigo500,
    userId: noteAuthorId || undefined,
    feedInfoInMarkdown: feedMarkdown,
  });

  return createdItem;
}
}
export default new Service();

View File

@@ -34,6 +34,22 @@ import MonitorStatus from "Common/Models/DatabaseModels/MonitorStatus";
import MonitorStatusTimeline from "Common/Models/DatabaseModels/MonitorStatusTimeline";
import User from "Common/Models/DatabaseModels/User";
import { IsBillingEnabled } from "../EnvironmentConfig";
import MetricService from "./MetricService";
import IncidentMetricType from "../../Types/Incident/IncidentMetricType";
import Metric, {
MetricPointType,
ServiceType,
} from "../../Models/AnalyticsModels/Metric";
import OneUptimeDate from "../../Types/Date";
import TelemetryUtil from "../Utils/Telemetry/Telemetry";
import TelemetryType from "../../Types/Telemetry/TelemetryType";
import logger from "../Utils/Logger";
import Semaphore, {
SemaphoreMutex,
} from "Common/Server/Infrastructure/Semaphore";
import IncidentFeedService from "./IncidentFeedService";
import { IncidentFeedEventType } from "../../Models/DatabaseModels/IncidentFeed";
import { Gray500, Red500 } from "../../Types/BrandColors";
export class Service extends DatabaseService<Model> {
public constructor() {
@@ -139,6 +155,34 @@ export class Service extends DatabaseService<Model> {
isRoot: true,
},
});
// store incident metric
}
/**
 * Returns the incident number currently in use for a project.
 *
 * Incident numbers are assigned sequentially, so the most recently created
 * incident (sorted by createdAt, descending) carries the current maximum.
 *
 * @param data.projectId - project to look the number up for.
 * @returns the latest incident number, or 0 when the project has no
 *   incidents yet (or the number is missing on the record).
 */
public async getExistingIncidentNumberForProject(data: {
  projectId: ObjectID;
}): Promise<number> {
  const mostRecentIncident: Model | null = await this.findOneBy({
    query: {
      projectId: data.projectId,
    },
    select: {
      incidentNumber: true,
    },
    sort: {
      createdAt: SortOrder.Descending,
    },
    props: {
      isRoot: true,
    },
  });

  // No incident found, or incidentNumber unset -> treat as zero.
  return mostRecentIncident?.incidentNumber || 0;
}
protected override async onBeforeCreate(
@@ -148,10 +192,13 @@ export class Service extends DatabaseService<Model> {
throw new BadDataException("ProjectId required to create incident.");
}
const projectId: ObjectID =
createBy.props.tenantId || createBy.data.projectId!;
const incidentState: IncidentState | null =
await IncidentStateService.findOneBy({
query: {
projectId: createBy.props.tenantId || createBy.data.projectId!,
projectId: projectId,
isCreatedState: true,
},
select: {
@@ -168,7 +215,39 @@ export class Service extends DatabaseService<Model> {
);
}
let mutex: SemaphoreMutex | null = null;
try {
mutex = await Semaphore.lock({
key: projectId.toString(),
namespace: "IncidentService.incident-create",
lockTimeout: 15000,
acquireTimeout: 20000,
});
logger.debug(
"Mutex acquired - IncidentService.incident-create " +
projectId.toString() +
" at " +
OneUptimeDate.getCurrentDateAsFormattedString(),
);
} catch (err) {
logger.debug(
"Mutex acquire failed - IncidentService.incident-create " +
projectId.toString() +
" at " +
OneUptimeDate.getCurrentDateAsFormattedString(),
);
logger.error(err);
}
const incidentNumberForThisIncident: number =
(await this.getExistingIncidentNumberForProject({
projectId: projectId,
})) + 1;
createBy.data.currentIncidentStateId = incidentState.id;
createBy.data.incidentNumber = incidentNumberForThisIncident;
if (
(createBy.data.createdByUserId ||
@@ -205,13 +284,19 @@ export class Service extends DatabaseService<Model> {
}
}
return { createBy, carryForward: null };
return {
createBy,
carryForward: {
mutex: mutex,
},
};
}
protected override async onCreateSuccess(
onCreate: OnCreate<Model>,
createdItem: Model,
): Promise<Model> {
// these should never be null.
if (!createdItem.projectId) {
throw new BadDataException("projectId is required");
}
@@ -220,6 +305,52 @@ export class Service extends DatabaseService<Model> {
throw new BadDataException("id is required");
}
// release the mutex.
if (onCreate.carryForward && onCreate.carryForward.mutex) {
const mutex: SemaphoreMutex = onCreate.carryForward.mutex;
const projectId: ObjectID = createdItem.projectId!;
try {
await Semaphore.release(mutex);
logger.debug(
"Mutex released - IncidentService.incident-create " +
projectId.toString() +
" at " +
OneUptimeDate.getCurrentDateAsFormattedString(),
);
} catch (err) {
logger.debug(
"Mutex release failed - IncidentService.incident-create " +
projectId.toString() +
" at " +
OneUptimeDate.getCurrentDateAsFormattedString(),
);
logger.error(err);
}
}
const createdByUserId: ObjectID | undefined | null =
createdItem.createdByUserId || createdItem.createdByUser?.id;
await IncidentFeedService.createIncidentFeed({
incidentId: createdItem.id!,
projectId: createdItem.projectId!,
incidentFeedEventType: IncidentFeedEventType.IncidentCreated,
displayColor: Red500,
feedInfoInMarkdown: `**Incident #${createdItem.incidentNumber?.toString()} Created**:
**Incident Title**:
${createdItem.title || "No title provided."}
**Description**:
${createdItem.description || "No description provided."}
`,
userId: createdByUserId || undefined,
});
if (!createdItem.currentIncidentStateId) {
throw new BadDataException("currentIncidentStateId is required");
}
@@ -260,6 +391,26 @@ export class Service extends DatabaseService<Model> {
},
});
await IncidentFeedService.createIncidentFeed({
incidentId: createdItem.id!,
projectId: createdItem.projectId!,
incidentFeedEventType: IncidentFeedEventType.RootCause,
displayColor: Red500,
feedInfoInMarkdown: `**Root Cause**
${createdItem.rootCause || "No root cause provided."}`,
});
await IncidentFeedService.createIncidentFeed({
incidentId: createdItem.id!,
projectId: createdItem.projectId!,
incidentFeedEventType: IncidentFeedEventType.RemediationNotes,
displayColor: Red500,
feedInfoInMarkdown: `**Remediation Notes**
${createdItem.remediationNotes || "No remediation notes provided."}`,
});
// add owners.
if (
@@ -491,6 +642,82 @@ export class Service extends DatabaseService<Model> {
}
}
if (updatedItemIds.length > 0) {
for (const incidentId of updatedItemIds) {
if (onUpdate.updateBy.data.title) {
// add incident feed.
const createdByUserId: ObjectID | undefined | null =
onUpdate.updateBy.props.userId;
await IncidentFeedService.createIncidentFeed({
incidentId: incidentId,
projectId: onUpdate.updateBy.props.tenantId as ObjectID,
incidentFeedEventType: IncidentFeedEventType.IncidentUpdated,
displayColor: Gray500,
feedInfoInMarkdown: `**Incident title was updated.** Here's the new title.
${onUpdate.updateBy.data.title || "No title provided."}
`,
userId: createdByUserId || undefined,
});
}
if (onUpdate.updateBy.data.rootCause) {
// add incident feed.
const createdByUserId: ObjectID | undefined | null =
onUpdate.updateBy.props.userId;
await IncidentFeedService.createIncidentFeed({
incidentId: incidentId,
projectId: onUpdate.updateBy.props.tenantId as ObjectID,
incidentFeedEventType: IncidentFeedEventType.IncidentUpdated,
displayColor: Gray500,
feedInfoInMarkdown: `**Incident root cause was updated.** Here's the new root cause.
${onUpdate.updateBy.data.rootCause || "No root cause provided."}
`,
userId: createdByUserId || undefined,
});
}
if (onUpdate.updateBy.data.description) {
// add incident feed.
const createdByUserId: ObjectID | undefined | null =
onUpdate.updateBy.props.userId;
await IncidentFeedService.createIncidentFeed({
incidentId: incidentId,
projectId: onUpdate.updateBy.props.tenantId as ObjectID,
incidentFeedEventType: IncidentFeedEventType.IncidentUpdated,
displayColor: Gray500,
feedInfoInMarkdown: `**Incident description was updated.** Here's the new description.
${onUpdate.updateBy.data.description || "No description provided."}
`,
userId: createdByUserId || undefined,
});
}
if (onUpdate.updateBy.data.remediationNotes) {
// add incident feed.
const createdByUserId: ObjectID | undefined | null =
onUpdate.updateBy.props.userId;
await IncidentFeedService.createIncidentFeed({
incidentId: incidentId,
projectId: onUpdate.updateBy.props.tenantId as ObjectID,
incidentFeedEventType: IncidentFeedEventType.IncidentUpdated,
displayColor: Gray500,
feedInfoInMarkdown: `**Remediation notes were updated.** Here are the new notes.
${onUpdate.updateBy.data.remediationNotes || "No remediation notes provided."}
`,
userId: createdByUserId || undefined,
});
}
}
}
return onUpdate;
}
@@ -737,5 +964,291 @@ export class Service extends DatabaseService<Model> {
props: props || {},
});
}
/**
 * Recomputes all analytics metrics for one incident and persists them.
 *
 * This is a full refresh: all previously stored metrics for the incident
 * are deleted, then rebuilt from the incident-state timeline:
 *  - IncidentCount     (always, value 1)
 *  - TimeToAcknowledge (only when an acknowledged state was reached)
 *  - TimeToResolve     (only when a resolved state was reached)
 *  - IncidentDuration  (first timeline entry start -> last timeline entry start)
 *
 * @param data.incidentId - incident to refresh metrics for.
 * @throws BadDataException when the incident does not exist or has no
 *   projectId.
 */
public async refreshIncidentMetrics(data: {
  incidentId: ObjectID;
}): Promise<void> {
  const incident: Model | null = await this.findOneById({
    id: data.incidentId,
    select: {
      projectId: true,
      monitors: {
        _id: true,
        name: true,
      },
      incidentSeverity: {
        _id: true,
        name: true,
      },
    },
    props: {
      isRoot: true,
    },
  });

  if (!incident) {
    throw new BadDataException("Incident not found");
  }

  if (!incident.projectId) {
    throw new BadDataException("Incident Project ID not found");
  }

  // Get the incident state timeline, oldest first, so index 0 is the state
  // the incident was created in.
  const incidentStateTimelines: Array<IncidentStateTimeline> =
    await IncidentStateTimelineService.findBy({
      query: {
        incidentId: data.incidentId,
      },
      select: {
        projectId: true,
        incidentStateId: true,
        incidentState: {
          isAcknowledgedState: true,
          isResolvedState: true,
        },
        startsAt: true,
        endsAt: true,
      },
      sort: {
        startsAt: SortOrder.Ascending,
      },
      skip: 0,
      limit: LIMIT_PER_PROJECT,
      props: {
        isRoot: true,
      },
    });

  const firstIncidentStateTimeline: IncidentStateTimeline | undefined =
    incidentStateTimelines[0];

  // Delete all existing metrics with this incident id because this is a
  // refresh.
  await MetricService.deleteBy({
    query: {
      serviceId: data.incidentId,
    },
    props: {
      isRoot: true,
    },
  });

  const itemsToSave: Array<Metric> = [];

  // Every metric of this incident carries the same attribute set — build it
  // once and spread a copy into each metric instead of duplicating it.
  const metricAttributes: Record<
    string,
    string | Array<string | undefined> | undefined
  > = {
    incidentId: data.incidentId.toString(),
    projectId: incident.projectId.toString(),
    monitorIds:
      incident.monitors?.map((monitor: Monitor) => {
        return monitor._id?.toString();
      }) || [],
    monitorNames:
      incident.monitors?.map((monitor: Monitor) => {
        return monitor.name?.toString();
      }) || [],
    incidentSeverityId: incident.incidentSeverity?._id?.toString(),
    incidentSeverityName: incident.incidentSeverity?.name?.toString(),
  };

  // The incident "starts" at its first timeline entry, falling back to the
  // record creation time.
  const incidentStartsAt: Date =
    firstIncidentStateTimeline?.startsAt ||
    incident.createdAt ||
    OneUptimeDate.getCurrentDate();

  // IncidentCount: one data point per incident.
  const incidentCountMetric: Metric = new Metric();
  incidentCountMetric.projectId = incident.projectId;
  incidentCountMetric.serviceId = incident.id!;
  incidentCountMetric.serviceType = ServiceType.Incident;
  incidentCountMetric.name = IncidentMetricType.IncidentCount;
  incidentCountMetric.description = "Number of incidents created";
  incidentCountMetric.value = 1;
  incidentCountMetric.unit = "";
  incidentCountMetric.attributes = { ...metricAttributes };
  incidentCountMetric.time = incidentStartsAt;
  incidentCountMetric.timeUnixNano = OneUptimeDate.toUnixNano(
    incidentCountMetric.time,
  );
  incidentCountMetric.metricPointType = MetricPointType.Sum;
  itemsToSave.push(incidentCountMetric);

  // TimeToAcknowledge: seconds from incident start to the first timeline
  // entry whose state is an acknowledged state. (The previous .some() +
  // .find() double scan collapsed into a single .find().)
  const ackIncidentStateTimeline: IncidentStateTimeline | undefined =
    incidentStateTimelines.find((timeline: IncidentStateTimeline) => {
      return timeline.incidentState?.isAcknowledgedState;
    });

  if (ackIncidentStateTimeline) {
    const timeToAcknowledgeMetric: Metric = new Metric();
    timeToAcknowledgeMetric.projectId = incident.projectId;
    timeToAcknowledgeMetric.serviceId = incident.id!;
    timeToAcknowledgeMetric.serviceType = ServiceType.Incident;
    timeToAcknowledgeMetric.name = IncidentMetricType.TimeToAcknowledge;
    timeToAcknowledgeMetric.description =
      "Time taken to acknowledge the incident";
    timeToAcknowledgeMetric.value = OneUptimeDate.getDifferenceInSeconds(
      ackIncidentStateTimeline.startsAt || OneUptimeDate.getCurrentDate(),
      incidentStartsAt,
    );
    timeToAcknowledgeMetric.unit = "seconds";
    timeToAcknowledgeMetric.attributes = { ...metricAttributes };
    timeToAcknowledgeMetric.time =
      ackIncidentStateTimeline.startsAt ||
      incident.createdAt ||
      OneUptimeDate.getCurrentDate();
    timeToAcknowledgeMetric.timeUnixNano = OneUptimeDate.toUnixNano(
      timeToAcknowledgeMetric.time,
    );
    timeToAcknowledgeMetric.metricPointType = MetricPointType.Sum;
    itemsToSave.push(timeToAcknowledgeMetric);
  }

  // TimeToResolve: seconds from incident start to the first timeline entry
  // whose state is a resolved state.
  const resolvedIncidentStateTimeline: IncidentStateTimeline | undefined =
    incidentStateTimelines.find((timeline: IncidentStateTimeline) => {
      return timeline.incidentState?.isResolvedState;
    });

  if (resolvedIncidentStateTimeline) {
    const timeToResolveMetric: Metric = new Metric();
    timeToResolveMetric.projectId = incident.projectId;
    timeToResolveMetric.serviceId = incident.id!;
    timeToResolveMetric.serviceType = ServiceType.Incident;
    timeToResolveMetric.name = IncidentMetricType.TimeToResolve;
    timeToResolveMetric.description = "Time taken to resolve the incident";
    timeToResolveMetric.value = OneUptimeDate.getDifferenceInSeconds(
      resolvedIncidentStateTimeline.startsAt ||
        OneUptimeDate.getCurrentDate(),
      incidentStartsAt,
    );
    timeToResolveMetric.unit = "seconds";
    timeToResolveMetric.attributes = { ...metricAttributes };
    timeToResolveMetric.time =
      resolvedIncidentStateTimeline.startsAt ||
      incident.createdAt ||
      OneUptimeDate.getCurrentDate();
    timeToResolveMetric.timeUnixNano = OneUptimeDate.toUnixNano(
      timeToResolveMetric.time,
    );
    timeToResolveMetric.metricPointType = MetricPointType.Sum;
    itemsToSave.push(timeToResolveMetric);
  }

  // IncidentDuration: first timeline entry start -> last timeline entry
  // start. NOTE(review): the *start* of the last state is treated as the
  // end of the incident, matching the original logic — confirm this is the
  // intended definition of "duration".
  const lastIncidentStateTimeline: IncidentStateTimeline | undefined =
    incidentStateTimelines[incidentStateTimelines.length - 1];

  if (lastIncidentStateTimeline) {
    const incidentEndsAt: Date =
      lastIncidentStateTimeline.startsAt || OneUptimeDate.getCurrentDate();

    const incidentDurationMetric: Metric = new Metric();
    incidentDurationMetric.projectId = incident.projectId;
    incidentDurationMetric.serviceId = incident.id!;
    incidentDurationMetric.serviceType = ServiceType.Incident;
    incidentDurationMetric.name = IncidentMetricType.IncidentDuration;
    incidentDurationMetric.description = "Duration of the incident";
    incidentDurationMetric.value = OneUptimeDate.getDifferenceInSeconds(
      incidentEndsAt,
      incidentStartsAt,
    );
    incidentDurationMetric.unit = "seconds";
    incidentDurationMetric.attributes = { ...metricAttributes };
    incidentDurationMetric.time =
      lastIncidentStateTimeline.startsAt ||
      incident.createdAt ||
      OneUptimeDate.getCurrentDate();
    incidentDurationMetric.timeUnixNano = OneUptimeDate.toUnixNano(
      incidentDurationMetric.time,
    );
    incidentDurationMetric.metricPointType = MetricPointType.Sum;

    // BUG FIX: previously the duration metric was built but never added to
    // itemsToSave, so it was silently dropped and never persisted.
    itemsToSave.push(incidentDurationMetric);
  }

  await MetricService.createMany({
    items: itemsToSave,
    props: {
      isRoot: true,
    },
  });

  // Index attribute keys for telemetry search. Fire-and-forget: indexing
  // failure must not fail the refresh.
  TelemetryUtil.indexAttributes({
    attributes: ["monitorIds", "projectId", "incidentId", "monitorNames"],
    projectId: incident.projectId,
    telemetryType: TelemetryType.Metric,
  }).catch((err: Error) => {
    logger.error(err);
  });
}
}
export default new Service();

View File

@@ -19,6 +19,9 @@ import IncidentState from "Common/Models/DatabaseModels/IncidentState";
import IncidentStateTimeline from "Common/Models/DatabaseModels/IncidentStateTimeline";
import User from "Common/Models/DatabaseModels/User";
import { IsBillingEnabled } from "../EnvironmentConfig";
import logger from "../Utils/Logger";
import IncidentFeedService from "./IncidentFeedService";
import { IncidentFeedEventType } from "../../Models/DatabaseModels/IncidentFeed";
export class Service extends DatabaseService<IncidentStateTimeline> {
public constructor() {
@@ -119,32 +122,17 @@ export class Service extends DatabaseService<IncidentStateTimeline> {
)?.["publicNote"] as string | undefined;
if (publicNote) {
const incidentPublicNote: IncidentPublicNote = new IncidentPublicNote();
incidentPublicNote.incidentId = createBy.data.incidentId;
incidentPublicNote.note = publicNote;
incidentPublicNote.postedAt = createBy.data.startsAt;
incidentPublicNote.createdAt = createBy.data.startsAt;
incidentPublicNote.projectId = createBy.data.projectId!;
incidentPublicNote.shouldStatusPageSubscribersBeNotifiedOnNoteCreated =
Boolean(createBy.data.shouldStatusPageSubscribersBeNotified);
// mark status page subscribers as notified for this state change because we dont want to send duplicate (two) emails one for public note and one for state change.
if (
incidentPublicNote.shouldStatusPageSubscribersBeNotifiedOnNoteCreated
) {
if (createBy.data.shouldStatusPageSubscribersBeNotified) {
createBy.data.isStatusPageSubscribersNotified = true;
}
await IncidentPublicNoteService.create({
data: incidentPublicNote,
props: createBy.props,
});
}
return {
createBy,
carryForward: {
lastIncidentStateTimelineId: lastIncidentStateTimeline?.id || null,
publicNote: publicNote,
},
};
}
@@ -188,20 +176,39 @@ export class Service extends DatabaseService<IncidentStateTimeline> {
// TODO: DELETE THIS WHEN WORKFLOW IS IMPLEMENMTED.
// check if this is resolved state, and if it is then resolve all the monitors.
const isResolvedState: IncidentState | null =
const incidentState: IncidentState | null =
await IncidentStateService.findOneBy({
query: {
_id: createdItem.incidentStateId.toString()!,
isResolvedState: true,
},
props: {
isRoot: true,
},
select: {
_id: true,
isResolvedState: true,
isAcknowledgedState: true,
isCreatedState: true,
color: true,
name: true,
},
});
const stateName: string = incidentState?.name || "";
await IncidentFeedService.createIncidentFeed({
incidentId: createdItem.incidentId!,
projectId: createdItem.projectId!,
incidentFeedEventType: IncidentFeedEventType.IncidentStateChanged,
displayColor: incidentState?.color,
feedInfoInMarkdown: "**Incident State** changed to **" + stateName + "**",
moreInformationInMarkdown: `**Cause:**
${createdItem.rootCause}`,
userId: createdItem.createdByUserId || onCreate.createBy.props.userId,
});
const isResolvedState: boolean = incidentState?.isResolvedState || false;
if (isResolvedState) {
const incident: Incident | null = await IncidentService.findOneBy({
query: {
@@ -227,6 +234,31 @@ export class Service extends DatabaseService<IncidentStateTimeline> {
}
}
if (onCreate.carryForward.publicNote) {
const publicNote: string = onCreate.carryForward.publicNote;
const incidentPublicNote: IncidentPublicNote = new IncidentPublicNote();
incidentPublicNote.incidentId = createdItem.incidentId;
incidentPublicNote.note = publicNote;
incidentPublicNote.postedAt = createdItem.startsAt!;
incidentPublicNote.createdAt = createdItem.startsAt!;
incidentPublicNote.projectId = createdItem.projectId!;
incidentPublicNote.shouldStatusPageSubscribersBeNotifiedOnNoteCreated =
Boolean(createdItem.shouldStatusPageSubscribersBeNotified);
await IncidentPublicNoteService.create({
data: incidentPublicNote,
props: onCreate.createBy.props,
});
}
IncidentService.refreshIncidentMetrics({
incidentId: createdItem.incidentId,
}).catch((error: Error) => {
logger.error(`Error while refreshing incident metrics:`);
logger.error(error);
});
return createdItem;
}

View File

@@ -41,7 +41,6 @@ import MonitorGroupOwnerTeamService from "./MonitorGroupOwnerTeamService";
import MonitorGroupOwnerUserService from "./MonitorGroupOwnerUserService";
import MonitorGroupResourceService from "./MonitorGroupResourceService";
import MonitorGroupService from "./MonitorGroupService";
import MonitorMetricsByMinuteService from "./MonitorMetricsByMinuteService";
import MonitorOwnerTeamService from "./MonitorOwnerTeamService";
import MonitorOwnerUserService from "./MonitorOwnerUserService";
import MonitorProbeService from "./MonitorProbeService";
@@ -148,6 +147,9 @@ import AlertOwnerUserService from "./AlertOwnerUserService";
import AlertSeverityService from "./AlertSeverityService";
import AlertNoteTemplateService from "./AlertNoteTemplateService";
import TableViewService from "./TableViewService";
import ScheduledMaintenanceFeedService from "./ScheduledMaintenanceFeedService";
import AlertFeedService from "./AlertFeedService";
import IncidentFeedService from "./IncidentFeedService";
const services: Array<BaseService> = [
AcmeCertificateService,
@@ -187,6 +189,7 @@ const services: Array<BaseService> = [
IncidentSeverityService,
IncidentStateService,
IncidentStateTimelineService,
IncidentFeedService,
LabelService,
@@ -223,6 +226,7 @@ const services: Array<BaseService> = [
ScheduledMaintenanceService,
ScheduledMaintenanceStateService,
ScheduledMaintenanceStateTimelineService,
ScheduledMaintenanceFeedService,
ShortLinkService,
SmsLogService,
@@ -307,6 +311,8 @@ const services: Array<BaseService> = [
AlertOwnerUserService,
AlertSeverityService,
AlertNoteTemplateService,
AlertFeedService,
TableViewService,
];
@@ -316,7 +322,6 @@ export const AnalyticsServices: Array<
LogService,
SpanService,
MetricService,
MonitorMetricsByMinuteService,
TelemetryAttributeService,
ExceptionInstanceService,
];

View File

@@ -1,14 +0,0 @@
import ClickhouseDatabase from "../Infrastructure/ClickhouseDatabase";
import AnalyticsDatabaseService from "./AnalyticsDatabaseService";
import MonitorMetricsByMinute from "Common/Models/AnalyticsModels/MonitorMetricsByMinute";
/**
 * Analytics (ClickHouse-backed) service for per-minute monitor metric rows.
 */
export class MonitorMetricsByMinuteService extends AnalyticsDatabaseService<MonitorMetricsByMinute> {
  /**
   * @param clickhouseDatabase - optional database handle override; when
   *   omitted, the base service falls back to its default connection.
   *   (The redundant `| undefined` union was dropped — `?` already makes
   *   the parameter optional.)
   */
  public constructor(clickhouseDatabase?: ClickhouseDatabase) {
    super({
      modelType: MonitorMetricsByMinute,
      database: clickhouseDatabase,
    });
  }
}

export default new MonitorMetricsByMinuteService();

View File

@@ -97,6 +97,11 @@ export class Service extends DatabaseService<MonitorStatusTimeline> {
},
});
if (!lastMonitorStatusTimeline) {
// since this is the first status, do not notify the owner.
createBy.data.isOwnerNotified = true;
}
return {
createBy,
carryForward: {

View File

@@ -1,5 +1,5 @@
import CreateBy from "../Types/Database/CreateBy";
import { OnCreate } from "../Types/Database/Hooks";
import { OnCreate, OnUpdate } from "../Types/Database/Hooks";
import DatabaseService from "./DatabaseService";
import OnCallDutyPolicyEscalationRuleService from "./OnCallDutyPolicyEscalationRuleService";
import OnCallDutyPolicyStatus from "../../Types/OnCallDutyPolicy/OnCallDutyPolicyStatus";
@@ -7,6 +7,13 @@ import UserNotificationEventType from "../../Types/UserNotification/UserNotifica
import OnCallDutyPolicyEscalationRule from "Common/Models/DatabaseModels/OnCallDutyPolicyEscalationRule";
import Model from "Common/Models/DatabaseModels/OnCallDutyPolicyExecutionLog";
import { IsBillingEnabled } from "../EnvironmentConfig";
import IncidentFeedService from "./IncidentFeedService";
import { IncidentFeedEventType } from "../../Models/DatabaseModels/IncidentFeed";
import { Blue500, Green500, Red500, Yellow500 } from "../../Types/BrandColors";
import OnCallDutyPolicy from "../../Models/DatabaseModels/OnCallDutyPolicy";
import OnCallDutyPolicyService from "./OnCallDutyPolicyService";
import ObjectID from "../../Types/ObjectID";
import Color from "../../Types/Color";
export class Service extends DatabaseService<Model> {
public constructor() {
@@ -32,6 +39,31 @@ export class Service extends DatabaseService<Model> {
_onCreate: OnCreate<Model>,
createdItem: Model,
): Promise<Model> {
if (createdItem.triggeredByIncidentId) {
const onCallPolicy: OnCallDutyPolicy | null =
await OnCallDutyPolicyService.findOneById({
id: createdItem.onCallDutyPolicyId!,
select: {
_id: true,
projectId: true,
name: true,
},
props: {
isRoot: true,
},
});
if (onCallPolicy && onCallPolicy.id) {
await IncidentFeedService.createIncidentFeed({
incidentId: createdItem.triggeredByIncidentId,
projectId: createdItem.projectId!,
incidentFeedEventType: IncidentFeedEventType.OnCallPolicy,
displayColor: Yellow500,
feedInfoInMarkdown: `**On Call Policy Started Executing:** On Call Policy **${onCallPolicy.name}** started executing. Users on call on this policy will now be notified.`,
});
}
}
// get execution rules in this policy adn execute the first rule.
const executionRule: OnCallDutyPolicyEscalationRule | null =
await OnCallDutyPolicyEscalationRuleService.findOneBy({
@@ -97,5 +129,88 @@ export class Service extends DatabaseService<Model> {
return createdItem;
}
/**
 * Picks the badge color for an on-call policy execution status:
 * green when completed, red on error, yellow while running, blue otherwise.
 *
 * @param status - the execution-log status to color.
 * @returns the brand color for that status.
 */
public getDisplayColorByStatus(status: OnCallDutyPolicyStatus): Color {
  if (status === OnCallDutyPolicyStatus.Completed) {
    return Green500;
  }

  if (status === OnCallDutyPolicyStatus.Error) {
    return Red500;
  }

  if (
    status === OnCallDutyPolicyStatus.Started ||
    status === OnCallDutyPolicyStatus.Executing
  ) {
    return Yellow500;
  }

  // Scheduled — and any status added in the future — renders as blue.
  return Blue500;
}
protected override async onUpdateSuccess(
onUpdate: OnUpdate<Model>,
_updatedItemIds: Array<ObjectID>,
): Promise<OnUpdate<Model>> {
// if status is updtaed then check if this on-call is related to the incident, if yes, then add to incident feed.
if (onUpdate.updateBy.data.status && onUpdate.updateBy.query._id) {
const id: ObjectID = onUpdate.updateBy.query._id! as ObjectID;
const onCalldutyPolicyExecutionLog: Model | null = await this.findOneById(
{
id: id,
select: {
_id: true,
projectId: true,
onCallDutyPolicyId: true,
status: true,
statusMessage: true,
triggeredByIncidentId: true,
},
props: {
isRoot: true,
},
},
);
if (
onCalldutyPolicyExecutionLog &&
onCalldutyPolicyExecutionLog.triggeredByIncidentId
) {
const onCallPolicy: OnCallDutyPolicy | null =
await OnCallDutyPolicyService.findOneById({
id: onCalldutyPolicyExecutionLog.onCallDutyPolicyId!,
select: {
_id: true,
projectId: true,
name: true,
},
props: {
isRoot: true,
},
});
if (onCallPolicy && onCallPolicy.id) {
await IncidentFeedService.createIncidentFeed({
incidentId: onCalldutyPolicyExecutionLog.triggeredByIncidentId,
projectId: onCalldutyPolicyExecutionLog.projectId!,
incidentFeedEventType: IncidentFeedEventType.OnCallPolicy,
displayColor: onCalldutyPolicyExecutionLog.status
? this.getDisplayColorByStatus(
onCalldutyPolicyExecutionLog.status,
)
: Blue500,
moreInformationInMarkdown: `**Status:** ${onCalldutyPolicyExecutionLog.status}
**Message:** ${onCalldutyPolicyExecutionLog.statusMessage}`,
feedInfoInMarkdown: `**On Call Policy Status Updated:**
On-call policy **${onCallPolicy.name}** status updated to **${onCalldutyPolicyExecutionLog.status}**`,
});
}
}
}
return onUpdate;
}
}
export default new Service();

View File

@@ -1,9 +1,178 @@
import { OnCreate, OnUpdate } from "../Types/Database/Hooks";
import DatabaseService from "./DatabaseService";
import Model from "Common/Models/DatabaseModels/OnCallDutyPolicyExecutionLogTimeline";
import IncidentFeedService from "./IncidentFeedService";
import { IncidentFeedEventType } from "../../Models/DatabaseModels/IncidentFeed";
import OnCallDutyExecutionLogTimelineStatus from "../../Types/OnCallDutyPolicy/OnCalDutyExecutionLogTimelineStatus";
import { Blue500, Green500, Red500, Yellow500 } from "../../Types/BrandColors";
import Color from "../../Types/Color";
import ObjectID from "../../Types/ObjectID";
import logger from "../Utils/Logger";
import { LIMIT_PER_PROJECT } from "../../Types/Database/LimitMax";
/**
 * Service for on-call duty policy execution-log timeline entries.
 *
 * Besides plain CRUD, it mirrors notable timeline events (notification sent,
 * acknowledged, skipped, error) into the feed of the incident that triggered
 * the on-call execution.
 */
export class Service extends DatabaseService<Model> {
  public constructor() {
    super(Model);
  }

  /**
   * Maps a timeline status to the display color of its feed entry:
   * green for success states, red for skipped/error, yellow while executing,
   * blue otherwise.
   */
  public getColorBasedOnStatus(
    status: OnCallDutyExecutionLogTimelineStatus,
  ): Color {
    switch (status) {
      case OnCallDutyExecutionLogTimelineStatus.Started:
        return Blue500;
      case OnCallDutyExecutionLogTimelineStatus.Executing:
        return Yellow500;
      case OnCallDutyExecutionLogTimelineStatus.Error:
        return Red500;
      case OnCallDutyExecutionLogTimelineStatus.NotificationSent:
        return Green500;
      case OnCallDutyExecutionLogTimelineStatus.Skipped:
        return Red500;
      case OnCallDutyExecutionLogTimelineStatus.SuccessfullyAcknowledged:
        return Green500;
      default:
        return Blue500;
    }
  }

  /**
   * Creates an incident-feed entry for one timeline item.
   *
   * No-ops when the item does not exist, was not triggered by an incident,
   * has no populated on-call policy, or its status is not one of the
   * reportable ones (Skipped, Error, SuccessfullyAcknowledged,
   * NotificationSent).
   *
   * @param data.onCallDutyPolicyExecutionLogTimelineId - timeline item id.
   */
  public async addToIncidentFeed(data: {
    onCallDutyPolicyExecutionLogTimelineId: ObjectID;
  }): Promise<void> {
    logger.debug(
      "OnCallDutyPolicyExecutionLogTimelineService.addToIncidentFeed",
    );

    const onCallDutyPolicyExecutionLogTimeline: Model | null =
      await this.findOneById({
        id: data.onCallDutyPolicyExecutionLogTimelineId,
        select: {
          _id: true,
          onCallDutyPolicyId: true,
          triggeredByIncidentId: true,
          projectId: true,
          status: true,
          statusMessage: true,
          alertSentToUserId: true,
          onCallDutyPolicy: {
            name: true,
            _id: true,
          },
          alertSentToUser: {
            name: true,
            email: true,
          },
          onCallDutyPolicyEscalationRule: {
            name: true,
            _id: true,
          },
          onCallDutySchedule: {
            name: true,
            _id: true,
          },
          isAcknowledged: true,
          acknowledgedAt: true,
          userBelongsToTeam: {
            name: true,
            _id: true,
          },
        },
        props: {
          isRoot: true,
        },
      });

    logger.debug("OnCallDutyPolicyExecutionLogTimeline: ");
    logger.debug(onCallDutyPolicyExecutionLogTimeline);

    if (!onCallDutyPolicyExecutionLogTimeline) {
      return;
    }

    // Only executions triggered by an incident have a feed to write to.
    if (!onCallDutyPolicyExecutionLogTimeline.triggeredByIncidentId) {
      return;
    }

    if (
      onCallDutyPolicyExecutionLogTimeline.onCallDutyPolicy &&
      onCallDutyPolicyExecutionLogTimeline.onCallDutyPolicy.id
    ) {
      const status: OnCallDutyExecutionLogTimelineStatus =
        onCallDutyPolicyExecutionLogTimeline.status!;

      logger.debug("Status: " + status);

      // Only terminal / user-visible statuses are worth a feed entry.
      if (
        status &&
        (status === OnCallDutyExecutionLogTimelineStatus.Skipped ||
          status === OnCallDutyExecutionLogTimelineStatus.Error ||
          status ===
            OnCallDutyExecutionLogTimelineStatus.SuccessfullyAcknowledged ||
          status === OnCallDutyExecutionLogTimelineStatus.NotificationSent)
      ) {
        const displayColor: Color = status
          ? this.getColorBasedOnStatus(status)
          : Blue500;

        // BUG FIX: user-facing typo "belogs" corrected to "belongs".
        const feedInfoInMarkdown: string = `**On-call alert ${status} to ${onCallDutyPolicyExecutionLogTimeline.alertSentToUser?.name?.toString().trim()}**
The on-call policy **${onCallDutyPolicyExecutionLogTimeline.onCallDutyPolicy.name}** has been triggered. The escalation rule **${onCallDutyPolicyExecutionLogTimeline.onCallDutyPolicyEscalationRule?.name}** ${onCallDutyPolicyExecutionLogTimeline.onCallDutySchedule?.name ? String(" and schedule **" + onCallDutyPolicyExecutionLogTimeline.onCallDutySchedule?.name + "**") : ""} were applied. The user **${onCallDutyPolicyExecutionLogTimeline.alertSentToUser?.name}** (${onCallDutyPolicyExecutionLogTimeline.alertSentToUser?.email}) was alerted. The status of this alert is **${status}** with the message: \`${onCallDutyPolicyExecutionLogTimeline.statusMessage}\`. ${onCallDutyPolicyExecutionLogTimeline.userBelongsToTeam?.name ? "The alert was sent because the user belongs to the team **" + onCallDutyPolicyExecutionLogTimeline.userBelongsToTeam?.name + "**" : ""} ${onCallDutyPolicyExecutionLogTimeline.isAcknowledged ? "The alert was acknowledged at **" + onCallDutyPolicyExecutionLogTimeline.acknowledgedAt + "**" : ""}`;

        logger.debug("Feed Info in Markdown: " + feedInfoInMarkdown);

        await IncidentFeedService.createIncidentFeed({
          incidentId:
            onCallDutyPolicyExecutionLogTimeline.triggeredByIncidentId,
          projectId: onCallDutyPolicyExecutionLogTimeline.projectId!,
          incidentFeedEventType: IncidentFeedEventType.OnCallPolicy,
          displayColor: displayColor,
          feedInfoInMarkdown: feedInfoInMarkdown,
        });

        logger.debug("Incident Feed created");
      }
    }
  }

  /**
   * After a timeline item is created, tries to mirror it into the incident
   * feed (addToIncidentFeed filters out non-reportable items itself).
   */
  protected override async onCreateSuccess(
    _onCreate: OnCreate<Model>,
    createdItem: Model,
  ): Promise<Model> {
    logger.debug("OnCallDutyPolicyExecutionLogTimelineService.onCreateSuccess");
    logger.debug(createdItem);

    await this.addToIncidentFeed({
      onCallDutyPolicyExecutionLogTimelineId: createdItem.id!,
    });

    return createdItem;
  }

  /**
   * After an update, re-runs the feed mirroring for every affected timeline
   * item (the update query may match more than one row).
   */
  protected override async onUpdateSuccess(
    onUpdate: OnUpdate<Model>,
    _updatedItemIds: Array<ObjectID>,
  ): Promise<OnUpdate<Model>> {
    if (onUpdate.updateBy.query) {
      const updatedItems: Array<Model> = await this.findBy({
        query: onUpdate.updateBy.query,
        props: {
          isRoot: true,
        },
        select: {
          _id: true,
        },
        limit: LIMIT_PER_PROJECT,
        skip: 0,
      });

      for (const updatedItem of updatedItems) {
        await this.addToIncidentFeed({
          onCallDutyPolicyExecutionLogTimelineId: updatedItem.id as ObjectID,
        });
      }
    }

    return onUpdate;
  }
}

export default new Service();

View File

@@ -1,14 +1,10 @@
import ArrayUtil from "Common/Utils/Array";
import OneUptimeDate from "Common/Types/Date";
import { JSONArray, JSONObject, JSONValue } from "Common/Types/JSON";
import { JSONArray, JSONObject } from "Common/Types/JSON";
import JSONFunctions from "Common/Types/JSONFunctions";
import ObjectID from "Common/Types/ObjectID";
import GlobalCache from "Common/Server/Infrastructure/GlobalCache";
import Metric, {
AggregationTemporality,
} from "Common/Models/AnalyticsModels/Metric";
import TelemetryType from "Common/Types/Telemetry/TelemetryType";
import TelemetryAttributeService from "Common/Server/Services/TelemetryAttributeService";
import Dictionary from "Common/Types/Dictionary";
import ProductType from "Common/Types/MeteredPlan/ProductType";
import { IsBillingEnabled } from "Common/Server/EnvironmentConfig";
@@ -17,6 +13,7 @@ import logger from "Common/Server/Utils/Logger";
import TelemetryService from "Common/Models/DatabaseModels/TelemetryService";
import TelemetryServiceService from "Common/Server/Services/TelemetryServiceService";
import { DEFAULT_RETENTION_IN_DAYS } from "Common/Models/DatabaseModels/TelemetryUsageBilling";
import TelemetryUtil from "Common/Server/Utils/Telemetry/Telemetry";
export enum OtelAggregationTemporality {
Cumulative = "AGGREGATION_TEMPORALITY_CUMULATIVE",
@@ -113,151 +110,6 @@ export default class OTelIngestService {
}
}
public static async indexAttributes(data: {
attributes: string[];
projectId: ObjectID;
telemetryType: TelemetryType;
}): Promise<void> {
// index attributes
const cacheKey: string =
data.projectId.toString() + "_" + data.telemetryType;
// get keys from cache
const cacheKeys: string[] =
(await GlobalCache.getStringArray("telemetryAttributesKeys", cacheKey)) ||
[];
let isKeysMissingInCache: boolean = false;
// check if keys are missing in cache
for (const key of data.attributes) {
if (!cacheKeys.includes(key)) {
isKeysMissingInCache = true;
break;
}
}
// merge keys and remove duplicates
if (isKeysMissingInCache) {
const dbKeys: string[] = await TelemetryAttributeService.fetchAttributes({
projectId: data.projectId,
telemetryType: data.telemetryType,
});
const mergedKeys: Array<string> = ArrayUtil.removeDuplicates([
...dbKeys,
...data.attributes,
...cacheKeys,
]);
await GlobalCache.setStringArray(
"telemetryAttributesKeys",
cacheKey,
mergedKeys,
);
await TelemetryAttributeService.refreshAttributes({
projectId: data.projectId,
telemetryType: data.telemetryType,
attributes: mergedKeys,
});
}
}
public static getAttributes(data: {
items: JSONArray;
telemetryServiceId?: ObjectID;
telemetryServiceName?: string;
}): JSONObject {
const { items } = data;
const finalObj: JSONObject = {};
// We need to convert this to date.
const attributes: JSONArray = items;
type GetValueFunction = (value: JSONValue) => JSONValue;
const getValue: GetValueFunction = (value: JSONValue): JSONValue => {
value = value as JSONObject;
if (value["stringValue"]) {
value = value["stringValue"] as string;
} else if (value["intValue"]) {
value = value["intValue"] as number;
} else if (value["doubleValue"]) {
value = value["doubleValue"] as number;
} else if (value["boolValue"]) {
value = value["boolValue"] as boolean;
} else if (
value["arrayValue"] &&
(value["arrayValue"] as JSONObject)["values"]
) {
value = (
(value["arrayValue"] as JSONObject)["values"] as JSONArray
).map((v: JSONObject) => {
return getValue(v);
});
} else if (
value["mapValue"] &&
(value["mapValue"] as JSONObject)["fields"]
) {
value = getValue((value["mapValue"] as JSONObject)["fields"]);
} else if (value["nullValue"]) {
value = null;
}
return value;
};
if (attributes) {
for (const attribute of attributes) {
if (attribute["key"] && typeof attribute["key"] === "string") {
const value: JSONValue = getValue(attribute["value"]);
finalObj[attribute["key"]] = value;
}
}
}
// add oneuptime specific attributes
if (!finalObj["oneuptime"]) {
finalObj["oneuptime"] = {};
}
if (!(finalObj["oneuptime"] as JSONObject)["telemetry"]) {
(finalObj["oneuptime"] as JSONObject)["telemetry"] = {};
}
if (
!((finalObj["oneuptime"] as JSONObject)["telemetry"] as JSONObject)[
"service"
]
) {
((finalObj["oneuptime"] as JSONObject)["telemetry"] as JSONObject)[
"service"
] = {};
}
if (data.telemetryServiceId) {
(
((finalObj["oneuptime"] as JSONObject)["telemetry"] as JSONObject)[
"service"
] as JSONObject
)["id"] = data.telemetryServiceId.toString();
}
if (data.telemetryServiceName) {
(
((finalObj["oneuptime"] as JSONObject)["telemetry"] as JSONObject)[
"service"
] as JSONObject
)["name"] = data.telemetryServiceName;
}
return JSONFunctions.flattenObject(finalObj);
}
public static getMetricFromDatapoint(data: {
dbMetric: Metric;
datapoint: JSONObject;
@@ -311,7 +163,7 @@ export default class OTelIngestService {
newDbMetric.attributes = {
...(newDbMetric.attributes || {}),
...this.getAttributes({
...TelemetryUtil.getAttributes({
items: datapoint["attributes"] as JSONArray,
telemetryServiceId: data.telemetryServiceId,
telemetryServiceName: data.telemetryServiceName,

View File

@@ -0,0 +1,83 @@
import { Blue500 } from "../../Types/BrandColors";
import Color from "../../Types/Color";
import OneUptimeDate from "../../Types/Date";
import BadDataException from "../../Types/Exception/BadDataException";
import ObjectID from "../../Types/ObjectID";
import { IsBillingEnabled } from "../EnvironmentConfig";
import DatabaseService from "./DatabaseService";
import Model, {
ScheduledMaintenanceFeedEventType,
} from "Common/Models/DatabaseModels/ScheduledMaintenanceFeed";
export class Service extends DatabaseService<Model> {
public constructor() {
super(Model);
if (IsBillingEnabled) {
this.hardDeleteItemsOlderThanInDays("createdAt", 120);
}
}
public async createScheduledMaintenanceFeed(data: {
scheduledMaintenanceId: ObjectID;
feedInfoInMarkdown: string;
scheduledMaintenanceFeedEventType: ScheduledMaintenanceFeedEventType;
projectId: ObjectID;
moreInformationInMarkdown?: string | undefined;
displayColor?: Color | undefined;
userId?: ObjectID | undefined;
postedAt?: Date | undefined;
}): Promise<Model> {
if (!data.scheduledMaintenanceId) {
throw new BadDataException("Scheduled Maintenance ID is required");
}
if (!data.feedInfoInMarkdown) {
throw new BadDataException("Log in markdown is required");
}
if (!data.scheduledMaintenanceFeedEventType) {
throw new BadDataException("Scheduled Maintenance log event is required");
}
if (!data.projectId) {
throw new BadDataException("Project ID is required");
}
if (!data.displayColor) {
data.displayColor = Blue500;
}
const scheduledMaintenanceFeed: Model = new Model();
scheduledMaintenanceFeed.displayColor = data.displayColor;
scheduledMaintenanceFeed.scheduledMaintenanceId =
data.scheduledMaintenanceId;
scheduledMaintenanceFeed.feedInfoInMarkdown = data.feedInfoInMarkdown;
scheduledMaintenanceFeed.scheduledMaintenanceFeedEventType =
data.scheduledMaintenanceFeedEventType;
scheduledMaintenanceFeed.projectId = data.projectId;
if (data.userId) {
scheduledMaintenanceFeed.userId = data.userId;
}
if (data.moreInformationInMarkdown) {
scheduledMaintenanceFeed.moreInformationInMarkdown =
data.moreInformationInMarkdown;
}
if (!data.postedAt) {
scheduledMaintenanceFeed.postedAt = OneUptimeDate.getCurrentDate();
}
return await this.create({
data: scheduledMaintenanceFeed,
props: {
isRoot: true,
},
});
}
}
export default new Service();

View File

@@ -454,8 +454,8 @@ export class Service extends DatabaseService<StatusPage> {
}
public async getStatusPageURL(statusPageId: ObjectID): Promise<string> {
const domains: Array<StatusPageDomain> =
await StatusPageDomainService.findBy({
const domain: StatusPageDomain | null =
await StatusPageDomainService.findOneBy({
query: {
statusPageId: statusPageId,
isSslProvisioned: true,
@@ -463,21 +463,15 @@ export class Service extends DatabaseService<StatusPage> {
select: {
fullDomain: true,
},
skip: 0,
limit: LIMIT_PER_PROJECT,
props: {
isRoot: true,
ignoreHooks: true,
},
});
let statusPageURL: string = domains
.map((d: StatusPageDomain) => {
return d.fullDomain;
})
.join(", ");
let statusPageURL: string = domain?.fullDomain || "";
if (domains.length === 0) {
if (!statusPageURL) {
const host: Hostname = await DatabaseConfig.getHost();
const httpProtocol: Protocol = await DatabaseConfig.getHttpProtocol();
@@ -750,11 +744,11 @@ export class Service extends DatabaseService<StatusPage> {
statusPageId: data.statusPageId,
});
const numberOfDays: number = data.historyDays || 14;
const currentDate: Date = OneUptimeDate.getCurrentDate();
const startDate: Date = OneUptimeDate.getSomeDaysAgo(
data.historyDays || 14,
);
const startAndEndDate: string = `${OneUptimeDate.getDateAsLocalFormattedString(startDate, true)} - ${OneUptimeDate.getDateAsLocalFormattedString(currentDate, true)}`;
const startDate: Date = OneUptimeDate.getSomeDaysAgo(numberOfDays);
const startAndEndDate: string = `${numberOfDays} days (${OneUptimeDate.getDateAsLocalFormattedString(startDate, true)} - ${OneUptimeDate.getDateAsLocalFormattedString(currentDate, true)})`;
if (statusPageResources.length === 0) {
return {

View File

@@ -29,6 +29,7 @@ import StatusPageResource from "Common/Models/DatabaseModels/StatusPageResource"
import Model from "Common/Models/DatabaseModels/StatusPageSubscriber";
import PositiveNumber from "../../Types/PositiveNumber";
import StatusPageEventType from "../../Types/StatusPage/StatusPageEventType";
import NumberUtil from "../../Utils/Number";
export class Service extends DatabaseService<Model> {
public constructor() {
@@ -160,6 +161,22 @@ export class Service extends DatabaseService<Model> {
data.data.projectId = statuspage.projectId;
const isEmailSubscriber: boolean = Boolean(data.data.subscriberEmail);
const isSubscriptionConfirmed: boolean = Boolean(
data.data.isSubscriptionConfirmed,
);
if (isEmailSubscriber && !isSubscriptionConfirmed) {
data.data.isSubscriptionConfirmed = false;
} else {
data.data.isSubscriptionConfirmed = true; // if the subscriber is not email, then set it to true for SMS subscribers.
}
data.data.subscriptionConfirmationToken = NumberUtil.getRandomNumber(
100000,
999999,
).toString();
return { createBy: data, carryForward: statuspage };
}
@@ -180,10 +197,6 @@ export class Service extends DatabaseService<Model> {
onCreate.carryForward.name ||
"Status Page";
const host: Hostname = await DatabaseConfig.getHost();
const httpProtocol: Protocol = await DatabaseConfig.getHttpProtocol();
const unsubscribeLink: string = this.getUnsubscribeLink(
URL.fromString(statusPageURL),
createdItem.id!,
@@ -237,28 +250,235 @@ export class Service extends DatabaseService<Model> {
if (
createdItem.statusPageId &&
createdItem.subscriberEmail &&
createdItem._id &&
createdItem.sendYouHaveSubscribedMessage
createdItem._id
) {
// Call mail service and send an email.
// get status page domain for this status page.
// if the domain is not found, use the internal status page preview link.
const isSubcriptionConfirmed: boolean = Boolean(
createdItem.isSubscriptionConfirmed,
);
if (!isSubcriptionConfirmed) {
await this.sendConfirmSubscriptionEmail({
subscriberId: createdItem.id!,
});
}
if (isSubcriptionConfirmed && createdItem.sendYouHaveSubscribedMessage) {
await this.sendYouHaveSubscribedEmail({
subscriberId: createdItem.id!,
});
}
}
return createdItem;
}
public async sendConfirmSubscriptionEmail(data: {
subscriberId: ObjectID;
}): Promise<void> {
// get subscriber
const subscriber: Model | null = await this.findOneBy({
query: {
_id: data.subscriberId,
},
select: {
statusPageId: true,
subscriberEmail: true,
subscriberPhone: true,
projectId: true,
subscriptionConfirmationToken: true,
sendYouHaveSubscribedMessage: true,
},
props: {
isRoot: true,
ignoreHooks: true,
},
});
// get status page
if (!subscriber || !subscriber.statusPageId) {
return;
}
const statusPage: StatusPage | null = await StatusPageService.findOneBy({
query: {
_id: subscriber.statusPageId.toString(),
},
select: {
logoFileId: true,
isPublicStatusPage: true,
pageTitle: true,
name: true,
smtpConfig: {
_id: true,
hostname: true,
port: true,
username: true,
password: true,
fromEmail: true,
fromName: true,
secure: true,
},
},
props: {
isRoot: true,
ignoreHooks: true,
},
});
if (!statusPage || !statusPage.id) {
return;
}
const statusPageURL: string = await StatusPageService.getStatusPageURL(
statusPage.id,
);
const statusPageName: string =
statusPage.pageTitle || statusPage.name || "Status Page";
const host: Hostname = await DatabaseConfig.getHost();
const httpProtocol: Protocol = await DatabaseConfig.getHttpProtocol();
const confirmSubscriptionLink: string = this.getConfirmSubscriptionLink({
statusPageUrl: statusPageURL,
confirmationToken: subscriber.subscriptionConfirmationToken || "",
statusPageSubscriberId: subscriber.id!,
}).toString();
if (
subscriber.statusPageId &&
subscriber.subscriberEmail &&
subscriber._id
) {
MailService.sendMail(
{
toEmail: createdItem.subscriberEmail,
templateType: EmailTemplateType.SubscribedToStatusPage,
toEmail: subscriber.subscriberEmail,
templateType: EmailTemplateType.ConfirmStatusPageSubscription,
vars: {
statusPageName: statusPageName,
logoUrl: onCreate.carryForward.logoFileId
logoUrl: statusPage.logoFileId
? new URL(httpProtocol, host)
.addRoute(FileRoute)
.addRoute("/image/" + onCreate.carryForward.logoFileId)
.addRoute("/image/" + statusPage.logoFileId)
.toString()
: "",
statusPageUrl: statusPageURL,
isPublicStatusPage: onCreate.carryForward.isPublicStatusPage
isPublicStatusPage: statusPage.isPublicStatusPage
? "true"
: "false",
confirmationUrl: confirmSubscriptionLink,
},
subject: "Confirm your subscription to " + statusPageName,
},
{
projectId: subscriber.projectId,
mailServer: ProjectSMTPConfigService.toEmailServer(
statusPage.smtpConfig,
),
},
).catch((err: Error) => {
logger.error(err);
});
}
}
public async sendYouHaveSubscribedEmail(data: {
subscriberId: ObjectID;
}): Promise<void> {
// get subscriber
const subscriber: Model | null = await this.findOneBy({
query: {
_id: data.subscriberId,
},
select: {
statusPageId: true,
subscriberEmail: true,
subscriberPhone: true,
projectId: true,
sendYouHaveSubscribedMessage: true,
},
props: {
isRoot: true,
ignoreHooks: true,
},
});
// get status page
if (!subscriber || !subscriber.statusPageId) {
return;
}
const statusPage: StatusPage | null = await StatusPageService.findOneBy({
query: {
_id: subscriber.statusPageId.toString(),
},
select: {
logoFileId: true,
isPublicStatusPage: true,
pageTitle: true,
name: true,
smtpConfig: {
_id: true,
hostname: true,
port: true,
username: true,
password: true,
fromEmail: true,
fromName: true,
secure: true,
},
},
props: {
isRoot: true,
ignoreHooks: true,
},
});
if (!statusPage || !statusPage.id) {
return;
}
const statusPageURL: string = await StatusPageService.getStatusPageURL(
statusPage.id,
);
const statusPageName: string =
statusPage.pageTitle || statusPage.name || "Status Page";
const host: Hostname = await DatabaseConfig.getHost();
const httpProtocol: Protocol = await DatabaseConfig.getHttpProtocol();
const unsubscribeLink: string = this.getUnsubscribeLink(
URL.fromString(statusPageURL),
subscriber.id!,
).toString();
if (
subscriber.statusPageId &&
subscriber.subscriberEmail &&
subscriber._id
) {
MailService.sendMail(
{
toEmail: subscriber.subscriberEmail,
templateType: EmailTemplateType.SubscribedToStatusPage,
vars: {
statusPageName: statusPageName,
logoUrl: statusPage.logoFileId
? new URL(httpProtocol, host)
.addRoute(FileRoute)
.addRoute("/image/" + statusPage.logoFileId)
.toString()
: "",
statusPageUrl: statusPageURL,
isPublicStatusPage: statusPage.isPublicStatusPage
? "true"
: "false",
unsubscribeUrl: unsubscribeLink,
@@ -266,17 +486,25 @@ export class Service extends DatabaseService<Model> {
subject: "You have been subscribed to " + statusPageName,
},
{
projectId: createdItem.projectId,
projectId: subscriber.projectId,
mailServer: ProjectSMTPConfigService.toEmailServer(
onCreate.carryForward.smtpConfig,
statusPage.smtpConfig,
),
},
).catch((err: Error) => {
logger.error(err);
});
}
}
return createdItem;
public getConfirmSubscriptionLink(data: {
statusPageUrl: string;
confirmationToken: string;
statusPageSubscriberId: ObjectID;
}): URL {
return URL.fromString(data.statusPageUrl).addRoute(
`/confirm-subscription/${data.statusPageSubscriberId.toString()}?verification-token=${data.confirmationToken}`,
);
}
public async getSubscribersByStatusPage(
@@ -287,6 +515,7 @@ export class Service extends DatabaseService<Model> {
query: {
statusPageId: statusPageId,
isUnsubscribed: false,
isSubscriptionConfirmed: true,
},
select: {
_id: true,

View File

@@ -145,7 +145,7 @@ export class Service extends DatabaseService<Model> {
data: {
status: UserNotificationExecutionStatus.Error, // now the worker will pick this up and complete this or mark this as failed.
statusMessage:
"No notification rules found. Please add rules in User Settings > On-Call Rules.",
"No notification rules found for this user. User should add the rules in User Settings > On-Call Rules.",
},
props: {
isRoot: true,
@@ -158,7 +158,7 @@ export class Service extends DatabaseService<Model> {
data: {
status: OnCallDutyExecutionLogTimelineStatus.Error,
statusMessage:
"No notification rules found. Please add rules in User Settings > On-Call Rules.",
"No notification rules found for this user. User should add the rules in User Settings > On-Call Rules.",
},
props: {
isRoot: true,

View File

@@ -2,6 +2,7 @@ import AggregationInterval from "Common/Types/BaseDatabase/AggregationInterval";
import CommonAggregateBy from "Common/Types/BaseDatabase/AggregateBy";
import AnalyticsBaseModel from "Common/Models/AnalyticsModels/AnalyticsBaseModel/AnalyticsBaseModel";
import DatabaseCommonInteractionProps from "Common/Types/BaseDatabase/DatabaseCommonInteractionProps";
import OneUptimeDate from "../../../Types/Date";
export default interface AggregateBy<TBaseModel extends AnalyticsBaseModel>
extends CommonAggregateBy<TBaseModel> {
@@ -13,21 +14,24 @@ export class AggregateUtil {
startDate: Date;
endDate: Date;
}): AggregationInterval {
data.startDate = OneUptimeDate.fromString(data.startDate);
data.endDate = OneUptimeDate.fromString(data.endDate);
const diff: number = data.endDate.getTime() - data.startDate.getTime();
if (diff <= 1000 * 60 * 60 * 3) {
// if less than 3 hours, then get minute precision
return AggregationInterval.Minute;
} else if (diff <= 1000 * 60 * 60 * 24 * 3) {
} else if (diff <= 1000 * 60 * 60 * 24 * 7) {
// 3 days
return AggregationInterval.Hour;
} else if (diff <= 1000 * 60 * 60 * 24 * 7 * 3) {
} else if (diff <= 1000 * 60 * 60 * 24 * 7 * 6) {
// 3 weeks
return AggregationInterval.Day;
} else if (diff <= 1000 * 60 * 60 * 24 * 30 * 3) {
} else if (diff <= 1000 * 60 * 60 * 24 * 30 * 6) {
// 3 months
return AggregationInterval.Week;
} else if (diff <= 1000 * 60 * 60 * 24 * 365 * 3) {
} else if (diff <= 1000 * 60 * 60 * 24 * 365 * 6) {
// 3 years
return AggregationInterval.Month;
}

View File

@@ -8,6 +8,23 @@ import { FindOperator, Raw } from "typeorm";
import { FindWhereProperty } from "../../../Types/BaseDatabase/Query";
export default class QueryHelper {
public static modulo(
moduloBy: number,
reminder: number,
): FindWhereProperty<any> {
const rid: string = Text.generateRandomText(10);
const rid2: string = Text.generateRandomText(10);
return Raw(
(alias: string) => {
return `(${alias} % :${rid} = :${rid2})`;
},
{
[rid]: moduloBy,
[rid2]: reminder,
},
);
}
public static findWithSameText(
text: string | number,
): FindWhereProperty<any> {

View File

@@ -4,7 +4,6 @@ import HTTPErrorResponse from "Common/Types/API/HTTPErrorResponse";
import HTTPResponse from "Common/Types/API/HTTPResponse";
import URL from "Common/Types/API/URL";
import Dictionary from "Common/Types/Dictionary";
import APIException from "Common/Types/Exception/ApiException";
import BadDataException from "Common/Types/Exception/BadDataException";
import { JSONObject } from "Common/Types/JSON";
import ComponentMetadata, { Port } from "Common/Types/Workflow/Component";
@@ -53,18 +52,23 @@ export default class ApiDelete extends ComponentCode {
if (err instanceof HTTPErrorResponse) {
return Promise.resolve({
returnValues: ApiComponentUtils.getReturnValues(err),
executePort: result.successPort,
executePort: result.errorPort,
});
}
if (apiResult) {
return Promise.resolve({
returnValues: ApiComponentUtils.getReturnValues(apiResult),
executePort: result.successPort,
executePort: result.errorPort,
});
}
throw options.onError(new APIException("Something wrong happened."));
return Promise.resolve({
returnValues: {
errorMessage: (err as Error).message || "Unknown error",
},
executePort: result.errorPort,
});
}
}
}

View File

@@ -4,7 +4,6 @@ import HTTPErrorResponse from "Common/Types/API/HTTPErrorResponse";
import HTTPResponse from "Common/Types/API/HTTPResponse";
import URL from "Common/Types/API/URL";
import Dictionary from "Common/Types/Dictionary";
import APIException from "Common/Types/Exception/ApiException";
import BadDataException from "Common/Types/Exception/BadDataException";
import { JSONObject } from "Common/Types/JSON";
import ComponentMetadata, { Port } from "Common/Types/Workflow/Component";
@@ -53,18 +52,23 @@ export default class ApiGet extends ComponentCode {
if (err instanceof HTTPErrorResponse) {
return Promise.resolve({
returnValues: ApiComponentUtils.getReturnValues(err),
executePort: result.successPort,
executePort: result.errorPort,
});
}
if (apiResult) {
return Promise.resolve({
returnValues: ApiComponentUtils.getReturnValues(apiResult),
executePort: result.successPort,
executePort: result.errorPort,
});
}
throw options.onError(new APIException("Something wrong happened."));
return Promise.resolve({
returnValues: {
errorMessage: (err as Error).message || "Unknown error",
},
executePort: result.errorPort,
});
}
}
}

View File

@@ -4,7 +4,6 @@ import HTTPErrorResponse from "Common/Types/API/HTTPErrorResponse";
import HTTPResponse from "Common/Types/API/HTTPResponse";
import URL from "Common/Types/API/URL";
import Dictionary from "Common/Types/Dictionary";
import APIException from "Common/Types/Exception/ApiException";
import BadDataException from "Common/Types/Exception/BadDataException";
import { JSONObject } from "Common/Types/JSON";
import ComponentMetadata, { Port } from "Common/Types/Workflow/Component";
@@ -53,18 +52,23 @@ export default class ApiPut extends ComponentCode {
if (err instanceof HTTPErrorResponse) {
return Promise.resolve({
returnValues: ApiComponentUtils.getReturnValues(err),
executePort: result.successPort,
executePort: result.errorPort,
});
}
if (apiResult) {
return Promise.resolve({
returnValues: ApiComponentUtils.getReturnValues(apiResult),
executePort: result.successPort,
executePort: result.errorPort,
});
}
throw options.onError(new APIException("Something wrong happened."));
return Promise.resolve({
returnValues: {
errorMessage: (err as Error).message || "Unknown error",
},
executePort: result.errorPort,
});
}
}
}

View File

@@ -4,7 +4,6 @@ import HTTPErrorResponse from "Common/Types/API/HTTPErrorResponse";
import HTTPResponse from "Common/Types/API/HTTPResponse";
import URL from "Common/Types/API/URL";
import Dictionary from "Common/Types/Dictionary";
import APIException from "Common/Types/Exception/ApiException";
import BadDataException from "Common/Types/Exception/BadDataException";
import { JSONObject } from "Common/Types/JSON";
import ComponentMetadata, { Port } from "Common/Types/Workflow/Component";
@@ -75,21 +74,26 @@ export default class ApiPost extends ComponentCode {
if (err instanceof HTTPErrorResponse) {
return Promise.resolve({
returnValues: ApiComponentUtils.getReturnValues(err),
executePort: result.successPort,
executePort: result.errorPort,
});
}
if (apiResult) {
return Promise.resolve({
returnValues: ApiComponentUtils.getReturnValues(apiResult),
executePort: result.successPort,
executePort: result.errorPort,
});
}
logger.debug("API Post Component is done with error.");
logger.debug(err);
throw options.onError(new APIException("Something wrong happened."));
return Promise.resolve({
returnValues: {
errorMessage: (err as Error).message || "Unknown error",
},
executePort: result.errorPort,
});
}
}
}

View File

@@ -4,7 +4,6 @@ import HTTPErrorResponse from "Common/Types/API/HTTPErrorResponse";
import HTTPResponse from "Common/Types/API/HTTPResponse";
import URL from "Common/Types/API/URL";
import Dictionary from "Common/Types/Dictionary";
import APIException from "Common/Types/Exception/ApiException";
import BadDataException from "Common/Types/Exception/BadDataException";
import { JSONObject } from "Common/Types/JSON";
import ComponentMetadata, { Port } from "Common/Types/Workflow/Component";
@@ -53,18 +52,23 @@ export default class ApiPut extends ComponentCode {
if (err instanceof HTTPErrorResponse) {
return Promise.resolve({
returnValues: ApiComponentUtils.getReturnValues(err),
executePort: result.successPort,
executePort: result.errorPort,
});
}
if (apiResult) {
return Promise.resolve({
returnValues: ApiComponentUtils.getReturnValues(apiResult),
executePort: result.successPort,
executePort: result.errorPort,
});
}
throw options.onError(new APIException("Something wrong happened."));
return Promise.resolve({
returnValues: {
errorMessage: (err as Error).message || "Unknown error",
},
executePort: result.errorPort,
});
}
}
}

View File

@@ -548,11 +548,25 @@ export default class StatementGenerator<TBaseModel extends AnalyticsBaseModel> {
`${aggregationMethod}(${aggregateBy.aggregateColumnName.toString()}) as ${aggregateBy.aggregateColumnName.toString()}, date_trunc('${aggregationInterval.toLowerCase()}', toStartOfInterval(${aggregateBy.aggregationTimestampColumnName.toString()}, INTERVAL 1 ${aggregationInterval.toLowerCase()})) as ${aggregateBy.aggregationTimestampColumnName.toString()}`,
);
const columns: Array<string> = [
aggregateBy.aggregateColumnName.toString(),
aggregateBy.aggregationTimestampColumnName.toString(),
];
if (aggregateBy.groupBy && Object.keys(aggregateBy.groupBy).length > 0) {
const groupByStatement: Statement = this.toGroupByStatement(
aggregateBy.groupBy,
);
selectStatement.append(SQL`, `).append(groupByStatement);
// add to columns.
for (const key in aggregateBy.groupBy) {
columns.push(key);
}
}
return {
columns: [
aggregateBy.aggregateColumnName.toString(),
aggregateBy.aggregationTimestampColumnName.toString(),
],
columns: columns,
statement: selectStatement,
};
}

View File

@@ -0,0 +1,194 @@
import {
Page as PlaywrightPage,
Browser as PlaywrightBrowser,
chromium,
firefox,
} from "playwright";
import LocalFile from "./LocalFile";
import BadDataException from "../../Types/Exception/BadDataException";
import ScreenSizeType from "../../Types/ScreenSizeType";
import BrowserType from "../../Types/BrowserType";
import logger from "./Logger";
export type Page = PlaywrightPage;
export type Browser = PlaywrightBrowser;
export default class BrowserUtil {
public static async convertHtmlToBase64Screenshot(data: {
html: string;
}): Promise<string | null> {
try {
const html: string = data.html;
const pageAndBrowser: {
page: Page;
browser: Browser;
} = await BrowserUtil.getPageByBrowserType({
browserType: BrowserType.Chromium,
screenSizeType: ScreenSizeType.Desktop,
});
const page: Page = pageAndBrowser.page;
const browser: Browser = pageAndBrowser.browser;
await page.setContent(html, {
waitUntil: "domcontentloaded",
});
const screenshot: Buffer = await page.screenshot({ type: "png" });
await browser.close();
return screenshot.toString("base64");
} catch (e) {
logger.debug(e);
return null;
}
}
public static async getPageByBrowserType(data: {
browserType: BrowserType;
screenSizeType: ScreenSizeType;
}): Promise<{
page: Page;
browser: Browser;
}> {
const viewport: {
height: number;
width: number;
} = BrowserUtil.getViewportHeightAndWidth({
screenSizeType: data.screenSizeType,
});
let page: Page | null = null;
let browser: Browser | null = null;
if (data.browserType === BrowserType.Chromium) {
browser = await chromium.launch({
executablePath: await BrowserUtil.getChromeExecutablePath(),
});
page = await browser.newPage();
}
if (data.browserType === BrowserType.Firefox) {
browser = await firefox.launch({
executablePath: await BrowserUtil.getFirefoxExecutablePath(),
});
page = await browser.newPage();
}
// if (data.browserType === BrowserType.Webkit) {
// browser = await webkit.launch();
// page = await browser.newPage();
// }
await page?.setViewportSize({
width: viewport.width,
height: viewport.height,
});
if (!browser) {
throw new BadDataException("Invalid Browser Type.");
}
if (!page) {
// close the browser if page is not created
await browser.close();
throw new BadDataException("Invalid Browser Type.");
}
return {
page: page,
browser: browser,
};
}
public static getViewportHeightAndWidth(options: {
screenSizeType: ScreenSizeType;
}): {
height: number;
width: number;
} {
let viewPortHeight: number = 0;
let viewPortWidth: number = 0;
switch (options.screenSizeType) {
case ScreenSizeType.Desktop:
viewPortHeight = 1080;
viewPortWidth = 1920;
break;
case ScreenSizeType.Mobile:
viewPortHeight = 640;
viewPortWidth = 360;
break;
case ScreenSizeType.Tablet:
viewPortHeight = 768;
viewPortWidth = 1024;
break;
default:
viewPortHeight = 1080;
viewPortWidth = 1920;
break;
}
return { height: viewPortHeight, width: viewPortWidth };
}
public static async getChromeExecutablePath(): Promise<string> {
const doesDirectoryExist: boolean = await LocalFile.doesDirectoryExist(
"/root/.cache/ms-playwright",
);
if (!doesDirectoryExist) {
throw new BadDataException("Chrome executable path not found.");
}
// get list of files in the directory
const directories: string[] = await LocalFile.getListOfDirectories(
"/root/.cache/ms-playwright",
);
if (directories.length === 0) {
throw new BadDataException("Chrome executable path not found.");
}
const chromeInstallationName: string | undefined = directories.find(
(directory: string) => {
return directory.includes("chromium");
},
);
if (!chromeInstallationName) {
throw new BadDataException("Chrome executable path not found.");
}
return `/root/.cache/ms-playwright/${chromeInstallationName}/chrome-linux/chrome`;
}
public static async getFirefoxExecutablePath(): Promise<string> {
const doesDirectoryExist: boolean = await LocalFile.doesDirectoryExist(
"/root/.cache/ms-playwright",
);
if (!doesDirectoryExist) {
throw new BadDataException("Firefox executable path not found.");
}
// get list of files in the directory
const directories: string[] = await LocalFile.getListOfDirectories(
"/root/.cache/ms-playwright",
);
if (directories.length === 0) {
throw new BadDataException("Firefox executable path not found.");
}
const firefoxInstallationName: string | undefined = directories.find(
(directory: string) => {
return directory.includes("firefox");
},
);
if (!firefoxInstallationName) {
throw new BadDataException("Firefox executable path not found.");
}
return `/root/.cache/ms-playwright/${firefoxInstallationName}/firefox/firefox`;
}
}

View File

@@ -70,7 +70,10 @@ export default class APIRequestCriteria {
}
//check response code
if (input.criteriaFilter.checkOn === CheckOn.ResponseStatusCode) {
if (
input.criteriaFilter.checkOn === CheckOn.ResponseStatusCode &&
(input.dataToProcess as ProbeMonitorResponse).responseCode
) {
threshold = CompareCriteria.convertToNumber(threshold);
const value: Array<number> | number =

View File

@@ -31,6 +31,8 @@ export default class CompareCriteria {
value: boolean | Array<boolean>;
evaluationType?: EvaluateOverTimeType | undefined;
}): boolean {
logger.debug(`isTrue: ${JSON.stringify(data)}`);
if (Array.isArray(data.value)) {
if (data.evaluationType === EvaluateOverTimeType.AnyValue) {
return data.value.some((value: boolean) => {
@@ -49,6 +51,8 @@ export default class CompareCriteria {
value: boolean | Array<boolean>;
evaluationType?: EvaluateOverTimeType | undefined;
}): boolean {
logger.debug(`isFalse: ${JSON.stringify(data)}`);
if (Array.isArray(data.value)) {
if (data.evaluationType === EvaluateOverTimeType.AnyValue) {
return data.value.some((value: boolean) => {
@@ -304,6 +308,8 @@ export default class CompareCriteria {
value: Array<boolean> | boolean;
criteriaFilter: CriteriaFilter;
}): string | null {
logger.debug(`compareCriteriaBoolean: ${JSON.stringify(data)}`);
if (data.value === null || data.value === undefined) {
return null;
}
@@ -313,7 +319,8 @@ export default class CompareCriteria {
CompareCriteria.isTrue({
value: data.value,
evaluationType:
data.criteriaFilter.evaluateOverTimeOptions?.evaluateOverTimeType,
data.criteriaFilter.evaluateOverTimeOptions?.evaluateOverTimeType ||
data.criteriaFilter.metricMonitorOptions?.metricAggregationType,
})
) {
return CompareCriteria.getCompareMessage({
@@ -331,7 +338,8 @@ export default class CompareCriteria {
CompareCriteria.isFalse({
value: data.value,
evaluationType:
data.criteriaFilter.evaluateOverTimeOptions?.evaluateOverTimeType,
data.criteriaFilter.evaluateOverTimeOptions?.evaluateOverTimeType ||
data.criteriaFilter.metricMonitorOptions?.metricAggregationType,
})
) {
return CompareCriteria.getCompareMessage({
@@ -366,7 +374,8 @@ export default class CompareCriteria {
threshold: data.threshold as number,
value: data.value,
evaluationType:
data.criteriaFilter.evaluateOverTimeOptions?.evaluateOverTimeType,
data.criteriaFilter.evaluateOverTimeOptions?.evaluateOverTimeType ||
data.criteriaFilter.metricMonitorOptions?.metricAggregationType,
})
) {
return CompareCriteria.getCompareMessage({
@@ -385,7 +394,8 @@ export default class CompareCriteria {
threshold: data.threshold as number,
value: data.value,
evaluationType:
data.criteriaFilter.evaluateOverTimeOptions?.evaluateOverTimeType,
data.criteriaFilter.evaluateOverTimeOptions?.evaluateOverTimeType ||
data.criteriaFilter.metricMonitorOptions?.metricAggregationType,
})
) {
return CompareCriteria.getCompareMessage({
@@ -404,7 +414,8 @@ export default class CompareCriteria {
threshold: data.threshold as number,
value: data.value,
evaluationType:
data.criteriaFilter.evaluateOverTimeOptions?.evaluateOverTimeType,
data.criteriaFilter.evaluateOverTimeOptions?.evaluateOverTimeType ||
data.criteriaFilter.metricMonitorOptions?.metricAggregationType,
})
) {
return CompareCriteria.getCompareMessage({
@@ -423,7 +434,8 @@ export default class CompareCriteria {
threshold: data.threshold as number,
value: data.value,
evaluationType:
data.criteriaFilter.evaluateOverTimeOptions?.evaluateOverTimeType,
data.criteriaFilter.evaluateOverTimeOptions?.evaluateOverTimeType ||
data.criteriaFilter.metricMonitorOptions?.metricAggregationType,
})
) {
return CompareCriteria.getCompareMessage({
@@ -442,7 +454,8 @@ export default class CompareCriteria {
threshold: data.threshold as number,
value: data.value,
evaluationType:
data.criteriaFilter.evaluateOverTimeOptions?.evaluateOverTimeType,
data.criteriaFilter.evaluateOverTimeOptions?.evaluateOverTimeType ||
data.criteriaFilter.metricMonitorOptions?.metricAggregationType,
})
) {
return CompareCriteria.getCompareMessage({
@@ -461,7 +474,8 @@ export default class CompareCriteria {
threshold: data.threshold as number,
value: data.value,
evaluationType:
data.criteriaFilter.evaluateOverTimeOptions?.evaluateOverTimeType,
data.criteriaFilter.evaluateOverTimeOptions?.evaluateOverTimeType ||
data.criteriaFilter.metricMonitorOptions?.metricAggregationType,
})
) {
return CompareCriteria.getCompareMessage({
@@ -485,17 +499,19 @@ export default class CompareCriteria {
// CPU Percent over the last 5 minutes is 10 which is less than the threshold of 20
let message: string = "";
if (
data.criteriaFilter.evaluateOverTimeOptions?.evaluateOverTimeType ===
EvaluateOverTimeType.AnyValue
) {
let evaluationType: EvaluateOverTimeType | undefined =
data.criteriaFilter.evaluateOverTimeOptions?.evaluateOverTimeType;
if (data.criteriaFilter.metricMonitorOptions?.metricAggregationType) {
evaluationType =
data.criteriaFilter.metricMonitorOptions.metricAggregationType;
}
if (evaluationType === EvaluateOverTimeType.AnyValue) {
message += "Any value of";
}
if (
data.criteriaFilter.evaluateOverTimeOptions?.evaluateOverTimeType ===
EvaluateOverTimeType.AllValues
) {
if (evaluationType === EvaluateOverTimeType.AllValues) {
message += "All values of";
}
@@ -544,6 +560,9 @@ export default class CompareCriteria {
case FilterType.NotEqualTo:
message += ` not equal to ${data.threshold}. `;
break;
case FilterType.EqualTo:
message += ` equal to ${data.threshold}. `;
break;
case FilterType.Contains:
message += ` contains ${data.threshold}. `;
break;

View File

@@ -1,4 +1,3 @@
import MonitorMetricsByMinuteService from "../../../Services/MonitorMetricsByMinuteService";
import Query from "../../../Types/AnalyticsDatabase/Query";
import GreaterThanOrEqual from "Common/Types/BaseDatabase/GreaterThanOrEqual";
import { LIMIT_PER_PROJECT } from "Common/Types/Database/LimitMax";
@@ -10,7 +9,9 @@ import {
EvaluateOverTimeType,
} from "Common/Types/Monitor/CriteriaFilter";
import ObjectID from "Common/Types/ObjectID";
import MonitorMetricsByMinute from "Common/Models/AnalyticsModels/MonitorMetricsByMinute";
import Metric from "../../../../Models/AnalyticsModels/Metric";
import MonitorMetricTypeUtil from "../../../../Utils/Monitor/MonitorMetricType";
import MetricService from "../../../Services/MetricService";
export default class EvaluateOverTime {
public static async getValueOverTime(data: {
@@ -27,36 +28,35 @@ export default class EvaluateOverTime {
// TODO: Query over miscData
const query: Query<MonitorMetricsByMinute> = {
const query: Query<Metric> = {
createdAt: new GreaterThanOrEqual(lastMinutesDate),
monitorId: data.monitorId,
metricType: data.metricType,
serviceId: data.monitorId,
name: MonitorMetricTypeUtil.getMonitorMeticTypeByCheckOn(data.metricType),
};
if (data.miscData) {
query.miscData = data.miscData;
query.attributes = data.miscData;
}
const monitorMetricsItems: Array<MonitorMetricsByMinute> =
await MonitorMetricsByMinuteService.findBy({
query: query,
limit: LIMIT_PER_PROJECT,
skip: 0,
props: {
isRoot: true,
},
select: {
metricValue: true,
},
});
const monitorMetricsItems: Array<Metric> = await MetricService.findBy({
query: query,
limit: LIMIT_PER_PROJECT,
skip: 0,
props: {
isRoot: true,
},
select: {
value: true,
},
});
const values: Array<number | boolean> = monitorMetricsItems
.map((item: MonitorMetricsByMinute) => {
.map((item: Metric) => {
if (data.metricType === CheckOn.IsOnline) {
return item.metricValue === 1;
return item.value === 1;
}
return item.metricValue;
return item.value;
})
.filter((value: number | boolean | undefined) => {
return value !== undefined;

View File

@@ -0,0 +1,127 @@
import AggregateModel from "../../../../Types/BaseDatabase/AggregatedModel";
import AggregatedResult from "../../../../Types/BaseDatabase/AggregatedResult";
import MetricFormulaConfigData from "../../../../Types/Metrics/MetricFormulaConfigData";
import MetricQueryConfigData from "../../../../Types/Metrics/MetricQueryConfigData";
import MetricMonitorResponse from "../../../../Types/Monitor/MetricMonitor/MetricMonitorResponse";
import MonitorStep from "../../../../Types/Monitor/MonitorStep";
import DataToProcess from "../DataToProcess";
import CompareCriteria from "./CompareCriteria";
import {
CheckOn,
CriteriaFilter,
EvaluateOverTimeType,
} from "Common/Types/Monitor/CriteriaFilter";
export default class MetricMonitorCriteria {
  /**
   * Evaluates a metric-monitor criteria filter against the aggregated metric
   * results carried on the monitor response.
   *
   * @param input.dataToProcess - Monitor response; for metric monitors this is
   *   a MetricMonitorResponse whose `metricResult` holds one AggregatedResult
   *   per query config, followed by one per formula config.
   * @param input.criteriaFilter - The filter to evaluate (check type,
   *   threshold, metric-monitor options such as alias and aggregation type).
   * @param input.monitorStep - Step configuration used to resolve a metric
   *   alias to its index in the aggregated results.
   * @returns A human-readable message when the criteria is met, or null when
   *   it is not (or when the check type is not a metric-value check).
   */
  public static async isMonitorInstanceCriteriaFilterMet(input: {
    dataToProcess: DataToProcess;
    criteriaFilter: CriteriaFilter;
    monitorStep: MonitorStep;
  }): Promise<string | null> {
    // Default the aggregation type so downstream comparisons always have one.
    if (
      input.criteriaFilter.metricMonitorOptions &&
      !input.criteriaFilter.metricMonitorOptions.metricAggregationType
    ) {
      input.criteriaFilter.metricMonitorOptions.metricAggregationType =
        EvaluateOverTimeType.AnyValue;
    }

    let threshold: number | string | undefined | null =
      input.criteriaFilter.value;

    if (input.criteriaFilter.checkOn === CheckOn.MetricValue) {
      threshold = CompareCriteria.convertToNumber(threshold);

      const metricAggregatedResult: Array<AggregatedResult> =
        (input.dataToProcess as MetricMonitorResponse).metricResult || [];

      const metricAlias: string =
        input.criteriaFilter.metricMonitorOptions?.metricAlias || "";

      // Resolve the alias to an index into the aggregated results.
      // NOTE: use `?? -1`, not `|| -1` — findIndex legitimately returns 0,
      // which is falsy and would otherwise be clobbered to -1, making an
      // alias at the first query position unfindable.
      let aliasIndex: number =
        input.monitorStep.data?.metricMonitor?.metricViewConfig?.queryConfigs.findIndex(
          (queryConfig: MetricQueryConfigData) => {
            return queryConfig.metricAliasData?.metricVariable === metricAlias;
          },
        ) ?? -1;

      if (aliasIndex < 0) {
        // Not a query alias — try the formula aliases instead. Formula
        // results are appended after the query results, so a matched formula
        // index is offset by the number of query configs.
        const formulaIndex: number =
          input.monitorStep.data?.metricMonitor?.metricViewConfig?.formulaConfigs.findIndex(
            (formulaConfig: MetricFormulaConfigData) => {
              return (
                formulaConfig.metricAliasData?.metricVariable === metricAlias
              );
            },
          ) ?? -1;

        if (formulaIndex >= 0) {
          aliasIndex =
            formulaIndex +
            (input.monitorStep.data?.metricMonitor?.metricViewConfig
              ?.queryConfigs.length || 0);
        }
      }

      // Use the result at the resolved index only when it is in bounds;
      // otherwise fall back to the first result (if any).
      const aggregatedResult: AggregatedResult | undefined =
        aliasIndex >= 0 && aliasIndex < metricAggregatedResult.length
          ? metricAggregatedResult[aliasIndex]
          : metricAggregatedResult[0] || undefined;

      if (metricAlias) {
        // Find the index of the alias in the response's own view config.
        const indexOfAlias: number = (
          input.dataToProcess as MetricMonitorResponse
        ).metricViewConfig.queryConfigs.findIndex(
          (queryConfig: MetricQueryConfigData) => {
            return queryConfig.metricAliasData?.metricVariable === metricAlias;
          },
        );

        // Compare against the aggregated result for that alias, if present.
        if (indexOfAlias !== -1) {
          const aggregatedResultForAlias: AggregatedResult | undefined =
            metricAggregatedResult[indexOfAlias];
          if (aggregatedResultForAlias) {
            const numbers: Array<number> = aggregatedResultForAlias.data.map(
              (data: AggregateModel) => {
                return data.value;
              },
            );
            return CompareCriteria.compareCriteriaNumbers({
              value: numbers && numbers.length > 0 ? numbers : 0,
              threshold: threshold as number,
              criteriaFilter: input.criteriaFilter,
            });
          }
        }
      }

      // No alias (or alias resolution failed): use the default result.
      if (aggregatedResult) {
        const numbers: Array<number> = aggregatedResult.data.map(
          (data: AggregateModel) => {
            return data.value;
          },
        );
        return CompareCriteria.compareCriteriaNumbers({
          value: numbers && numbers.length > 0 ? numbers : 0,
          threshold: threshold as number,
          criteriaFilter: input.criteriaFilter,
        });
      }
    }

    return null;
  }
}

Some files were not shown because too many files have changed in this diff Show More