Compare commits

...

469 Commits

Author SHA1 Message Date
Simon Larsen
6c1bd10873 fix current status on sp 2023-11-02 13:39:59 +00:00
Simon Larsen
05a288c761 fix jest types 2023-11-02 11:11:36 +00:00
Simon Larsen
a9f503da9d fix probe tests 2023-11-02 10:21:31 +00:00
Simon Larsen
49d3655502 fix fmt 2023-11-01 20:49:00 +00:00
Simon Larsen
1cdcc639b4 fix fmt 2023-11-01 20:45:23 +00:00
Simon Larsen
7568c70b50 fix fmt 2023-11-01 20:15:18 +00:00
Simon Larsen
6259f81a91 add ui changes 2023-11-01 19:38:58 +00:00
Simon Larsen
f40c1daeb8 fix fmt 2023-11-01 14:15:43 +00:00
Simon Larsen
bb73ed14cd add animate in status bubble 2023-11-01 10:44:01 +00:00
Simon Larsen
4b71a81f7c fix fmt on uptime 2023-11-01 10:40:03 +00:00
Simon Larsen
d6788c138b fix uptime graph 2023-11-01 10:34:38 +00:00
Simon Larsen
28f4a1f473 add status api 2023-10-31 17:05:56 +00:00
Simon Larsen
ccb4781c06 enable compression 2023-10-31 14:10:57 +00:00
Simon Larsen
2e27347225 fix fmt 2023-10-31 12:19:47 +00:00
Simon Larsen
e9015f0eff Merge branch 'master' of github.com-simon:OneUptime/oneuptime 2023-10-31 12:06:27 +00:00
Simon Larsen
6cf8560151 fix eslint 2023-10-31 12:06:24 +00:00
Simon Larsen
7d2e91d867 Merge pull request #854 from hasannadeem/tests/notification-middleware-and-cookie-utils
Tests for notification middleware and cookie utils
2023-10-31 11:57:11 +00:00
Simon Larsen
46e0210dcc Merge pull request #869 from fakharj/eslint-object-curly-spacing
add eslint object-curly-spacing rule
2023-10-31 09:58:24 +00:00
Simon Larsen
02fc5502eb Merge branch 'master' into eslint-object-curly-spacing 2023-10-31 09:58:17 +00:00
Simon Larsen
ce3131edaf Merge pull request #865 from fakharj/eslint-unneeded-ternary
added no unneeded ternary in eslint
2023-10-30 13:56:03 +00:00
fakharj
ca4716133a add eslint object-curly-spacing rule 2023-10-30 18:43:43 +05:00
Simon Larsen
9cb254f9d1 Merge pull request #862 from OneUptime/dependabot/npm_and_yarn/Common/crypto-js-4.2.0
Bump crypto-js from 4.1.1 to 4.2.0 in /Common
2023-10-30 11:13:47 +00:00
Simon Larsen
d51fbdf5f7 Merge pull request #868 from cheese-framework/master
Add test suites for JSONFunctions and SerializableObject
2023-10-30 11:10:51 +00:00
Simon Larsen
57b7b5b39e Merge pull request #855 from hammadfauz/duplicateModalTest
Duplicate modal test
2023-10-30 08:24:18 +00:00
Drantaz
2e46ebd0e8 Merge branch 'master' of https://github.com/cheese-framework/oneuptime 2023-10-27 20:25:23 +00:00
Drantaz
4ffe215665 Add test suites for JSONFunctions and SerializableObject 2023-10-27 20:24:33 +00:00
Hammad
e680346f1f fixes lint 2023-10-27 23:48:27 +05:00
Hammad
4faa8d32f6 adds test ids to key elements 2023-10-27 23:27:33 +05:00
Simon Larsen
ab07ff0104 fix fmt 2023-10-27 17:22:20 +01:00
Simon Larsen
03dd6fef04 Merge branch 'master' of github.com-simon:OneUptime/oneuptime 2023-10-27 17:15:29 +01:00
Simon Larsen
31c0ff7dea Merge branch 'feature-flags' 2023-10-27 17:15:18 +01:00
Simon Larsen
dca1d2c370 add call and sms cost 2023-10-27 17:14:58 +01:00
Simon Larsen
fc218a970a Merge pull request #861 from OneUptime/feature-flags
add feature flag page
2023-10-27 16:40:30 +01:00
Simon Larsen
17509225ee add monitor groups. 2023-10-27 16:39:33 +01:00
Simon Larsen
447bac1d67 fix undefined in page title 2023-10-27 16:20:04 +01:00
Simon Larsen
67b3b224a7 fix monitor group api 2023-10-27 16:17:28 +01:00
Simon Larsen
48fbf50973 add current status 2023-10-27 13:13:32 +01:00
fakharj
a0acb24651 added no unneeded ternary in eslint 2023-10-27 13:03:07 +05:00
Simon Larsen
c958893d67 increase timeout to 30 secs 2023-10-26 20:51:00 +01:00
Simon Larsen
9e2bd15cf4 fix fmt 2023-10-26 19:58:47 +01:00
Simon Larsen
17e9ad4fcd Merge branch 'master' into feature-flags 2023-10-26 19:10:30 +01:00
Simon Larsen
4d5a49f11e fix fmt 2023-10-26 19:09:29 +01:00
Simon Larsen
2d9b9950dd when monitors are timeout mark them as offline. 2023-10-26 19:09:03 +01:00
Simon Larsen
c3c0fbc853 fix fmt 2023-10-26 15:24:15 +01:00
Simon Larsen
f970b02e9e monitor group view 2023-10-26 15:20:38 +01:00
Simon Larsen
987394be41 monitor groups page 2023-10-26 15:02:48 +01:00
Simon Larsen
34b3dff108 add service 2023-10-26 14:53:26 +01:00
Simon Larsen
b603241d57 add incidents page 2023-10-26 14:51:45 +01:00
Simon Larsen
8df01fc098 add owners page 2023-10-26 14:26:58 +01:00
Simon Larsen
268305e6cd add services and perms 2023-10-26 14:14:52 +01:00
Simon Larsen
bbb53b3321 make delete work 2023-10-26 13:52:05 +01:00
Simon Larsen
c79fa88ad1 add monitor group resource 2023-10-26 13:37:31 +01:00
Simon Larsen
35c5e57752 add read perms to domain 2023-10-26 13:09:06 +01:00
dependabot[bot]
254a9de101 Bump crypto-js from 4.1.1 to 4.2.0 in /Common
Bumps [crypto-js](https://github.com/brix/crypto-js) from 4.1.1 to 4.2.0.
- [Commits](https://github.com/brix/crypto-js/compare/4.1.1...4.2.0)

---
updated-dependencies:
- dependency-name: crypto-js
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-10-25 22:47:32 +00:00
Simon Larsen
c844bf8e43 add feature flag page 2023-10-25 20:05:28 +01:00
Simon Larsen
c0288716da add labels to status page 2023-10-25 18:06:39 +01:00
Simon Larsen
51e7fa6c9a add labels to status page 2023-10-25 17:46:30 +01:00
Simon Larsen
d9eb60017a fix fmt 2023-10-25 17:37:02 +01:00
Simon Larsen
e9d7b36198 fix labels on status page. 2023-10-25 14:50:28 +01:00
Simon Larsen
7308945061 show incident labels on status page 2023-10-25 14:39:43 +01:00
Simon Larsen
3f8e5e4e0a Merge pull request #860 from cheese-framework/master
Add test suites for Database and API
2023-10-25 12:40:20 +01:00
Drantaz
3f7d186db0 Add test suites for Database and API 2023-10-25 11:10:52 +00:00
Simon Larsen
8cb91d94eb remove auto table creation 2023-10-25 09:21:11 +01:00
Simon Larsen
3337ad2a45 Merge pull request #859 from OneUptime/otel-save
Otel save
2023-10-25 09:20:24 +01:00
Simon Larsen
438fbf4368 add histogram in program 2023-10-24 20:49:10 +01:00
Simon Larsen
ffca1acc9a Merge pull request #858 from cheese-framework/Add-Jest-test-suite-for-Common/Database/Date-#857
Add Test cases for Common/Database/Date
2023-10-24 20:25:24 +01:00
Drantaz
846d5ce104 Add Test cases for Common/Database/Date -- Add linting 2023-10-24 17:14:26 +00:00
Drantaz
43a075436a Add Test cases for Common/Database/Date 2023-10-24 16:55:11 +00:00
Simon Larsen
8fe35d9a29 remove unneeded files 2023-10-24 16:54:14 +01:00
Simon Larsen
849eeac23a add auto table creation 2023-10-24 13:52:37 +01:00
Simon Larsen
01a4cac559 Merge pull request #830 from OneUptime/otel-save
Otel save
2023-10-24 13:50:38 +01:00
Simon Larsen
b4cd4d2c02 comment out table creation 2023-10-24 13:50:15 +01:00
Simon Larsen
329484fb87 fix fmt 2023-10-24 13:49:27 +01:00
Simon Larsen
ee54a324d7 add gauge 2023-10-24 13:35:26 +01:00
Simon Larsen
ba2feffbee Merge pull request #826 from hammadfauz/BearerTokenAuthTest
Bearer token auth test
2023-10-24 12:30:07 +01:00
Hammad
4b0b91396b lint fixes 2023-10-24 16:26:53 +05:00
Hammad
f2c6321216 raises proper exception when token is invalid or empty 2023-10-24 16:22:45 +05:00
Simon Larsen
67447c0bd7 fix copy in criteria 2023-10-24 11:57:06 +01:00
Simon Larsen
323646ebcd docker file for otel proj 2023-10-24 11:56:52 +01:00
Simon Larsen
81e4b4435c fix array save 2023-10-24 11:32:02 +01:00
Simon Larsen
842b0664c7 update long numbers 2023-10-24 10:56:41 +01:00
Simon Larsen
0bdab474de fix models 2023-10-24 10:46:44 +01:00
Simon Larsen
ef1b22e62b Merge pull request #856 from OneUptime/simlarsen-patch-1
Update DockerCompose.md
2023-10-24 10:22:51 +01:00
Simon Larsen
3d229a0030 Update DockerCompose.md 2023-10-24 10:22:37 +01:00
Simon Larsen
e34599d18a fix fmt 2023-10-23 20:41:33 +01:00
Simon Larsen
aa7594f2a8 fix fmt 2023-10-23 20:24:36 +01:00
Simon Larsen
0626669b02 add metrics histogram and sum 2023-10-23 20:19:20 +01:00
Simon Larsen
35b949e448 add new models to clickhouse 2023-10-23 20:02:44 +01:00
Simon Larsen
2bb4086fd1 refactor service code 2023-10-23 15:58:16 +01:00
Simon Larsen
03f9c36f06 fix fmt 2023-10-23 15:51:38 +01:00
Simon Larsen
9fe998a43d save logs to otel 2023-10-23 15:27:22 +01:00
Simon Larsen
3841b655e5 add dotnet otel project 2023-10-23 15:17:25 +01:00
Hammad
5ec8ee6dcb removes jest config for ignoring snapshots 2023-10-23 19:17:19 +05:00
Hammad
a1c6121bee tests UI by querying key elements and removes snapshots 2023-10-23 19:15:10 +05:00
Hammad
51c76aa1af moves test suite into Components as per convention 2023-10-23 19:14:04 +05:00
Simon Larsen
40ee5d775b fix fmt 2023-10-23 12:57:02 +01:00
Simon Larsen
88f0e2af51 set default card for customers if they dont have it. 2023-10-23 11:27:19 +01:00
Hammad
e702a0b0d2 Fixes lint errors 2023-10-21 15:49:54 +05:00
Hammad
cfc2f99248 adds tests for DuplicateModel 2023-10-21 15:07:20 +05:00
Hammad
f23bb3af41 fixes jest mistaking snapshots for test suites 2023-10-21 15:06:46 +05:00
Simon Larsen
2cdf1236be fix fmt 2023-10-20 19:52:58 +01:00
Simon Larsen
ed5a144735 fix insert into db 2023-10-20 19:51:23 +01:00
Simon Larsen
e687a439e6 fix fmt 2023-10-20 17:58:45 +01:00
Simon Larsen
cfa20e2be6 generate insert statement 2023-10-20 17:43:50 +01:00
hasan
9205764deb Add tests for notification middleware and cookie utils 2023-10-20 18:44:33 +05:00
Simon Larsen
32275837ac fix nested type 2023-10-20 14:26:01 +01:00
Simon Larsen
34568a39f5 refactor statement generator into a new file 2023-10-20 13:01:51 +01:00
Simon Larsen
b7b41dfebb Merge branch 'master' into otel-save 2023-10-20 11:27:21 +01:00
Simon Larsen
9b40011196 add more shadow 2023-10-20 11:24:02 +01:00
Simon Larsen
d644287a0c fix video close 2023-10-20 11:18:19 +01:00
Simon Larsen
ea7dc0b918 add hiring link in footer 2023-10-19 18:52:24 +01:00
Simon Larsen
c34639a3bb add watch demo to status page 2023-10-19 18:23:54 +01:00
Simon Larsen
41ba37be80 nested model 2023-10-19 16:06:09 +01:00
Simon Larsen
954d5be113 fix clickhouse 2023-10-18 18:18:00 +01:00
Simon Larsen
21a857d912 fix import 2023-10-18 17:47:45 +01:00
Simon Larsen
cb0f7bbad5 fix fmt 2023-10-18 17:44:54 +01:00
Simon Larsen
c3c94f3634 add table 2023-10-18 17:43:18 +01:00
Simon Larsen
955141d42e add logs, metrics and spans 2023-10-18 17:28:49 +01:00
Simon Larsen
352c9ffb8e otel save 2023-10-18 13:59:19 +01:00
Simon Larsen
d543757a7d open collector port 2023-10-18 12:19:52 +01:00
Simon Larsen
b3cfdbf45a enable gpu on llama docker 2023-10-18 12:07:37 +01:00
Simon Larsen
c629921d01 fix llama docker file. 2023-10-18 11:01:15 +01:00
Simon Larsen
008e0c50b1 fix cluster domain in helm chart 2023-10-18 10:16:23 +01:00
Simon Larsen
fcf916bdfe change to alert emoji 2023-10-17 14:48:54 +01:00
Simon Larsen
9850bcf0e7 fix fmt 2023-10-17 14:30:25 +01:00
Simon Larsen
e1efeec9ec add cookie set 2023-10-17 13:49:43 +01:00
Simon Larsen
7e34393fc6 fix api url 2023-10-17 12:59:14 +01:00
Simon Larsen
262fffd9ff fix url 2023-10-17 12:48:46 +01:00
Simon Larsen
35db6e95ad add httponly cookie to status page 2023-10-17 12:10:50 +01:00
Simon Larsen
17208b5e26 fix token cookie on master page 2023-10-16 21:16:11 +01:00
Simon Larsen
896dce3430 add pycache to gitignore 2023-10-16 20:54:21 +01:00
Simon Larsen
d844fa9df2 fix api 2023-10-16 20:06:21 +01:00
Simon Larsen
48542c4323 fix typo 2023-10-16 19:26:13 +01:00
Simon Larsen
f57047c778 fix status page api 2023-10-16 19:01:45 +01:00
Simon Larsen
e471787462 set cookie for host 2023-10-16 18:15:25 +01:00
Hammad
dc4721f878 fixes lint errors 2023-10-16 22:01:05 +05:00
Simon Larsen
4bd4dbf3c1 fix domains cookie set on status page 2023-10-16 17:41:49 +01:00
Hammad
6c0c79dd25 removes line that will never be run
this brings code coverage to 100%
2023-10-16 21:14:36 +05:00
Hammad
a9548858b0 adds tests for CommonServer/Middleware/BearerTokenAuthorization 2023-10-16 21:12:31 +05:00
Simon Larsen
6804e94850 add ingestor status check 2023-10-16 12:55:54 +01:00
Simon Larsen
63736aed6c fix path 2023-10-16 12:53:15 +01:00
Simon Larsen
c848032fdc add otel to helm chart 2023-10-16 12:41:27 +01:00
Simon Larsen
22c2231e22 make llama work with rest api 2023-10-16 11:45:15 +01:00
Simon Larsen
7a063d741c fix otel collector 2023-10-16 11:06:11 +01:00
Simon Larsen
8a9cc10ff0 fix otel collector 2023-10-16 11:04:47 +01:00
Simon Larsen
2e43fa0c02 Merge pull request #820 from hasannadeem/test/side-menu-item-and-dictionary-of-string-components
Added tests for SideMenuItem and DictionaryOfStrings components
2023-10-16 10:54:11 +01:00
Simon Larsen
f51a1828ab fix fmt 2023-10-15 21:41:49 +01:00
Simon Larsen
805139055a make llama work 2023-10-15 21:04:58 +01:00
Simon Larsen
42c85b16e7 delete nodejs adaptor from llama 2023-10-15 18:34:25 +01:00
Simon Larsen
a59742cddb add python app for llama. 2023-10-15 18:14:15 +01:00
Simon Larsen
ba426b5580 fix llama 2023-10-14 17:59:52 +01:00
Simon Larsen
1945bbfd45 fix llama compile err 2023-10-14 16:45:17 +01:00
Simon Larsen
58debb9959 make llama work 2023-10-14 16:36:12 +01:00
hasan
6485f474b2 Add tests for SideMenuItem and DictionaryOfStrings components 2023-10-14 18:11:51 +05:00
Simon Larsen
301d7f124c fix fmt 2023-10-14 12:01:06 +01:00
Simon Larsen
985217d2bf rename helm files for ingestor 2023-10-13 20:14:04 +01:00
Simon Larsen
20f46177cb Merge branch 'master' of github.com-simon:OneUptime/oneuptime 2023-10-13 19:49:57 +01:00
Simon Larsen
0453d995ba if no ingestor then connect to oneuptime 2023-10-13 19:49:20 +01:00
Simon Larsen
8ab7fbc95d Merge pull request #819 from OneUptime/make-auth-secure
Make auth secure
2023-10-13 19:46:45 +01:00
Simon Larsen
2d99850596 fix fmt 2023-10-13 19:46:19 +01:00
Simon Larsen
c77b8e2d57 fix common server tests 2023-10-13 19:45:24 +01:00
Simon Larsen
e12f73cebf basic form fix 2023-10-13 19:38:06 +01:00
Simon Larsen
0351480152 fix fmt 2023-10-13 19:12:28 +01:00
Simon Larsen
62e81bee06 fix server tests 2023-10-13 15:29:29 +01:00
Simon Larsen
11b500058e fix type 2023-10-13 15:08:53 +01:00
Simon Larsen
0b94b0ff70 fix undef in cookie get 2023-10-13 14:46:12 +01:00
Simon Larsen
9bd8275321 fix fmt 2023-10-13 14:35:49 +01:00
Simon Larsen
a25a1ed0b9 remove licensing jobs 2023-10-13 14:35:35 +01:00
Simon Larsen
accffbe443 add docker build for otel collector 2023-10-13 14:34:35 +01:00
Simon Larsen
414bfdfec1 add otel jobs 2023-10-13 14:33:21 +01:00
Simon Larsen
cc7037b549 Merge pull request #812 from OneUptime/logging
Logging
2023-10-13 13:57:51 +01:00
Simon Larsen
725f41ef1b make auth httponly 2023-10-13 13:25:38 +01:00
Simon Larsen
e7682c826d remove licensing 2023-10-13 13:02:07 +01:00
Simon Larsen
5347bc29ea add height param for custom logo 2023-10-13 11:11:02 +01:00
Simon Larsen
aa975633dd fix height and width for outlook 2023-10-13 11:08:21 +01:00
Simon Larsen
199ef2b009 fix fmt 2023-10-12 20:04:37 +01:00
Simon Larsen
f0f690f24a duplicate workflows 2023-10-12 19:31:36 +01:00
Simon Larsen
ef43088692 Merge branch 'master' into logging 2023-10-12 19:04:14 +01:00
Simon Larsen
8f36524583 fix txt 2023-10-12 19:03:51 +01:00
Simon Larsen
d738d1378c add duplicate monitor functionality. 2023-10-12 18:48:23 +01:00
Simon Larsen
b402450eac add more logs to redis events 2023-10-12 15:16:42 +01:00
Simon Larsen
130d7b1af3 add stack trace limit 2023-10-12 15:09:06 +01:00
Simon Larsen
070fd415ae mark jobs as timed out. 2023-10-12 15:01:06 +01:00
Simon Larsen
d8b6cf98fe fix fmt 2023-10-10 20:36:22 +01:00
Simon Larsen
3a8451aea3 fix fmt 2023-10-10 20:33:48 +01:00
Simon Larsen
0de056c4e9 fix logging 2023-10-10 20:23:48 +01:00
Simon Larsen
8e02cf56ef improve logging. 2023-10-10 20:19:29 +01:00
Simon Larsen
1296707e0d Merge branch 'master' into logging 2023-10-10 19:41:32 +01:00
Simon Larsen
80b848f757 Merge branch 'master' of github.com-simon:OneUptime/oneuptime 2023-10-10 19:41:09 +01:00
Simon Larsen
4b2a9c74c0 Merge branch 'release' 2023-10-10 19:40:58 +01:00
Simon Larsen
62ed9583fc Merge pull request #813 from yashug/fix-584
FIX-584: Fixes SideOver component action buttons to appear without sc…
2023-10-10 17:21:02 +01:00
Simon Larsen
8a2f9f9913 fix pricing page 2023-10-10 16:29:41 +01:00
Simon Larsen
1a1be22b16 fix fmt 2023-10-10 16:24:35 +01:00
yashug
7e48cb2451 FIX-584: Fixes SideOver component action buttons to appear without scroll 2023-10-10 19:25:33 +05:30
Simon Larsen
8fc2f93e94 refactor otel middleware 2023-10-10 14:51:59 +01:00
Simon Larsen
1892d06cec gzip working 2023-10-10 14:41:20 +01:00
Simon Larsen
32960b90f8 get gzip body 2023-10-10 13:49:48 +01:00
Simon Larsen
7bd5efee1c remove sonar cloud 2023-10-10 11:25:51 +01:00
Simon Larsen
f1b4214379 add proto files 2023-10-10 11:16:22 +01:00
Simon Larsen
ff0314ae9b add collector to docker compose 2023-10-09 19:21:38 +01:00
Simon Larsen
591c89a320 add otel collector to nginx 2023-10-09 19:18:46 +01:00
Simon Larsen
60b1f3bcc5 fix probe ingest response 2023-10-09 19:07:25 +01:00
Simon Larsen
7e796dff42 probeapi request 2023-10-09 19:04:39 +01:00
Simon Larsen
34817a1066 probe-api to ingestor 2023-10-09 18:54:23 +01:00
Simon Larsen
45bdadde87 add otel collector 2023-10-09 18:50:37 +01:00
Simon Larsen
753e017efd Merge branch 'master' into logging 2023-10-09 16:50:36 +01:00
Simon Larsen
6b80d76fda Merge pull request #802 from yashug/rewrite-error-boundary
Rewrites ErrorBoundary as class Component to catch errors
2023-10-09 13:38:27 +01:00
yashug
7daa955528 uses react-error-boudnary library to support functional component 2023-10-09 16:44:56 +05:30
Simon Larsen
ff9117ab05 add profile pic change to global event 2023-10-09 11:55:52 +01:00
Simon Larsen
4405c5fe10 fix model progress 2023-10-09 11:17:25 +01:00
yashug
98a1ae95b8 Merge branch 'master' into rewrite-error-boundary 2023-10-09 15:29:07 +05:30
Simon Larsen
bcc06324c3 Merge branch 'release' of github.com-simon:OneUptime/oneuptime into release 2023-10-08 15:04:51 +01:00
Simon Larsen
d7f2432a0c fix billing for free customers 2023-10-08 15:03:50 +01:00
Simon Larsen
e75c16b6f8 fix delete link 2023-10-08 14:01:16 +01:00
Simon Larsen
c13291c33c Merge pull request #811 from yashug/test/Common/Utils
Adds Test cases for Common/Utils to make 100% coverage
2023-10-07 19:23:30 +01:00
yashug
824fb68395 Adds Test cases for Common/Utils to make 100% coverage 2023-10-07 21:27:41 +05:30
Simon Larsen
e0795f24fc Merge pull request #808 from AaronDewes/patch-1
Fix typo in "expressions"
2023-10-07 15:09:13 +01:00
Aaron Dewes
0d178843e4 Fix typo in "expressions" 2023-10-06 22:59:23 +02:00
Simon Larsen
b6f8dbabc5 Merge pull request #805 from yashug/test-master-topsection-pages
Adds test cases for MasterPage & TopSection
2023-10-06 20:03:02 +01:00
Simon Larsen
78e97b815d Merge pull request #806 from OneUptime/master
Release
2023-10-06 17:38:40 +01:00
yashug
9ff18d6df5 Adds test cases for MasterPage & TopSection 2023-10-06 21:14:34 +05:30
yashug
9fec234b07 Rewrites ErrorBoundary as class Component to catch errors 2023-10-06 19:45:05 +05:30
Simon Larsen
5fa633959f Merge pull request #801 from Gift-Stack/test-404-page
Add test file for 404 page
2023-10-06 14:02:51 +01:00
GiFTED
0b4373edcc Add test file for 404 page 2023-10-06 13:58:49 +01:00
Simon Larsen
be09a9354d fix method 2023-10-06 12:47:16 +00:00
Simon Larsen
d6549cd861 fix local file 2023-10-06 12:41:29 +00:00
Simon Larsen
3ab1758f17 Merge branch 'master' of github.com:OneUptime/oneuptime 2023-10-06 12:07:02 +00:00
Simon Larsen
138a42326f add mkdir -p for status page certs 2023-10-06 12:07:00 +00:00
Simon Larsen
c6689d2a36 Merge pull request #798 from Gift-Stack/update-lint
Update lint
2023-10-06 09:42:24 +01:00
Simon Larsen
3376c2cb96 Merge pull request #800 from Gift-Stack/test-color-viewer
Create test for the color viewer component
2023-10-06 09:40:55 +01:00
GiFTED
d6e7b5840b Create test for the color viewer component 2023-10-06 01:34:39 +01:00
GiFTED
0319e43a21 restrict null asserted optional chain 2023-10-05 20:51:02 +01:00
GiFTED
d9be0a76e3 restrict null asserted optional chain 2023-10-05 20:50:52 +01:00
Simon Larsen
75748274c1 Merge pull request #797 from Gift-Stack/fix/no-unused-vars-lint
Fix/no unused vars lint
2023-10-05 18:55:38 +01:00
Simon Larsen
08893110bb Merge branch 'master' of github.com:OneUptime/oneuptime 2023-10-05 17:02:26 +00:00
Simon Larsen
1140eb7270 fix mount paths 2023-10-05 17:02:22 +00:00
GiFTED
b526306780 Cleanup unused variables 2023-10-05 18:01:59 +01:00
GiFTED
dc235464f4 Make active 2023-10-05 17:37:17 +01:00
GiFTED
f0413ac917 Make active 2023-10-05 17:36:59 +01:00
Simon Larsen
b00ce0e894 make docs clearer 2023-10-05 17:07:46 +01:00
Simon Larsen
2c90d8c0be fix docs for JS expression 2023-10-05 17:04:51 +01:00
Simon Larsen
7ccb5fca6f Merge pull request #796 from OneUptime/parse-json-on-monitors
Parse json on monitors
2023-10-05 17:01:42 +01:00
Simon Larsen
b65d452632 fix fmt 2023-10-05 17:01:16 +01:00
Simon Larsen
c040d60da9 fix fmt for js expression 2023-10-05 16:56:55 +01:00
Simon Larsen
6fe40bc630 fix js workflow 2023-10-05 16:53:24 +01:00
Simon Larsen
c85d9b8372 add docs 2023-10-05 15:59:10 +01:00
Simon Larsen
58cfe477c2 write docs 2023-10-05 14:40:10 +01:00
Simon Larsen
6d5cb57813 add code expression 2023-10-05 14:28:11 +01:00
Simon Larsen
4761c747a4 Merge branch 'master' into parse-json-on-monitors 2023-10-05 13:49:34 +01:00
Simon Larsen
7ca8d20c4d disable probe 2 2023-10-05 12:31:19 +00:00
Simon Larsen
59a77c6c15 Merge branch 'master' of github.com:OneUptime/oneuptime 2023-10-05 08:49:37 +00:00
Simon Larsen
099af1f5fe fix probe url in helm chart 2023-10-05 08:49:35 +00:00
Simon Larsen
d06c657a16 fix fmt 2023-10-05 09:29:14 +01:00
Simon Larsen
1d9abe8af0 update values in helm 2023-10-05 08:24:37 +00:00
Simon Larsen
9bf46fbcf1 refactor workflow file 2023-10-04 20:02:55 +01:00
Simon Larsen
91adc172bd Merge branch 'master' into parse-json-on-monitors 2023-10-04 19:36:55 +01:00
Simon Larsen
2d4bb56ffa exit node process on app init fail 2023-10-04 19:22:25 +01:00
Simon Larsen
01b677ec77 fix hostnames 2023-10-04 19:15:20 +01:00
Simon Larsen
ed7708ba7c remove change in config from npm 2023-10-04 19:11:51 +01:00
Simon Larsen
1d7980f3ba fix criteria filter 2023-10-04 19:11:15 +01:00
Simon Larsen
43069791da add values 2023-10-04 18:10:15 +00:00
Simon Larsen
ccedb52acd refactor criteria filter 2023-10-04 16:26:07 +01:00
Simon Larsen
f0d69b8ca0 add js expression 2023-10-04 16:18:52 +01:00
Simon Larsen
4359e8fa30 fix readme 2023-10-04 14:06:40 +00:00
Simon Larsen
b1162446db fix secrets 2023-10-04 13:45:24 +00:00
Simon Larsen
cf80324382 add secrets 2023-10-04 13:45:11 +00:00
Simon Larsen
f4e372cfce Merge branch 'master' of github.com:OneUptime/oneuptime 2023-10-04 13:44:54 +00:00
Simon Larsen
b47e95f836 add readme 2023-10-04 13:06:02 +00:00
Simon Larsen
e4af38dfa9 add create default clickhouse db on connect 2023-10-04 13:54:48 +01:00
Simon Larsen
e51ed0edc0 Merge branch 'release' of github.com-simon:OneUptime/oneuptime into release 2023-10-04 12:17:10 +01:00
Simon Larsen
746b396e4f fix ordering of elements 2023-10-04 11:20:08 +01:00
Simon Larsen
1d08ffb130 fix is master admin 2023-10-04 10:56:44 +01:00
Simon Larsen
231493b335 fix website request 2023-10-04 10:28:15 +01:00
Simon Larsen
1cbd9d2d26 Merge pull request #787 from OneUptime/master
Release clickhouse service
2023-10-04 09:54:47 +01:00
Simon Larsen
661e1b8b4d Merge pull request #786 from OneUptime/clickhouse-service
Clickhouse service
2023-10-03 16:06:45 +01:00
Simon Larsen
4a0ff353e6 fix fmt 2023-10-03 16:06:13 +01:00
Simon Larsen
36cbc22327 fix fmt 2023-10-03 13:54:55 +01:00
Simon Larsen
bbd57c917e make find work 2023-10-03 13:29:12 +01:00
Simon Larsen
f72d5550cf delete by 2023-10-02 16:49:58 +01:00
Simon Larsen
5920b97c6c add delete by 2023-10-02 16:48:50 +01:00
Simon Larsen
2c8019bfc6 add update query 2023-10-02 16:37:40 +01:00
Simon Larsen
d3e3b7d918 fix docker file 2023-10-02 16:17:43 +01:00
Simon Larsen
f2d02c4a5a fix fmt 2023-10-02 16:07:49 +01:00
Simon Larsen
209ac74643 fix analytics service 2023-10-02 15:59:18 +01:00
Simon Larsen
2a6cb19405 add pnpm to docker build to speed up builds 2023-10-02 12:38:40 +01:00
Simon Larsen
62db38520f add find by 2023-10-02 12:13:37 +01:00
Simon Larsen
31a41fed60 fix fmt 2023-10-02 11:06:26 +01:00
Simon Larsen
55fd9a87b9 fix fmt 2023-10-02 11:04:36 +01:00
Simon Larsen
91ed99f256 fix import statement 2023-10-02 10:59:42 +01:00
Simon Larsen
6f4963cdb0 fix common ui 2023-10-02 10:54:53 +01:00
Simon Larsen
9b6667e6c7 fix analytics model 2023-10-02 10:53:00 +01:00
Simon Larsen
b42b93844b fix compile err 2023-10-02 10:32:31 +01:00
Simon Larsen
ca63e6fbfb Merge pull request #785 from kashalls/check-if-curl-installed
Add curl to configure.sh dependency install
2023-10-01 21:16:51 +01:00
Jordan Jones
515b8ba94c chore(tests): sneak in the tiny misspelling 2023-10-01 08:29:47 -07:00
Jordan Jones
e9bdf80f84 chore(configure): add curl to install dependencies
This helps protect cases where new containers don't have curl installed by default.
2023-10-01 08:28:11 -07:00
Simon Larsen
31bcfc7531 fix compile err 2023-10-01 14:52:56 +01:00
Simon Larsen
4046da0523 fix model permissions for clickhouse 2023-10-01 14:35:46 +01:00
Simon Larsen
e728501ddb add create by 2023-10-01 12:46:51 +01:00
Simon Larsen
c06c0f8b38 fix helm test 2023-10-01 09:13:44 +00:00
Simon Larsen
433e776d05 fix test 2023-10-01 09:03:07 +00:00
Simon Larsen
8e9a5a1077 Merge branch 'master' of github.com:OneUptime/oneuptime 2023-10-01 08:31:28 +00:00
Simon Larsen
8a892e643f fix helm readme 2023-10-01 08:31:26 +00:00
Simon Larsen
7ea9f48089 add kube readme url 2023-10-01 09:05:02 +01:00
Simon Larsen
6fd882afd4 copy readme to helm package 2023-09-30 08:16:02 +01:00
Simon Larsen
917fb112d4 rename artifacthub repo 2023-09-29 21:56:22 +01:00
Simon Larsen
a02d5d420a fix helm chart deploy 2023-09-29 21:43:18 +01:00
Simon Larsen
35916a5836 Sign Helm Chart 2023-09-29 17:57:50 +01:00
Simon Larsen
df68a5e76a add readme to helm release 2023-09-29 17:40:32 +01:00
Simon Larsen
54462d4975 add gpg sign key 2023-09-29 16:24:10 +01:00
Simon Larsen
1441f3a345 fix readme 2023-09-29 16:08:28 +01:00
Simon Larsen
41c6898d30 fix encoding 2023-09-29 15:51:14 +01:00
Simon Larsen
0d09047454 add endpoint telemetry 2023-09-29 14:26:09 +01:00
Simon Larsen
8a3b893521 add status check script 2023-09-29 14:24:59 +01:00
Simon Larsen
0c13463c0d move artifact repo outside 2023-09-29 14:17:25 +01:00
Simon Larsen
36860e6ee9 fix status check script 2023-09-29 14:16:09 +01:00
Simon Larsen
53efbaf7a0 add bash and curl to test docker 2023-09-29 13:59:08 +01:00
Simon Larsen
8863c6a209 add chmod to scripts 2023-09-29 13:52:56 +01:00
Simon Larsen
c069ae47a0 fix js lint 2023-09-29 13:40:41 +01:00
Simon Larsen
4c1e4ad1b2 add repo id 2023-09-29 13:36:21 +01:00
Simon Larsen
82a1c3c93b fix helm lint 2023-09-29 13:31:17 +01:00
Simon Larsen
52e4a1247f fix readme 2023-09-29 13:22:08 +01:00
Simon Larsen
deec3cb6f4 fix readme 2023-09-29 13:06:38 +01:00
Simon Larsen
2a6afd155f fix readme 2023-09-29 13:03:28 +01:00
Simon Larsen
7bd6c14313 add http protocol to values 2023-09-29 13:00:09 +01:00
Simon Larsen
374a7e8267 remove host from admin dash 2023-09-29 11:25:35 +00:00
Simon Larsen
4a1b6ce89e remove host menu from admin dash 2023-09-29 11:25:07 +00:00
Simon Larsen
8a22320532 fix use https back to env var 2023-09-29 11:24:45 +00:00
Simon Larsen
5634b7b586 update docs 2023-09-29 11:16:48 +00:00
Simon Larsen
a399648093 remove values file 2023-09-29 11:13:30 +00:00
Simon Larsen
f927433fb7 add encode and decode 2023-09-29 11:12:09 +00:00
Simon Larsen
55ca86efbd fix values 2023-09-29 11:10:20 +00:00
Simon Larsen
1be4e59319 remove encode and decode 2023-09-29 11:09:50 +00:00
Simon Larsen
c62b3568cd fix path 2023-09-29 11:03:19 +00:00
Simon Larsen
de3333f0e8 fix 2023-09-29 11:02:24 +00:00
Simon Larsen
4c9e4cbb14 values 2023-09-29 11:01:43 +00:00
Simon Larsen
0d142a51a9 fix docs 2023-09-29 11:01:06 +00:00
Simon Larsen
ef4a630c1b add metal lb config 2023-09-29 10:57:00 +00:00
Simon Larsen
475da718b9 update readme 2023-09-29 10:52:42 +01:00
Simon Larsen
2d52cca8c3 remove helm route 2023-09-29 10:49:59 +01:00
Simon Larsen
0bb9ba47b8 add readme 2023-09-29 10:48:45 +01:00
Simon Larsen
2cf1b2f54b fix fmt 2023-09-29 10:15:54 +01:00
Simon Larsen
1e0444ee41 Merge pull request #784 from OneUptime/helm-package
Helm package
2023-09-29 10:11:21 +01:00
Simon Larsen
26aaee83b8 remove helm chart 2023-09-29 10:10:57 +01:00
Simon Larsen
06a76ecc2b add more readme 2023-09-29 09:56:13 +01:00
Simon Larsen
9867ef4824 remove pkg chart 2023-09-29 09:51:37 +01:00
Simon Larsen
c8a4ff73f3 fix tmp 2023-09-29 09:45:51 +01:00
Simon Larsen
623acf9212 fix temp 2023-09-29 09:43:54 +01:00
Simon Larsen
4a9ef40d8f fix dir nav 2023-09-29 09:39:37 +01:00
Simon Larsen
961034dd3f clone repo 2023-09-29 09:10:17 +01:00
Simon Larsen
b7263ae132 encode and decode url 2023-09-29 09:07:21 +01:00
Simon Larsen
0b3e45b8d9 add ls 2023-09-29 09:04:06 +01:00
Simon Larsen
5cf93f7173 list dir 2023-09-29 08:59:44 +01:00
Simon Larsen
e89db96254 fix dir nav 2023-09-29 08:54:22 +01:00
Simon Larsen
663eb5ec6d remove cd 2023-09-29 08:50:03 +01:00
Simon Larsen
b9a0c0297e make publish work 2023-09-29 08:48:06 +01:00
Simon Larsen
264fcce55c update ver of actions checkout 2023-09-29 08:42:43 +01:00
Simon Larsen
32ed2f3183 move ssh key up 2023-09-29 08:41:10 +01:00
Simon Larsen
4547fbe872 add ssh key 2023-09-29 08:32:20 +01:00
Simon Larsen
bb809874fd fix clone repo 2023-09-29 08:28:50 +01:00
Simon Larsen
8e71d86651 add ssh key 2023-09-29 08:25:31 +01:00
Simon Larsen
85840ddeb4 fix clone 2023-09-29 08:21:06 +01:00
Simon Larsen
799beb5303 add PAT to job 2023-09-29 08:13:31 +01:00
Simon Larsen
9cfe6f49e0 add lint 2023-09-28 22:06:21 +01:00
Simon Larsen
f7cf41cab4 add github token to clone 2023-09-28 22:04:18 +01:00
Simon Larsen
493e898647 unset extra header 2023-09-28 22:00:44 +01:00
Simon Larsen
f2f0e10ffe add checkout 2023-09-28 21:52:17 +01:00
Simon Larsen
12c555dfe7 package help chart 2023-09-28 21:49:49 +01:00
Simon Larsen
83bbd26eef Merge branch 'master' of github.com:OneUptime/oneuptime 2023-09-28 20:33:21 +00:00
Simon Larsen
55f8a32352 fix packaging 2023-09-28 20:33:00 +00:00
Simon Larsen
d7081c1bae add bash 2023-09-28 21:27:11 +01:00
Simon Larsen
6cd897c134 fix head request type with err status code 2023-09-28 19:00:41 +01:00
Simon Larsen
36cfc317a4 fix typo in file 2023-09-28 18:57:15 +01:00
Simon Larsen
44f2b3ce13 fix repo name 2023-09-28 16:05:10 +00:00
Simon Larsen
f0dd4ec980 add artifact repo file 2023-09-28 14:46:38 +00:00
Simon Larsen
e4604b69e4 add load balancer 2023-09-28 14:40:33 +00:00
Simon Larsen
22bc05b32d Merge branch 'new-helm' 2023-09-28 11:33:38 +00:00
Simon Larsen
9fe70fc864 fix fmt 2023-09-28 11:33:16 +00:00
Simon Larsen
24b9d301c8 fix typos 2023-09-28 11:26:50 +00:00
Simon Larsen
5ad45237f5 add restart policy 2023-09-28 11:24:33 +00:00
Simon Larsen
b05cecc6b4 Merge pull request #782 from OneUptime/new-helm
New helm
2023-09-28 12:21:27 +01:00
Simon Larsen
ab6ca0e52f Merge branch 'master' into new-helm 2023-09-28 12:21:20 +01:00
Simon Larsen
3c15f2dc53 Merge pull request #763 from OneUptime/dependabot/npm_and_yarn/AdminDashboard/json5-1.0.2
Bump json5 from 1.0.1 to 1.0.2 in /AdminDashboard
2023-09-28 12:20:26 +01:00
Simon Larsen
d96e00789c Merge pull request #780 from OneUptime/dependabot/npm_and_yarn/HelmChart/get-func-name-2.0.2
Bump get-func-name from 2.0.0 to 2.0.2 in /HelmChart
2023-09-28 12:20:18 +01:00
Simon Larsen
47ade5164c Merge pull request #781 from OneUptime/dependabot/npm_and_yarn/Licensing/get-func-name-2.0.2
Bump get-func-name from 2.0.0 to 2.0.2 in /Licensing
2023-09-28 12:20:11 +01:00
Simon Larsen
7f386e4c71 add test 2023-09-28 11:19:13 +00:00
Simon Larsen
9ee710a16c add tests for nginx 2023-09-28 11:18:00 +00:00
Simon Larsen
3714c2c91a add test container 2023-09-28 11:16:50 +00:00
Simon Larsen
074c02729b fix status check 2023-09-28 11:09:07 +00:00
Simon Larsen
1dc5584821 fix port name 2023-09-28 10:43:07 +00:00
Simon Larsen
cf10f0df52 remove host port 2023-09-28 10:16:19 +00:00
dependabot[bot]
9893a7b8ad Bump get-func-name from 2.0.0 to 2.0.2 in /Licensing
Bumps [get-func-name](https://github.com/chaijs/get-func-name) from 2.0.0 to 2.0.2.
- [Release notes](https://github.com/chaijs/get-func-name/releases)
- [Commits](https://github.com/chaijs/get-func-name/commits/v2.0.2)

---
updated-dependencies:
- dependency-name: get-func-name
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-09-27 22:08:13 +00:00
dependabot[bot]
bbcdf93699 Bump get-func-name from 2.0.0 to 2.0.2 in /HelmChart
Bumps [get-func-name](https://github.com/chaijs/get-func-name) from 2.0.0 to 2.0.2.
- [Release notes](https://github.com/chaijs/get-func-name/releases)
- [Commits](https://github.com/chaijs/get-func-name/commits/v2.0.2)

---
updated-dependencies:
- dependency-name: get-func-name
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-09-27 22:07:55 +00:00
Simon Larsen
a001766d15 random secrets for helm 2023-09-27 19:52:20 +00:00
Simon Larsen
3576e86532 terminate if app init fails 2023-09-27 20:48:50 +01:00
Simon Larsen
588ab1e925 redis fix param 2023-09-27 19:46:25 +00:00
Simon Larsen
f09260b374 add 443 port to nginx 2023-09-27 19:34:09 +00:00
Simon Larsen
43e75c342a fix probe ports 2023-09-27 19:32:24 +00:00
Simon Larsen
08411211ab fix fmt 2023-09-27 17:00:02 +01:00
Simon Larsen
f510fdae5c fix nginx env vars 2023-09-27 16:46:38 +01:00
Simon Larsen
0301f5ff42 fix status page auth 2023-09-27 16:46:08 +01:00
Simon Larsen
013a49bd3a Merge pull request #779 from OneUptime/new-helm
New helm
2023-09-27 13:43:20 +01:00
Simon Larsen
b4f2e738ea fix nginx 2023-09-27 12:39:10 +00:00
Simon Larsen
ac0bc23e1a add oneuptime secret to probe 2023-09-27 12:21:29 +00:00
Simon Larsen
666de177c8 add multiple probes 2023-09-27 12:07:28 +00:00
Simon Larsen
99579677a1 rename comment 2023-09-27 11:55:25 +00:00
Simon Larsen
8b25a006a9 fix file service 2023-09-27 11:44:00 +00:00
Simon Larsen
5e82b0f158 add file service 2023-09-27 11:42:27 +00:00
Simon Larsen
b05294b386 remove deprecated and template 2023-09-27 10:25:32 +00:00
Simon Larsen
984a77c9e7 Merge pull request #778 from OneUptime/new-helm
New helm chart
2023-09-27 11:22:14 +01:00
Simon Larsen
864d0b3c00 fix fmt 2023-09-27 09:25:41 +01:00
Simon Larsen
14d22e5f12 harden invite emails 2023-09-27 09:24:43 +01:00
Simon Larsen
614ba797b6 add isServer to nginx 2023-09-26 18:18:27 +00:00
Simon Larsen
fcbe7fc5f0 fix compile err 2023-09-26 17:59:18 +00:00
Simon Larsen
9ce81a2704 add test values 2023-09-26 16:57:46 +00:00
Simon Larsen
88a997afaa fix env vars 2023-09-26 16:15:04 +00:00
Simon Larsen
7c5dc7b35c add env vars to nginx 2023-09-26 16:11:04 +00:00
Simon Larsen
3dd150692a fix authenticated exception 2023-09-26 16:26:36 +01:00
Simon Larsen
f09e129fd1 add ports 2023-09-26 15:08:05 +00:00
Simon Larsen
c9b15dcfc7 Merge pull request #774 from OneUptime/new-helm
New helm
2023-09-26 13:42:51 +01:00
Simon Larsen
b039a5a045 fix fmt 2023-09-26 12:42:32 +00:00
Simon Larsen
3040b21484 trim hostname 2023-09-26 12:34:13 +00:00
Simon Larsen
af15c6f5f5 add probe api in helpers 2023-09-26 12:23:57 +00:00
Simon Larsen
4c82c922e2 fix database env vars 2023-09-26 11:37:26 +00:00
Simon Larsen
08f48ad082 fix port serialization 2023-09-26 11:29:24 +00:00
Simon Larsen
34a8ea806d limit_max for subscribers 2023-09-26 11:21:07 +01:00
Simon Larsen
a70e98f802 fix status page reset password 2023-09-26 11:06:59 +01:00
Simon Larsen
ee1ec87781 fix status page private user invites 2023-09-26 10:33:44 +01:00
Simon Larsen
f21de699dd fix helm chart 2023-09-25 15:07:43 +00:00
Simon Larsen
b30f9a472a fix helm env vars 2023-09-25 14:40:11 +00:00
Simon Larsen
e7c54b369d fix env vars 2023-09-25 13:25:50 +00:00
Simon Larsen
53bf92fac0 add size and pvc 2023-09-25 12:52:42 +00:00
Simon Larsen
6da56df5b1 fix env vars 2023-09-25 12:42:21 +00:00
Simon Larsen
931cccf86a add ports 2023-09-25 11:44:57 +00:00
Simon Larsen
027966cae3 add common env vars 2023-09-25 11:15:00 +00:00
Simon Larsen
f568473588 add env to notifications 2023-09-25 11:12:22 +00:00
Simon Larsen
ada26e3cce update readme 2023-09-25 10:47:23 +00:00
Simon Larsen
15e2c9cef2 add ingress and haraka 2023-09-25 10:34:26 +00:00
Simon Larsen
a091cd4faa Add deps 2023-09-25 10:04:07 +00:00
Simon Larsen
1fa5604cdd add deps 2023-09-25 09:08:55 +00:00
Simon Larsen
d9ed5f579e fix issues 2023-09-25 08:32:29 +00:00
Simon Larsen
0138e98506 fix ports 2023-09-24 13:13:24 +00:00
Simon Larsen
2feb024032 add more service files 2023-09-24 13:04:01 +00:00
Simon Larsen
55bf11bfd1 refactor into helpers 2023-09-24 12:28:45 +00:00
Simon Larsen
05d6dd2182 add service template 2023-09-24 12:18:58 +01:00
Simon Larsen
3595f5bf6f add helm lint command 2023-09-24 12:07:31 +01:00
Simon Larsen
398c08854a refactor helm service 2023-09-24 09:14:24 +01:00
Simon Larsen
af8d85f6d2 refactor pod env 2023-09-23 13:49:13 +01:00
Simon Larsen
296dfd15d5 add common ui 2023-09-23 13:46:22 +01:00
Simon Larsen
efc446edf1 add more secrets to helm 2023-09-23 12:35:24 +00:00
Simon Larsen
8453d32a4f add common env to home 2023-09-23 13:03:33 +01:00
Simon Larsen
41a8ddb09a remove deprecated docs 2023-09-23 12:52:41 +01:00
Simon Larsen
9f5fa3542a refactor readme 2023-09-23 12:51:16 +01:00
Simon Larsen
b801aba506 add custom probes to growth plan 2023-09-23 12:44:42 +01:00
Simon Larsen
c6e5d642b5 fix: Monitor remains permanently disabled if you delete an active incident before resolving it 2023-09-23 12:29:41 +01:00
Simon Larsen
002abb7498 add retry logic to register a probe 2023-09-22 21:30:10 +01:00
Simon Larsen
0e141b9b1a add retry logic to register a probe 2023-09-22 21:28:55 +01:00
Simon Larsen
230ccc4144 add template 2023-09-22 17:51:03 +00:00
Simon Larsen
ac4d2cc9ec install oneuptime home 2023-09-22 17:02:34 +00:00
Simon Larsen
a1f12fd14a update readme 2023-09-22 17:34:03 +01:00
Simon Larsen
3e4ad34179 add microk8s readme 2023-09-22 17:19:10 +01:00
Simon Larsen
a21bde486b add readme 2023-09-22 16:55:36 +01:00
Simon Larsen
4adb2b58ca add helm chart readme 2023-09-22 16:54:04 +01:00
Simon Larsen
929c39dea7 remove unneeded files 2023-09-22 16:49:56 +01:00
Simon Larsen
206c7d9bf1 add try catch in cron time parser 2023-09-22 16:36:04 +01:00
Simon Larsen
e16c9cb3b7 fix fmt 2023-09-22 13:04:54 +01:00
Simon Larsen
542fb4355e add cache option for lint 2023-09-22 12:53:26 +01:00
Simon Larsen
f63b910d78 add more probe logs 2023-09-22 12:49:51 +01:00
Simon Larsen
8254b635fb add probe logs 2023-09-22 12:34:02 +01:00
dependabot[bot]
e20229ca9d Bump json5 from 1.0.1 to 1.0.2 in /AdminDashboard
Bumps [json5](https://github.com/json5/json5) from 1.0.1 to 1.0.2.
- [Release notes](https://github.com/json5/json5/releases)
- [Changelog](https://github.com/json5/json5/blob/main/CHANGELOG.md)
- [Commits](https://github.com/json5/json5/compare/v1.0.1...v1.0.2)

---
updated-dependencies:
- dependency-name: json5
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-09-13 07:42:17 +00:00
763 changed files with 21815 additions and 22607 deletions

View File

@@ -6,6 +6,7 @@ node_modules
# dependencies
/node_modules
node_modules
**/node_modules
.idea
# testing
@@ -53,4 +54,7 @@ tests/coverage
settings.json
GoSDK/tester/
GoSDK/tester/
Llama/Models/*

View File

@@ -87,10 +87,11 @@
],
// https://www.npmjs.com/package/eslint-plugin-unused-imports
"no-unused-vars": "off",
"@typescript-eslint/no-unused-vars": "off",
"@typescript-eslint/no-unused-vars": ["error", {"argsIgnorePattern": "^_"}],
"@typescript-eslint/no-extra-non-null-assertion": "error",
"@typescript-eslint/no-floating-promises":"error",
"@typescript-eslint/await-thenable":"error",
"@typescript-eslint/no-non-null-asserted-optional-chain": "error",
"unused-imports/no-unused-imports": "error",
"unused-imports/no-unused-vars": [
"error",
@@ -178,11 +179,13 @@
"ignoreReadBeforeAssign": false
}
],
"no-var": "error"
"no-var": "error",
"object-curly-spacing": ["error", "always"],
"no-unneeded-ternary": "error"
},
"settings": {
"react": {
"version": "18.1.0"
}
}
}
}

View File

@@ -1,20 +0,0 @@
name: Code Analysis
on:
push:
branches:
- master
pull_request:
types: [opened, synchronize, reopened]
jobs:
sonarcloud:
name: SonarCloud
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis
- name: SonarCloud Scan
uses: SonarSource/sonarcloud-github-action@master
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}

View File

@@ -40,7 +40,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL

View File

@@ -8,7 +8,21 @@ on:
- 'release'
jobs:
lint:
helm-lint:
runs-on: ubuntu-latest
env:
CI_PIPELINE_ID: ${{github.run_number}}
steps:
- name: Checkout repo
uses: actions/checkout@v2
- name: Install Helm
run: |
curl https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3 | bash
- name: Lint Helm Chart
run: |
helm lint ./HelmChart/Public/oneuptime
js-lint:
runs-on: ubuntu-latest
env:
CI_PIPELINE_ID: ${{github.run_number}}

View File

@@ -151,20 +151,6 @@ jobs:
- run: cd CommonServer && npm install
- run: cd File && npm install && npm run compile
compile-helm-chart:
runs-on: ubuntu-latest
env:
CI_PIPELINE_ID: ${{github.run_number}}
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 18.3.0
- run: cd Common && npm install
- run: cd Model && npm install
- run: cd CommonServer && npm install
- run: cd HelmChart && npm install && npm run compile
compile-home:
runs-on: ubuntu-latest
env:
@@ -207,20 +193,6 @@ jobs:
- run: cd CommonServer && npm install
- run: cd Integration && npm install && npm run compile
compile-licensing:
runs-on: ubuntu-latest
env:
CI_PIPELINE_ID: ${{github.run_number}}
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 18.3.0
- run: cd Common && npm install
- run: cd Model && npm install
- run: cd CommonServer && npm install
- run: cd Licensing && npm install && npm run compile
compile-notification:
runs-on: ubuntu-latest
env:
@@ -262,7 +234,7 @@ jobs:
- run: cd CommonServer && npm install
- run: cd Probe && npm install && npm run compile
compile-probe-api:
compile-ingestor:
runs-on: ubuntu-latest
env:
CI_PIPELINE_ID: ${{github.run_number}}
@@ -274,7 +246,7 @@ jobs:
- run: cd Common && npm install
- run: cd Model && npm install
- run: cd CommonServer && npm install
- run: cd ProbeAPI && npm install && npm run compile
- run: cd Ingestor && npm install && npm run compile
compile-realtime:

View File

@@ -40,6 +40,21 @@ jobs:
- name: build docker image
run: sudo docker build -f ./LinkShortener/Dockerfile .
docker-build-otel-collector:
runs-on: ubuntu-latest
env:
CI_PIPELINE_ID: ${{github.run_number}}
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Preinstall
run: npm run prerun
# build image for accounts service
- name: build docker image
run: sudo docker build -f ./OTelCollector/Dockerfile .
docker-build-api-reference:
runs-on: ubuntu-latest
env:
@@ -131,20 +146,6 @@ jobs:
- name: build docker image
run: sudo docker build -f ./Haraka/Dockerfile .
docker-build-helm-chart:
runs-on: ubuntu-latest
env:
CI_PIPELINE_ID: ${{github.run_number}}
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Preinstall
run: npm run prerun
# build image for home
- name: build docker image
run: sudo docker build -f ./HelmChart/Dockerfile .
docker-build-home:
runs-on: ubuntu-latest
@@ -187,24 +188,10 @@ jobs:
- name: Preinstall
run: npm run prerun
# build image for licensing
# build image for integrations
- name: build docker image
run: sudo docker build -f ./Integration/Dockerfile .
docker-build-licensing:
runs-on: ubuntu-latest
env:
CI_PIPELINE_ID: ${{github.run_number}}
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Preinstall
run: npm run prerun
# build image for licensing
- name: build docker image
run: sudo docker build -f ./Licensing/Dockerfile .
docker-build-notification:
runs-on: ubuntu-latest
@@ -237,7 +224,7 @@ jobs:
- name: build docker image
run: sudo docker build -f ./Probe/Dockerfile .
docker-build-probe-api:
docker-build-ingestor:
runs-on: ubuntu-latest
env:
CI_PIPELINE_ID: ${{github.run_number}}
@@ -250,7 +237,7 @@ jobs:
# build image probe api
- name: build docker image
run: sudo docker build -f ./ProbeAPI/Dockerfile .
run: sudo docker build -f ./Ingestor/Dockerfile .
docker-build-realtime:
runs-on: ubuntu-latest

View File

@@ -11,7 +11,7 @@ jobs:
env:
BASE_URL: http://localhost
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: actions/setup-node@v3
with:
node-version: 16

View File

@@ -25,7 +25,7 @@ jobs:
permissions:
contents: write
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
- run: echo "${{needs.generate-build-number.outputs.build_number}}"
@@ -39,6 +39,54 @@ jobs:
body: |
${{steps.build_changelog.outputs.changelog}}
helm-chart-deploy:
runs-on: ubuntu-latest
needs: generate-build-number
env:
CI_COMMIT_AUTHOR: Continuous Integration
steps:
- name: Install Helm
run: curl https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3 | bash
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
- name: Build and Package Helm chart
run: |
cd ..
echo '${{ secrets.GPG_PRIVATE_KEY }}' > private.key
gpg --import private.key || true
rm private.key
echo "GPG key imported successfully"
gpg --export-secret-keys >~/.gnupg/secring.gpg
echo "GPG key exported successfully"
eval `ssh-agent -s`
ssh-add - <<< '${{ secrets.HELM_CHART_GITHUB_REPO_DEPLOY_KEY }}'
git clone git@github.com:OneUptime/helm-chart.git
cd oneuptime/HelmChart/Public
helm lint oneuptime
helm package --sign --key 'key@oneuptime.com' --keyring ~/.gnupg/secring.gpg oneuptime --version 7.0.${{needs.generate-build-number.outputs.build_number}} --app-version 7.0.${{needs.generate-build-number.outputs.build_number}}
echo "Helm Chart Package created successfully"
cd ..
ls
echo "Copying the package to helm-chart repo"
rm -r ../../helm-chart/oneuptime
cp -r ./Public/* ../../helm-chart
echo "Package copied successfully"
cd .. && cd .. && cd helm-chart
echo "Updating helm-chart repo"
git config --global user.name "${{ env.CI_COMMIT_AUTHOR }}"
git config --global user.email "hello@oneuptime.com"
echo "Git config set successfully"
echo "Adding the package to helm-chart repo"
helm repo index .
git add -A
git commit -m "Helm Chart Release 7.0.${{needs.generate-build-number.outputs.build_number}}"
git push origin master
nginx-docker-image-deploy:
needs: generate-build-number
runs-on: ubuntu-latest
@@ -54,7 +102,7 @@ jobs:
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -114,7 +162,7 @@ jobs:
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -159,6 +207,68 @@ jobs:
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
otel-collector-docker-image-deploy:
needs: generate-build-number
runs-on: ubuntu-latest
steps:
- name: Docker Meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
oneuptime/otel-collector
ghcr.io/oneuptime/otel-collector
tags: |
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
- uses: actions/setup-node@v2
with:
node-version: 18.3.0
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy otel-collector.
- name: Login to Docker Hub
uses: docker/login-action@v2.2.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v2.2.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v4
with:
file: ./OTelCollector/Dockerfile
context: .
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: |
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
workflow-docker-image-deploy:
needs: generate-build-number
runs-on: ubuntu-latest
@@ -174,7 +284,7 @@ jobs:
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -234,7 +344,7 @@ jobs:
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -294,7 +404,7 @@ jobs:
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -354,7 +464,7 @@ jobs:
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -399,6 +509,66 @@ jobs:
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
test-docker-image-deploy:
needs: generate-build-number
runs-on: ubuntu-latest
steps:
- name: Docker Meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
oneuptime/test
ghcr.io/oneuptime/test
tags: |
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
- uses: actions/setup-node@v2
with:
node-version: 18.3.0
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy test.
- name: Login to Docker Hub
uses: docker/login-action@v2.2.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v2.2.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v4
with:
file: ./Tests/Dockerfile
context: .
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: |
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
realtime-docker-image-deploy:
needs: generate-build-number
runs-on: ubuntu-latest
@@ -414,7 +584,7 @@ jobs:
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -459,7 +629,7 @@ jobs:
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
probe-api-docker-image-deploy:
ingestor-docker-image-deploy:
needs: generate-build-number
runs-on: ubuntu-latest
steps:
@@ -468,13 +638,13 @@ jobs:
uses: docker/metadata-action@v4
with:
images: |
oneuptime/probe-api
ghcr.io/oneuptime/probe-api
oneuptime/ingestor
ghcr.io/oneuptime/ingestor
tags: |
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -491,7 +661,7 @@ jobs:
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy probe-api.
# Build and deploy ingestor.
- name: Login to Docker Hub
uses: docker/login-action@v2.2.0
@@ -509,7 +679,7 @@ jobs:
- name: Build and push
uses: docker/build-push-action@v4
with:
file: ./ProbeAPI/Dockerfile
file: ./Ingestor/Dockerfile
context: .
platforms: linux/amd64,linux/arm64
push: true
@@ -534,7 +704,7 @@ jobs:
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -579,66 +749,6 @@ jobs:
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
licensing-docker-image-deploy:
needs: generate-build-number
runs-on: ubuntu-latest
steps:
- name: Docker Meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
oneuptime/licensing
ghcr.io/oneuptime/licensing
tags: |
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v3
with:
ref: ${{ github.ref }}
- uses: actions/setup-node@v2
with:
node-version: 18.3.0
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy licensing.
- name: Login to Docker Hub
uses: docker/login-action@v2.2.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v2.2.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v4
with:
file: ./Licensing/Dockerfile
context: .
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: |
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
integrations-docker-image-deploy:
needs: generate-build-number
runs-on: ubuntu-latest
@@ -654,7 +764,7 @@ jobs:
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -714,7 +824,7 @@ jobs:
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -774,7 +884,7 @@ jobs:
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -834,7 +944,7 @@ jobs:
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -879,65 +989,6 @@ jobs:
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
helm-chart-docker-image-deploy:
needs: generate-build-number
runs-on: ubuntu-latest
steps:
- name: Docker Meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
oneuptime/helm-chart
ghcr.io/oneuptime/helm-chart
tags: |
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v3
with:
ref: ${{ github.ref }}
- uses: actions/setup-node@v2
with:
node-version: 18.3.0
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy helm-chart.
- name: Login to Docker Hub
uses: docker/login-action@v2.2.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v2.2.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v4
with:
file: ./HelmChart/Dockerfile
context: .
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: |
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
haraka-docker-image-deploy:
needs: generate-build-number
@@ -954,7 +1005,7 @@ jobs:
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -1014,7 +1065,7 @@ jobs:
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -1075,7 +1126,7 @@ jobs:
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -1136,7 +1187,7 @@ jobs:
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -1196,7 +1247,7 @@ jobs:
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -1256,7 +1307,7 @@ jobs:
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -1316,7 +1367,7 @@ jobs:
type=raw,value=release,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}},pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}

View File

@@ -35,7 +35,7 @@ jobs:
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -96,7 +96,7 @@ jobs:
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -157,7 +157,7 @@ jobs:
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -202,6 +202,69 @@ jobs:
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
otel-collector-docker-image-deploy:
needs: generate-build-number
runs-on: ubuntu-latest
steps:
- name: Docker Meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
oneuptime/otel-collector
ghcr.io/oneuptime/otel-collector
tags: |
type=raw,value=test,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
- uses: actions/setup-node@v2
with:
node-version: 18.3.0
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy otel-collector.
- name: Login to Docker Hub
uses: docker/login-action@v2.2.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v2.2.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v4
with:
file: ./OTelCollector/Dockerfile
context: .
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: |
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
link-shortener-docker-image-deploy:
needs: generate-build-number
runs-on: ubuntu-latest
@@ -218,7 +281,7 @@ jobs:
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -279,7 +342,7 @@ jobs:
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -340,7 +403,7 @@ jobs:
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -385,6 +448,69 @@ jobs:
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
test-docker-image-deploy:
needs: generate-build-number
runs-on: ubuntu-latest
steps:
- name: Docker Meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
oneuptime/test
ghcr.io/oneuptime/test
tags: |
type=raw,value=test,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
- uses: actions/setup-node@v2
with:
node-version: 18.3.0
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy test.
- name: Login to Docker Hub
uses: docker/login-action@v2.2.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v2.2.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v4
with:
file: ./Tests/Dockerfile
context: .
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: |
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
realtime-docker-image-deploy:
needs: generate-build-number
runs-on: ubuntu-latest
@@ -401,7 +527,7 @@ jobs:
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -446,7 +572,7 @@ jobs:
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
probe-api-docker-image-deploy:
ingestor-docker-image-deploy:
needs: generate-build-number
runs-on: ubuntu-latest
steps:
@@ -455,14 +581,14 @@ jobs:
uses: docker/metadata-action@v4
with:
images: |
oneuptime/probe-api
ghcr.io/oneuptime/probe-api
oneuptime/ingestor
ghcr.io/oneuptime/ingestor
tags: |
type=raw,value=test,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -479,7 +605,7 @@ jobs:
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy probe-api.
# Build and deploy ingestor.
- name: Login to Docker Hub
uses: docker/login-action@v2.2.0
@@ -497,7 +623,7 @@ jobs:
- name: Build and push
uses: docker/build-push-action@v4
with:
file: ./ProbeAPI/Dockerfile
file: ./Ingestor/Dockerfile
context: .
platforms: linux/amd64,linux/arm64
push: true
@@ -523,7 +649,7 @@ jobs:
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -568,67 +694,6 @@ jobs:
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
licensing-docker-image-deploy:
needs: generate-build-number
runs-on: ubuntu-latest
steps:
- name: Docker Meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
oneuptime/licensing
ghcr.io/oneuptime/licensing
tags: |
type=raw,value=test,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v3
with:
ref: ${{ github.ref }}
- uses: actions/setup-node@v2
with:
node-version: 18.3.0
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy licensing.
- name: Login to Docker Hub
uses: docker/login-action@v2.2.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v2.2.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v4
with:
file: ./Licensing/Dockerfile
context: .
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: |
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
integrations-docker-image-deploy:
needs: generate-build-number
runs-on: ubuntu-latest
@@ -645,7 +710,7 @@ jobs:
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -706,7 +771,7 @@ jobs:
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -767,7 +832,7 @@ jobs:
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -828,7 +893,7 @@ jobs:
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -873,66 +938,6 @@ jobs:
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
helm-chart-docker-image-deploy:
needs: generate-build-number
runs-on: ubuntu-latest
steps:
- name: Docker Meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
oneuptime/helm-chart
ghcr.io/oneuptime/helm-chart
tags: |
type=raw,value=test,enable=true
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v3
with:
ref: ${{ github.ref }}
- uses: actions/setup-node@v2
with:
node-version: 18.3.0
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Generate Dockerfile from Dockerfile.tpl
run: npm run prerun
# Build and deploy helm-chart.
- name: Login to Docker Hub
uses: docker/login-action@v2.2.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v2.2.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v4
with:
file: ./HelmChart/Dockerfile
context: .
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: |
GIT_SHA=${{ github.sha }}
APP_VERSION=7.0.${{needs.generate-build-number.outputs.build_number}}
haraka-docker-image-deploy:
needs: generate-build-number
@@ -950,7 +955,7 @@ jobs:
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -1011,7 +1016,7 @@ jobs:
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -1072,7 +1077,7 @@ jobs:
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -1133,7 +1138,7 @@ jobs:
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -1194,7 +1199,7 @@ jobs:
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -1255,7 +1260,7 @@ jobs:
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
@@ -1316,7 +1321,7 @@ jobs:
type=semver,value=7.0.${{needs.generate-build-number.outputs.build_number}}-test,pattern={{version}},enable=true
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.ref }}

View File

@@ -1,4 +1,4 @@
name: Probe Api Test
name: Ingestor Test
on:
pull_request:
@@ -17,5 +17,5 @@ jobs:
- uses: actions/setup-node@v2
with:
node-version: 18.3.0
- run: cd ProbeAPI && npm install && npm run test
- run: cd Ingestor && npm install && npm run test

9
.gitignore vendored
View File

@@ -88,3 +88,12 @@ Backups/*.tar
Haraka/dkim/keys/private_base64.txt
Haraka/dkim/keys/public_base64.txt
.eslintcache
HelmChart/Values/*.values.yaml
Llama/Models/tokenizer*
Llama/Models/llama*
Llama/__pycache__/*

20
.vscode/launch.json vendored
View File

@@ -99,8 +99,8 @@
},
{
"address": "127.0.0.1",
"localRoot": "${workspaceFolder}/ProbeAPI",
"name": "Probe API: Debug with Docker",
"localRoot": "${workspaceFolder}/Ingestor",
"name": "Ingestor: Debug with Docker",
"port": 9932,
"remoteRoot": "/usr/src/app",
"request": "attach",
@@ -125,20 +125,6 @@
"restart": true,
"autoAttachChildProcesses": true
},
{
"address": "127.0.0.1",
"localRoot": "${workspaceFolder}/data-ingestor",
"name": "Data Ingestor: Debug with Docker",
"port": 9338,
"remoteRoot": "/usr/src/app",
"request": "attach",
"skipFiles": [
"<node_internals>/**"
],
"type": "node",
"restart": true,
"autoAttachChildProcesses": true
},
{
"address": "127.0.0.1",
"localRoot": "${workspaceFolder}/Notification",
@@ -197,7 +183,7 @@
},
{
"address": "127.0.0.1",
"localRoot": "${workspaceFolder}/ProbeAPI",
"localRoot": "${workspaceFolder}/Ingestor",
"name": "Probe API: Debug with Docker",
"port": 9251,
"remoteRoot": "/usr/src/app",

View File

@@ -3,12 +3,10 @@
#
# Pull base image nodejs image.
FROM node:current-alpine AS base
FROM node:current-alpine
USER root
RUN mkdir /tmp/npm && chmod 2777 /tmp/npm && chown 1000:1000 /tmp/npm && npm config set cache /tmp/npm --global
RUN npm config set fetch-retry-maxtimeout 6000000
RUN npm config set fetch-retry-mintimeout 1000000
ARG GIT_SHA
ARG APP_VERSION
@@ -26,9 +24,6 @@ SHELL ["/bin/bash", "-c"]
RUN mkdir /usr/src
# Install common
FROM base AS common
WORKDIR /usr/src/Common
COPY ./Common/package*.json /usr/src/Common/
RUN npm install
@@ -36,9 +31,6 @@ COPY ./Common /usr/src/Common
# Install Model
FROM base AS model
WORKDIR /usr/src/Model
COPY ./Model/package*.json /usr/src/Model/
RUN npm install
@@ -46,9 +38,6 @@ COPY ./Model /usr/src/Model
# Install CommonServer
FROM base AS commonserver
WORKDIR /usr/src/CommonServer
COPY ./CommonServer/package*.json /usr/src/CommonServer/
RUN npm install
@@ -59,7 +48,6 @@ COPY ./CommonServer /usr/src/CommonServer
# Install CommonUI
FROM base AS commonui
WORKDIR /usr/src/CommonUI
COPY ./CommonUI/package*.json /usr/src/CommonUI/
RUN npm install --force
@@ -67,22 +55,6 @@ COPY ./CommonUI /usr/src/CommonUI
#SET ENV Variables
# Install app
FROM base AS app
WORKDIR /usr/src/Common
COPY --from=common /usr/src/Common .
WORKDIR /usr/src/Model
COPY --from=model /usr/src/Model .
WORKDIR /usr/src/CommonServer
COPY --from=commonserver /usr/src/CommonServer .
WORKDIR /usr/src/CommonUI
COPY --from=commonui /usr/src/CommonUI .
ENV PRODUCTION=true
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true

View File

@@ -14,11 +14,14 @@ const init: () => Promise<void> = async (): Promise<void> => {
} catch (err) {
logger.error('App Init Failed:');
logger.error(err);
throw err;
}
};
init().catch((err: Error) => {
logger.error(err);
logger.info('Exiting node process');
process.exit(1);
});
export default app;

View File

@@ -108,16 +108,7 @@
<!-- End Google Tag Manager (noscript) -->
<noscript>You need to enable JavaScript to run this app.</noscript>
<div id="root"></div>
<!--
This HTML file is a template.
If you open it directly in the browser, you will see an empty page.
You can add webfonts, meta tags, or analytics to this file.
The build step will place the bundled scripts into the <body> tag.
To begin the development, run `npm start` or `yarn start`.
To create a production bundle, use `npm run build` or `yarn build`.
-->
<script src="/accounts/dist/bundle.js"></script>
<script>
tailwind.config = {

View File

@@ -107,7 +107,7 @@ const LoginPage: () => JSX.Element = () => {
maxPrimaryButtonWidth={true}
footer={
<div className="actions pointer text-center mt-4 hover:underline fw-semibold">
<p>
<div>
{!showSsoTip && (
<div
onClick={() => {
@@ -128,13 +128,13 @@ const LoginPage: () => JSX.Element = () => {
your project.
</div>
)}
</p>
</div>
</div>
}
/>
</div>
<div className="mt-10 text-center">
<p className="text-muted mb-0 text-gray-500">
<div className="text-muted mb-0 text-gray-500">
Don&apos;t have an account?{' '}
<Link
to={new Route('/accounts/register')}
@@ -142,7 +142,7 @@ const LoginPage: () => JSX.Element = () => {
>
Register.
</Link>
</p>
</div>
</div>
</div>
</div>

View File

@@ -16,14 +16,15 @@ export default abstract class LoginUtil {
User
) as User;
const token: string = value['token'] as string;
UserUtil.setAccessToken(token);
UserUtil.setEmail(user.email as Email);
UserUtil.setUserId(user.id as ObjectID);
UserUtil.setName(user.name as Name);
UserUtil.setName(user.name || new Name(''));
UserUtil.setIsMasterAdmin(user.isMasterAdmin as boolean);
if (user.profilePictureId) {
UserUtil.setProfilePicId(user.profilePictureId);
}
Analytics.userAuth(user.email!);
// go to dashboard, user should be logged in.

View File

@@ -3,12 +3,10 @@
#
# Pull base image nodejs image.
FROM node:current-alpine AS base
FROM node:current-alpine
USER root
RUN mkdir /tmp/npm && chmod 2777 /tmp/npm && chown 1000:1000 /tmp/npm && npm config set cache /tmp/npm --global
RUN npm config set fetch-retry-maxtimeout 6000000
RUN npm config set fetch-retry-mintimeout 1000000
ARG GIT_SHA
ARG APP_VERSION
@@ -26,18 +24,12 @@ SHELL ["/bin/bash", "-c"]
RUN mkdir /usr/src
# Install common
FROM base AS common
WORKDIR /usr/src/Common
COPY ./Common/package*.json /usr/src/Common/
RUN npm install
COPY ./Common /usr/src/Common
# Install Model
FROM base AS model
WORKDIR /usr/src/Model
COPY ./Model/package*.json /usr/src/Model/
RUN npm install
@@ -45,9 +37,6 @@ COPY ./Model /usr/src/Model
# Install CommonServer
FROM base AS commonserver
WORKDIR /usr/src/CommonServer
COPY ./CommonServer/package*.json /usr/src/CommonServer/
RUN npm install
@@ -58,29 +47,12 @@ COPY ./CommonServer /usr/src/CommonServer
# Install CommonUI
FROM base AS commonui
WORKDIR /usr/src/CommonUI
COPY ./CommonUI/package*.json /usr/src/CommonUI/
RUN npm install --force
COPY ./CommonUI /usr/src/CommonUI
#SET ENV Variables
# Install app
FROM base AS app
WORKDIR /usr/src/Common
COPY --from=common /usr/src/Common .
WORKDIR /usr/src/Model
COPY --from=model /usr/src/Model .
WORKDIR /usr/src/CommonServer
COPY --from=commonserver /usr/src/CommonServer .
WORKDIR /usr/src/CommonUI
COPY --from=commonui /usr/src/CommonUI .
ENV PRODUCTION=true
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true

View File

@@ -13,11 +13,14 @@ const init: () => Promise<void> = async (): Promise<void> => {
} catch (err) {
logger.error('App Init Failed:');
logger.error(err);
throw err;
}
};
init().catch((err: Error) => {
logger.error(err);
logger.info('Exiting node process');
process.exit(1);
});
export default app;

View File

@@ -66,7 +66,7 @@
"moment-timezone": "^0.5.40",
"nanoid": "^3.3.2",
"nanoid-dictionary": "^4.3.0",
"posthog-js": "^1.37.0",
"posthog-js": "^1.77.0",
"process": "^0.11.10",
"reflect-metadata": "^0.1.13",
"slugify": "^1.6.5",
@@ -74,7 +74,7 @@
"uuid": "^8.3.2"
},
"devDependencies": {
"@faker-js/faker": "^6.3.1",
"@faker-js/faker": "^8.0.2",
"@types/jest": "^27.5.2",
"@types/node": "^17.0.22",
"jest": "^27.5.1",
@@ -86,6 +86,7 @@
"version": "1.0.0",
"license": "MIT",
"dependencies": {
"@clickhouse/client": "^0.2.1",
"@elastic/elasticsearch": "^8.1.0",
"@opentelemetry/api": "^1.1.0",
"@opentelemetry/auto-instrumentations-node": "^0.31.0",
@@ -11913,9 +11914,9 @@
"peer": true
},
"node_modules/json5": {
"version": "2.2.2",
"resolved": "https://registry.npmjs.org/json5/-/json5-2.2.2.tgz",
"integrity": "sha512-46Tk9JiOL2z7ytNQWFLpj99RZkVgeHf87yGQKsIkaPz1qSH9UczKH1rO7K3wgRselo0tYMUNfecYpm/p1vC7tQ==",
"version": "2.2.3",
"resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz",
"integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==",
"dev": true,
"peer": true,
"bin": {
@@ -17137,9 +17138,9 @@
}
},
"node_modules/tsconfig-paths/node_modules/json5": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz",
"integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==",
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz",
"integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==",
"dev": true,
"peer": true,
"dependencies": {
@@ -23463,7 +23464,7 @@
"Common": {
"version": "file:../Common",
"requires": {
"@faker-js/faker": "^6.3.1",
"@faker-js/faker": "^8.0.2",
"@types/crypto-js": "^4.1.1",
"@types/jest": "^27.5.2",
"@types/nanoid-dictionary": "^4.2.0",
@@ -23477,7 +23478,7 @@
"moment-timezone": "^0.5.40",
"nanoid": "^3.3.2",
"nanoid-dictionary": "^4.3.0",
"posthog-js": "^1.37.0",
"posthog-js": "^1.77.0",
"process": "^0.11.10",
"reflect-metadata": "^0.1.13",
"slugify": "^1.6.5",
@@ -23510,6 +23511,7 @@
"CommonServer": {
"version": "file:../CommonServer",
"requires": {
"@clickhouse/client": "^0.2.1",
"@elastic/elasticsearch": "^8.1.0",
"@faker-js/faker": "^6.3.1",
"@opentelemetry/api": "^1.1.0",
@@ -27674,9 +27676,9 @@
"peer": true
},
"json5": {
"version": "2.2.2",
"resolved": "https://registry.npmjs.org/json5/-/json5-2.2.2.tgz",
"integrity": "sha512-46Tk9JiOL2z7ytNQWFLpj99RZkVgeHf87yGQKsIkaPz1qSH9UczKH1rO7K3wgRselo0tYMUNfecYpm/p1vC7tQ==",
"version": "2.2.3",
"resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz",
"integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==",
"dev": true,
"peer": true
},
@@ -31459,9 +31461,9 @@
},
"dependencies": {
"json5": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz",
"integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==",
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz",
"integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==",
"dev": true,
"peer": true,
"requires": {

View File

@@ -102,16 +102,7 @@
<!-- End Google Tag Manager (noscript) -->
<noscript>You need to enable JavaScript to run this app.</noscript>
<div id="root"></div>
<!--
This HTML file is a template.
If you open it directly in the browser, you will see an empty page.
You can add webfonts, meta tags, or analytics to this file.
The build step will place the bundled scripts into the <body> tag.
To begin the development, run `npm start` or `yarn start`.
To create a production bundle, use `npm run build` or `yarn build`.
-->
<script src="/admin/dist/bundle.js"></script>
<script>
tailwind.config = {

View File

@@ -19,7 +19,6 @@ import Users from './Pages/Users/Index';
import Logout from './Pages/Logout/Logout';
// Settings Pages.
import SettingsHost from './Pages/Settings/Host/Index';
import SettingsEmail from './Pages/Settings/SMTP/Index';
import SettingsCallSMS from './Pages/Settings/CallSMS/Index';
import SettingsProbes from './Pages/Settings/Probes/Index';
@@ -73,12 +72,7 @@ const App: () => JSX.Element = () => {
<PageRoute
path={RouteMap[PageMap.SETTINGS]?.toString() || ''}
element={<SettingsHost />}
/>
<PageRoute
path={RouteMap[PageMap.SETTINGS_HOST]?.toString() || ''}
element={<SettingsHost />}
element={<SettingsAuthentication />}
/>
<PageRoute

View File

@@ -9,12 +9,21 @@ import UserUtil from 'CommonUI/src/Utils/User';
import Navigation from 'CommonUI/src/Utils/Navigation';
import { ACCOUNTS_URL } from 'CommonUI/src/Config';
import UiAnalytics from 'CommonUI/src/Utils/Analytics';
import ErrorMessage from 'CommonUI/src/Components/ErrorMessage/ErrorMessage';
const Logout: FunctionComponent = (): ReactElement => {
useEffect(() => {
const [error, setError] = React.useState<string | null>(null);
const logout: Function = async () => {
UiAnalytics.logout();
UserUtil.logout();
await UserUtil.logout();
Navigation.navigate(ACCOUNTS_URL);
};
useEffect(() => {
logout().catch((error: Error) => {
setError(error.message || error.toString());
});
}, []);
return (
@@ -35,7 +44,8 @@ const Logout: FunctionComponent = (): ReactElement => {
},
]}
>
<PageLoader isVisible={true} />
{!error ? <PageLoader isVisible={true} /> : <></>}
{error ? <ErrorMessage error={error} /> : <></>}
</Page>
);
};

View File

@@ -11,7 +11,7 @@ const DashboardSideMenu: () => JSX.Element = (): ReactElement => {
return (
<SideMenu>
<SideMenuSection title="Basic">
<SideMenuItem
{/* <SideMenuItem
link={{
title: 'Host',
to: RouteUtil.populateRouteParams(
@@ -19,7 +19,7 @@ const DashboardSideMenu: () => JSX.Element = (): ReactElement => {
),
}}
icon={IconProp.Globe}
/>
/> */}
<SideMenuItem
link={{
title: 'Authentication',

View File

@@ -1,10 +1,8 @@
# Pull base image nodejs image.
FROM node:current-alpine AS base
FROM node:current-alpine
USER root
RUN mkdir /tmp/npm && chmod 2777 /tmp/npm && chown 1000:1000 /tmp/npm && npm config set cache /tmp/npm --global
RUN npm config set fetch-retry-maxtimeout 6000000
RUN npm config set fetch-retry-mintimeout 1000000
ARG GIT_SHA
ARG APP_VERSION
@@ -22,18 +20,12 @@ SHELL ["/bin/bash", "-c"]
RUN mkdir /usr/src
# Install common
FROM base AS common
WORKDIR /usr/src/Common
COPY ./Common/package*.json /usr/src/Common/
RUN npm install
COPY ./Common /usr/src/Common
# Install Model
FROM base AS model
WORKDIR /usr/src/Model
COPY ./Model/package*.json /usr/src/Model/
RUN npm install
@@ -41,9 +33,6 @@ COPY ./Model /usr/src/Model
# Install CommonServer
FROM base AS commonserver
WORKDIR /usr/src/CommonServer
COPY ./CommonServer/package*.json /usr/src/CommonServer/
RUN npm install
@@ -52,16 +41,7 @@ COPY ./CommonServer /usr/src/CommonServer
# Install app
FROM base AS app
WORKDIR /usr/src/Common
COPY --from=common /usr/src/Common .
WORKDIR /usr/src/Model
COPY --from=model /usr/src/Model .
WORKDIR /usr/src/CommonServer
COPY --from=commonserver /usr/src/CommonServer .
ENV PRODUCTION=true

View File

@@ -93,11 +93,14 @@ const init: () => Promise<void> = async (): Promise<void> => {
} catch (err) {
logger.error('App Init Failed:');
logger.error(err);
throw err;
}
};
init().catch((err: Error) => {
logger.error(err);
logger.info('Exiting node process');
process.exit(1);
});
export default app;

View File

@@ -1,4 +1,4 @@
import { ColumnAccessControl } from 'Common/Types/Database/AccessControl/AccessControl';
import { ColumnAccessControl } from 'Common/Types/BaseDatabase/AccessControl';
import { getTableColumns } from 'Common/Types/Database/TableColumn';
import Dictionary from 'Common/Types/Dictionary';
import ObjectID from 'Common/Types/ObjectID';

View File

@@ -1,25 +0,0 @@
# Setup Production Server
### Run the production.yml file.
run the file specific to the project with their specific names.
`kubectl create -f production.yml`
# Known Issues
### Issue 1
Sometimes you'll see this error
```
$ kubectl create -f staging.yaml
error: SchemaError(io.k8s.api.apps.v1beta2.DeploymentCondition): invalid object doesn't have additional properties
```
**Solution:**
Run the kubectl command with validate false
```
$ kubectl create -f staging.yaml --validate=false
```

View File

@@ -1,27 +0,0 @@
#########
#Since Kubernetes jobs are immitable (you cannot update it with a new image).
# To update these jobs, CI deletes the old jobs and recreates them using this file.
#########
#########
#UPDATE: Any update to this file should also be accompanied with ../test.yaml
#########
########-InitScript-##########
apiVersion: batch/v1
kind: Job
metadata:
name: InitScript
spec:
template:
spec:
containers:
- name: InitScript
image: localhost:32000/InitScript:test
imagePullPolicy: Always
env:
- name: MONGO_URL
value: 'mongodb://admin:372b60f4-704c-4205-8e5c-45cdbf44b1fc@mongo-0.mongo.default.svc.cluster.local:27017,mongo-1.mongo.default.svc.cluster.local:27017,mongo-2.mongo.default.svc.cluster.local:27017/oneuptimedb?replicaSet=rs0'
restartPolicy: Never
---
###########################

View File

@@ -1,20 +0,0 @@
#!/usr/bin/env bash
chmod +x ./ci/scripts/hashexist.sh
if [[ $CI_COMMIT_BRANCH != "master" ]] && [[ $CI_COMMIT_BRANCH != "release" ]]
then
next_stage="skip"
# the first argument is always the job name ($1)
for ((i = 2; i <= $#; i++ ))
do
hash_found=`./ci/scripts/hashexist.sh $1 ${!i}`
if [[ $hash_found == *"false"* ]]
then
next_stage="continue"
fi
done
echo $next_stage
fi

View File

@@ -1,29 +0,0 @@
#!/usr/bin/env bash
echo "
This script npm install's the every project
"
function clean_install {
echo "Installing $1"
cd $1
rm package-lock.json
rm -rf node_modules
npm install
npm audit fix
cd ..
echo "Complete $1"
echo ""
}
clean_install dashboard
clean_install accounts
clean_install backend
clean_install home
clean_install StatusPage
clean_install ApiReference
clean_install probe
clean_install AdminDashboard
clean_install InitScript
clean_install licensing
clean_install HelmChart
clean_install JavaScriptSDK
clean_install .

View File

@@ -1,52 +0,0 @@
#
sudo dpkg --configure -a
echo "Running Cleanup Script..."
if [[ $(which helm) ]]
then
# Remove oneuptime if helm is installed
echo "RUNNING COMMAND: sudo helm uninstall oneuptime || echo 'oneuptime not installed'"
sudo helm uninstall oneuptime || echo 'oneuptime not installed'
fi
if [[ $(which microk8s) ]]
then
# Stop microk8s VM
echo "Stopping microk8s..."
# Delete microk8s cluster so it can be fresh for next job.
echo "Delete microk8s Cluster..."
echo "RUNNING COMMAND: sudo usermod -a -G microk8s $USER"
sudo usermod -a -G microk8s $USER || echo "microk8s group not found"
echo "RUNNING COMMAND: microk8s.reset || 'microk8s cannot delete'"
sudo microk8s.reset || 'microk8s cannot delete'
echo "RUNNING COMMAND: microk8s.kubectl delete all --all || 'microk8s.kubectl cannot delete'"
sudo microk8s.kubectl delete all --all || 'microk8s.kubectl cannot delete'
echo "RUNNING COMMAND: microk8s.stop || 'microk8s cannot Stop'"
sudo microk8s.stop || "microk8s cannot Stop"
echo "RUNNING COMMAND: sudo snap remove microk8s || 'microk8s cannot be removed.'"
sudo snap remove microk8s || 'microk8s cannot be removed.'
fi
if [[ $(which docker) ]]
then
# Stop all docker containers
echo "Stop and Delete all docker containers..."
echo "RUNNING COMMAND: sudo docker stop \$(sudo docker ps -aq) || echo 'No docker containers'"
sudo docker stop $(sudo docker ps -aq) || echo 'No docker containers'
# Remove all docker containers.
echo "RUNNING COMMAND: sudo docker rm \$(sudo docker ps -aq) || echo 'No docker containers'"
sudo docker rm $(sudo docker ps -aq) || echo 'No docker containers'
# Delete all locally built images. (Comment this out to reduce build times)
# echo "RUNNING COMMAND: sudo docker rmi -f \$(sudo docker images -q) || echo 'No docker containers'"
# sudo docker rmi -f $(sudo docker images -q) || echo 'No docker containers'
# Comment line below to reduce build times.
# sudo docker system prune -a --volumes --force
fi
# fix broken unmet dependencies
sudo apt --fix-broken install -y -y
# remove any service holding port 80
sudo apt remove apache2 nginx -y
sudo apt purge apache2 nginx -y
sudo apt autoremove -y

View File

@@ -1,5 +0,0 @@
curl -X POST "https://api.cloudflare.com/client/v4/zones/${CF_ZONE}/purge_cache" \
-H "X-Auth-Email: ${CF_EMAIL}" \
-H "X-Auth-Key: ${CF_API_KEY}" \
-H "Content-Type: application/json" \
--data '{"purge_everything":true}'

View File

@@ -1,10 +0,0 @@
#!/usr/bin/env bash
echo "Connect machine with to communicate with aws cluster"
# This command will automatically switch to the oneuptime-production cluster
# AWS command.
#sudo aws eks update-kubeconfig --region $AWS_DEFAULT_REGION --name fyipe-production
doctl kubernetes cluster kubeconfig save 5c53f2a7-e462-48ab-9c02-3fbe281b2568

View File

@@ -1,40 +0,0 @@
#!/usr/bin/env bash
# Install Kubectl
curl -LO "https://storage.googleapis.com/kubernetes-release/release/$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)/bin/linux/amd64/kubectl"
sudo install -o root -g root -m 0755 kubectl /usr/local/bin/kubectl
sudo kubectl version --client
# fix dpkg interruption
sudo dpkg --configure -a
# fix broken unmet dependencies
sudo apt --fix-broken install -y -y
# # Install and configure aws cli
# sudo apt-get install -y unzip
# curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" # download latest aws cli version
# unzip awscliv2.zip
# sudo ./aws/install
# aws --version # confirm installation
# # Remove any already existing ~/.aws, /root/.kube or /root/.config directory
# sudo rm -rf ~/.aws || echo "Directory already deleted"
# sudo rm -rf /root/.config || echo "Directory already deleted"
# sudo rm -rf /root/.kube || echo "Directory already deleted"
# # Configure aws cli
# sudo aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID
# sudo aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY
# sudo aws configure set default.region $AWS_DEFAULT_REGION
# sudo aws configure set default.output json
# Install doctl.
wget https://github.com/digitalocean/doctl/releases/download/v1.71.0/doctl-1.71.0-linux-amd64.tar.gz
tar xf doctl-1.71.0-linux-amd64.tar.gz
sudo mv doctl /usr/local/bin
# Setup access token
doctl auth init -t $DIGITAL_OCEAN_API_KEY

View File

@@ -1,5 +0,0 @@
#!/usr/bin/env bash
echo "Connect machine with to communicate with aws cluster"
# This command will automatically switch to the oneuptime-staging cluster
sudo aws eks update-kubeconfig --region $AWS_DEFAULT_REGION --name fyipe-staging

View File

@@ -1,54 +0,0 @@
#!/usr/bin/env bash
echo "
======== IMPORTANT! =========
This script will take ~30+ mins to complete.
- Builds a docker container
- This script takes a long time to run when you run it for the first time
- Next subsequent executions would be a lot faster.
"
if [[ ! $(which docker) && ! $(docker --version) ]]
then
echo -e "\033[91mPlease install Docker. https://docs.docker.com/install"
exit
fi
if [[ ! $(which git) && ! $(docker --git) ]]
then
echo -e "\033[91mPlease install Git. https://git-scm.com/book/en/v2/Getting-Started-Installing-Git"
exit
fi
DIR=$PWD
ONEUPTIME_DIR="$DIR/.."
chmod +x ./ci/scripts/docker-build-and-push.sh
function build {
./ci/scripts/docker-build-and-push.sh $1 $2
}
# cd ..
build dashboard $1
build accounts $1
build backend $1
build home $1
build StatusPage $1
build ApiReference $1
build probe $1
build AdminDashboard $1
build InitScript $1
build slack $1
build licensing $1
build HelmChart $1
build LighthouseRunner $1
build ScriptRunner $1
build ContainerScanner $1
build ApplicationScanner $1
build data-ingestor $1
build realtime $1
build haraka $1
build HttpTestServer $1
cd $DIR

View File

@@ -1,6 +0,0 @@
cd $1
echo "Building $1"
sudo docker build -t oneuptime/$1:$2 .
echo "Pushing $1"
sudo docker push oneuptime/$1:$2
cd ..

View File

@@ -1,19 +0,0 @@
#Install Docker and setup registry and insecure access to it.
#IF docker is already installed, do not install docker.
if [[ ! $(which docker) ]]
then
echo "INSTALLING DOCKER"
sudo apt-get update
sudo apt-get install \
apt-transport-https \
ca-certificates \
curl \
gnupg \
lsb-release
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
echo \
"deb [arch=amd64 signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \
$(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
sudo apt-get update
sudo apt-get install docker-ce docker-ce-cli containerd.io
fi

View File

@@ -1,25 +0,0 @@
#!/usr/bin/env bash
# install jq only when it does not exist
function hashExist {
# $1 is the job name
# $2 is the project
if [[ ! $(which jq) ]]
then
sudo apt-get install -y jq
fi
PROJECT_HASH=`find $2 -type f ! -path "*node_modules*" ! -path "*build*" -print0 | sort -z | xargs -0 sha256sum | sha256sum`
HASH_VALUE=`echo $PROJECT_HASH$1 | sha256sum | head -c 64`
RESPONSE=`curl -H "Content-Type: application/json" -d "{\"structuredQuery\": {\"from\": {\"collectionId\": \"builds\"},\"where\": {\"compositeFilter\": {\"op\": \"AND\",\"filters\": [{\"fieldFilter\": {\"field\": {\"fieldPath\": \"project\"},\"op\": \"EQUAL\",\"value\": {\"stringValue\": '$2'}}},{\"fieldFilter\": {\"field\": {\"fieldPath\": \"hash\"},\"op\": \"EQUAL\",\"value\": {\"stringValue\": '$HASH_VALUE'}}}]}}}}" -X POST "https://firestore.googleapis.com/v1/projects/oneuptime-devops/databases/(default)/documents:runQuery"`
# if response contains an array of object with document key, then the hash already exist in db
document=`jq '.[0].document' <<< "$RESPONSE"`
if [[ $document == null ]]
then
echo false
else
echo true
fi
}
hashExist $1 $2

View File

@@ -1,31 +0,0 @@
sudo sed -i '/accounts/c\' /etc/hosts
ACCOUNTS_IP=`sudo k describe svc oneuptime-accounts | grep Endpoints | cut -d ":" -f 2`
echo $ACCOUNTS_IP' accounts.app.local' | sudo tee -a /etc/hosts
sudo sed -i '/AdminDashboard/c\' /etc/hosts
ADMIN_DASHBOARD_IP=`sudo k describe svc oneuptime-admin | grep Endpoints | cut -d ":" -f 2`
echo $ADMIN_DASHBOARD_IP' admin.app.local' | sudo tee -a /etc/hosts
sudo sed -i '/dashboard/c\' /etc/hosts
DASHBOARD_IP=`sudo k describe svc oneuptime-dashboard | grep Endpoints | cut -d ":" -f 2`
echo $DASHBOARD_IP' dashboard.app.local' | sudo tee -a /etc/hosts
sudo sed -i '/backend/c\' /etc/hosts
BACKEND_IP=`sudo k describe svc oneuptime-backend | grep Endpoints | cut -d ":" -f 2`
echo $BACKEND_IP' backend.app.local' | sudo tee -a /etc/hosts
sudo sed -i '/home/c\' /etc/hosts
HOME_IP=`sudo k describe svc oneuptime-home | grep Endpoints | cut -d ":" -f 2`
echo $HOME_IP' home.app.local' | sudo tee -a /etc/hosts
sudo sed -i '/StatusPage/c\' /etc/hosts
STATUSPAGE_IP=`sudo k describe svc oneuptime-status | grep Endpoints | cut -d ":" -f 2`
echo $STATUSPAGE_IP' status.app.local' | sudo tee -a /etc/hosts
sudo sed -i '/ApiReference/c\' /etc/hosts
ApiReference_IP=`sudo k describe svc oneuptime-ApiReference | grep Endpoints | cut -d ":" -f 2`
echo $ApiReference_IP' ApiReference.app.local' | sudo tee -a /etc/hosts
sudo sed -i '/licensing/c\' /etc/hosts
LICENSING_IP=`sudo k describe svc oneuptime-licensing | grep Endpoints | cut -d ":" -f 2`
echo $LICENSING_IP' licensing.app.local' | sudo tee -a /etc/hosts

View File

@@ -1,9 +0,0 @@
echo "Installing helm..."
# Fetch the Helm apt signing key and register it with apt. The original line
# was "sudo https://baltocdn.com/helm/signing.asc | ..." which tried to
# execute the URL as a command -- the curl download step was missing, so the
# key was never actually added.
curl -fsSL https://baltocdn.com/helm/signing.asc | sudo apt-key add -
sudo apt-get install apt-transport-https --yes
# Register the stable Helm apt repository ("sudo echo" was pointless; the
# privileged write is done by "sudo tee").
echo "deb https://baltocdn.com/helm/stable/debian/ all main" | sudo tee /etc/apt/sources.list.d/helm-stable-debian.list
sudo apt-get update
# --yes so the install cannot hang on a prompt in non-interactive CI.
sudo apt-get install --yes helm
echo "Install helm complete."

View File

@@ -1,2 +0,0 @@
# Look up the status of a single job in the current GitLab CI pipeline.
# $1 -> job name to look up.
# Requires CI-provided env vars: $PERSONAL_ACCESS_TOKEN, $CI_PROJECT_ID,
# $CI_PIPELINE_ID.
# Prints the job status as a JSON string INCLUDING the surrounding double
# quotes (jq -c emits JSON), e.g. "success" / "failed" -- callers compare
# against \"success\" accordingly.
sudo apt-get install -y jq
curl -s -S --header "PRIVATE-TOKEN: $PERSONAL_ACCESS_TOKEN" "https://gitlab.com/api/v4/projects/$CI_PROJECT_ID/pipelines/$CI_PIPELINE_ID/jobs?per_page=50" | jq -c ".[] | select(.name==\"$1\") | .status"

View File

@@ -1,53 +0,0 @@
#!/usr/bin/env bash
echo "
This script rollbacks every project if any of the deployment fails
"
chmod +x ./ci/scripts/job-status.sh

# Undo one production deployment, but only if its deploy job actually
# succeeded (there is nothing to undo for a job that never deployed).
# $1 -> project/deployment name.
function rollback {
    # job-status.sh prints the job status as a JSON string (with quotes),
    # hence the \"...\" comparisons.
    export status=`./ci/scripts/job-status.sh production_$1`
    if [[ $status == \"success\" ]]
    then
        echo "Rolling back $1"
        sudo kubectl rollout undo deployment/$1
    else
        echo "Rollback skipped $1"
    fi
}

# If the production deploy job for $1 failed, roll back every project that
# may already have been deployed in this pipeline, then fail the build.
# NOTE(review): InitScript and probe-1/probe-2 are checked below, but the
# rollback list omits InitScript and uses plain "probe" -- confirm whether
# that is intentional.
function check {
    export status=`./ci/scripts/job-status.sh production_$1`
    if [[ $status == \"failed\" ]]
    then
        echo "Deployment unsuccessful for $1, rolling back all new deployments"
        rollback dashboard
        rollback accounts
        rollback backend
        rollback home
        rollback StatusPage
        rollback ApiReference
        rollback probe
        rollback AdminDashboard
        rollback licensing
        rollback HelmChart
        rollback slack
        exit 1
    else
        echo "$1 Deployment successful"
    fi
}

# Check every production deploy job; the first failure triggers a full rollback.
check dashboard
check accounts
check backend
check home
check StatusPage
check ApiReference
check probe-1
check probe-2
check AdminDashboard
check licensing
check InitScript
check slack
check HelmChart

View File

@@ -1,26 +0,0 @@
# Wait for all the services to come online, then dump the state of every
# Kubernetes resource type for debugging purposes.

# Helper: log the exact command about to run (so CI logs show it), then run
# it. Replaces the previous hand-maintained echo/command pairs, which could
# (and did) drift apart.
function run {
    echo "RUNNING COMMAND: $*"
    "$@"
}

run echo 'Wait for 10 mins....'
run sleep 10m

# Get the status of all the kubernetes resources for debugging purposes.
run sudo k get pods
run sudo k get services
run sudo k get rc
run sudo k get deployments
run sudo k get statefulset
run sudo k get pv
run sudo k get pvc
run sudo k get storageclass
run sudo k cluster-info
run sudo k get all --all-namespaces

View File

@@ -1,36 +0,0 @@
# Cleanup
# (Log lines now match the commands actually executed; they previously said
# ./ci/cleanup.sh while running ./ci/scripts/cleanup.sh.)
echo "RUNNING COMMAND: chmod +x ./ci/scripts/cleanup.sh"
chmod +x ./ci/scripts/cleanup.sh
echo "RUNNING COMMAND: ./ci/scripts/cleanup.sh"
./ci/scripts/cleanup.sh
# Install Docker and configure the local registry (localhost:32000) as an
# insecure registry. If docker is already installed, do not install docker.
if [[ ! $(which docker) ]]
then
    echo "RUNNING COMMAND: curl -sSL https://get.docker.com/ | sh"
    curl -sSL https://get.docker.com/ | sh
    echo "RUNNING COMMAND: sudo touch /etc/docker/daemon.json"
    sudo touch /etc/docker/daemon.json
    # BUGFIX: the quotes inside the JSON must be escaped. The original line
    # left them unescaped, so the shell stripped them and wrote
    # { insecure-registries: [localhost:32000] } -- invalid JSON that docker
    # rejects on restart.
    echo "RUNNING COMMAND: write insecure-registries config to /etc/docker/daemon.json"
    echo -e "{\n  \"insecure-registries\": [\"localhost:32000\"]\n}" | sudo tee -a /etc/docker/daemon.json >> /dev/null
    echo "RUNNING COMMAND: sudo systemctl restart docker"
    sudo systemctl restart docker
fi
# Install packages.
echo "RUNNING COMMAND: sudo apt-get update -y && sudo apt-get install -y bash git sudo nodejs"
sudo apt-get update -y && sudo apt-get install -y bash git sudo nodejs
# Install additional dependencies for puppeteer (headless Chrome).
echo "RUNNING COMMAND: sudo apt-get install -yq gconf-service libasound2 libatk1.0-0 libc6 libcairo2 libcups2 libdbus-1-3 \
 libexpat1 libfontconfig1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 libgtk-3-0 libnspr4 \
 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 libxcomposite1 \
 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 \
 ca-certificates fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget
"
sudo apt-get install -yq gconf-service libasound2 libatk1.0-0 libc6 libcairo2 libcups2 libdbus-1-3 \
 libexpat1 libfontconfig1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 libgtk-3-0 libnspr4 \
 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 libxcomposite1 \
 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 \
 ca-certificates fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget

View File

@@ -1,19 +0,0 @@
##############
# IMPORTANT:
# This script sets the CI/CD machine up to run a build job. It's usually the first script that runs
##############
# Cleanup
# (Log lines now match the commands actually executed; they previously said
# ./ci/cleanup.sh while running ./ci/scripts/cleanup.sh.)
echo "RUNNING COMMAND: chmod +x ./ci/scripts/cleanup.sh"
chmod +x ./ci/scripts/cleanup.sh
echo "RUNNING COMMAND: ./ci/scripts/cleanup.sh"
./ci/scripts/cleanup.sh
# Setup Machine.
echo "RUNNING COMMAND: chmod +x ./HelmChart/public/install.sh"
chmod +x ./HelmChart/public/install.sh
echo "RUNNING COMMAND: ./HelmChart/public/install.sh ci-install $1"
./HelmChart/public/install.sh ci-install $1
# For dpkg interruption: recover from a previously interrupted apt/dpkg run.
sudo dpkg --configure -a

View File

@@ -1,8 +0,0 @@
# This will download, build and package docker containers, then set up the
# Kubernetes cluster they deploy into.
# (Log lines now match the commands actually executed; they previously
# pointed at ./ci/*.sh while running ./ci/scripts/*.sh.)
echo "RUNNING COMMAND: chmod +x ./ci/scripts/docker-build-all-and-push.sh"
chmod +x ./ci/scripts/docker-build-all-and-push.sh
echo "RUNNING COMMAND: ./ci/scripts/docker-build-all-and-push.sh test"
./ci/scripts/docker-build-all-and-push.sh test
# Setup Kubernetes Cluster
chmod +x ./ci/scripts/setup-cluster.sh
echo "RUNNING COMMAND: ./ci/scripts/setup-cluster.sh"
./ci/scripts/setup-cluster.sh

View File

@@ -1,58 +0,0 @@
#!/usr/bin/env bash
echo "
This script rollbacks every project if any of the deployment fails
"
chmod +x ./ci/scripts/job-status.sh

# Undo one staging deployment, but only if its staging deploy job actually
# succeeded (nothing to undo otherwise). Staging deployments are prefixed
# with "fi-". $1 -> project name.
function rollback {
    # job-status.sh prints the job status as a JSON string (with quotes),
    # hence the \"...\" comparison for $status.
    export status=`./ci/scripts/job-status.sh staging_$1`
    if [[ $status == \"success\" ]]
    then
        echo "Rolling back $1"
        sudo kubectl rollout undo deployment/fi-$1
        # The probe project runs as two deployments (fi-probe1, fi-probe2);
        # undo both. BUGFIX: $1 holds the plain word probe (it is passed as
        # "rollback probe"), so the previous comparison against the literal
        # \"probe\" -- with embedded quote characters -- never matched and
        # the probe deployments were never rolled back.
        if [[ $1 == "probe" ]]
        then
            echo "Rolling back probe1"
            sudo kubectl rollout undo deployment/fi-probe1
            sudo kubectl rollout undo deployment/fi-probe2
        fi
    else
        echo "Rollback skipped $1"
    fi
}

# If the staging deploy job for $1 failed, roll back every project that may
# already have been deployed in this pipeline, then fail the build.
# $1 -> project name.
function check {
    export status=`./ci/scripts/job-status.sh staging_$1`
    if [[ $status == \"failed\" ]]
    then
        echo "Deployment unsuccessful for $1, rolling back all new deployments"
        rollback dashboard
        rollback accounts
        rollback backend
        rollback home
        rollback StatusPage
        rollback ApiReference
        rollback probe
        rollback AdminDashboard
        rollback licensing
        rollback HelmChart
        rollback slack
        exit 1
    else
        echo "$1 Deployment successful"
    fi
}

# Check every staging deploy job; the first failure triggers a full rollback.
check dashboard
check accounts
check backend
check home
check StatusPage
check ApiReference
check probe-1
check probe-2
check AdminDashboard
check licensing
check slack
check HelmChart

View File

@@ -1,23 +0,0 @@
#!/usr/bin/env bash
# Record a content hash for each project a CI job built, so later pipeline
# runs can skip jobs whose inputs have not changed (see hashexist.sh).
chmod +x ./ci/scripts/hashexist.sh

# $1 -> Job Name; $2 -> Project directory
function storeHash {
    # Hash every file in the project (excluding node_modules/build), then
    # fold the job name in so each (project, job) pair gets its own hash.
    PROJECT_HASH=`find $2 -type f ! -path "*node_modules*" ! -path "*build*" -print0 | sort -z | xargs -0 sha256sum | sha256sum`
    HASH_VALUE=`echo $PROJECT_HASH$1 | sha256sum | head -c 64`
    # Store the document in Firestore. BUGFIX: string values must be
    # double-quoted -- the previous '$2' / '$HASH_VALUE' single quotes are
    # not valid JSON and the Firestore REST API rejects the payload.
    curl -H "Content-Type: application/json" -d "{\"fields\": {\"project\": {\"stringValue\": \"$2\"},\"hash\": {\"stringValue\": \"$HASH_VALUE\"}}}" -X POST "https://firestore.googleapis.com/v1/projects/oneuptime-devops/databases/(default)/documents/builds"
}

# Hashes are only stored for feature branches; master/release always rebuild.
if [[ $CI_COMMIT_BRANCH != "master" ]] && [[ $CI_COMMIT_BRANCH != "release" ]]
then
    # the first argument is always the job name ($1); the rest are projects.
    for ((i = 2; i <= $#; i++ ))
    do
        hash_exist=`./ci/scripts/hashexist.sh $1 ${!i}`
        if [[ $hash_exist == *"false"* ]]
        then
            storeHash $1 ${!i}
        fi
    done
fi

View File

@@ -1,35 +0,0 @@
#!/usr/bin/env bash
echo "
This script changes version of every project
"

# Bump one project's package.json to the pipeline version.
# $1 -> project directory relative to the repo root.
function version {
    # Guard and quote the cd: if the directory is missing, skip it instead of
    # running "npm version" in the current directory and then cd'ing up out
    # of the repo (which would corrupt every subsequent call).
    cd "$1" || return
    npm version "6.0.$CI_PIPELINE_ID"
    cd ..
}

# node/npm are needed for "npm version".
curl -sL https://deb.nodesource.com/setup_12.x | sudo -E bash -
sudo apt-get install -y nodejs

# Version every project, plus the repo root itself (".").
for project in dashboard accounts backend home StatusPage ApiReference \
    probe AdminDashboard InitScript licensing HelmChart JavaScriptSDK \
    oneuptime-le-store oneuptime-acme-http-01 LighthouseRunner ScriptRunner \
    ContainerScanner ApplicationScanner data-ingestor realtime ProbeAPI .
do
    version "$project"
done

View File

@@ -0,0 +1,42 @@
# Some basic commands for Clickhouse
## Show tables in the database
```sql
show tables from oneuptime
```
## Show table structure
```sql
DESCRIBE TABLE oneuptime.Span
```
## Show table data
```sql
select * from table_name
```
## Delete table data
```sql
TRUNCATE TABLE table_name
```
## Delete table
```sql
drop table oneuptime.table_name
```
## Insert for nested data
```sql
INSERT INTO opentelemetry_spans (trace_id, span_id, attributes.key, attributes.value) VALUES
('trace1', 'span1', ['key1', 'key2'], ['value1', 'value2']),
('trace2', 'span2', ['keyA', 'keyB'], ['valueA', 'valueB']);
```

View File

@@ -0,0 +1,261 @@
import TableColumnType from '../Types/AnalyticsDatabase/TableColumnType';
import AnalyticsTableColumn from '../Types/AnalyticsDatabase/TableColumn';
import BadDataException from '../Types/Exception/BadDataException';
import AnalyticsTableEngine from '../Types/AnalyticsDatabase/AnalyticsTableEngine';
import ColumnBillingAccessControl from '../Types/BaseDatabase/ColumnBillingAccessControl';
import TableBillingAccessControl from '../Types/BaseDatabase/TableBillingAccessControl';
import { TableAccessControl } from '../Types/BaseDatabase/AccessControl';
import EnableWorkflowOn from '../Types/BaseDatabase/EnableWorkflowOn';
import ObjectID from '../Types/ObjectID';
import CommonModel from './CommonModel';
/**
 * Base class for analytics data models. Holds the table metadata -- name,
 * engine, columns, primary keys, access control -- and appends the standard
 * _id / createdAt / updatedAt columns to every table.
 */
export default class AnalyticsDataModel extends CommonModel {
    /**
     * @param data.tableName - name of the analytics table.
     * @param data.singularName / data.pluralName - display names.
     * @param data.tableEngine - optional table engine; defaults to MergeTree.
     * @param data.tableColumns - user-defined columns (the standard columns
     *     are appended automatically).
     * @param data.primaryKeys - keys of the primary-key columns; each must
     *     exist in tableColumns and be a required column.
     * @throws BadDataException when primary keys are missing, unknown, or
     *     reference non-required columns.
     */
    public constructor(data: {
        tableName: string;
        singularName: string;
        pluralName: string;
        tableEngine?: AnalyticsTableEngine | undefined;
        tableColumns: Array<AnalyticsTableColumn>;
        allowAccessIfSubscriptionIsUnpaid?: boolean | undefined;
        tableBillingAccessControl?: TableBillingAccessControl | undefined;
        accessControl?: TableAccessControl | undefined;
        primaryKeys: Array<string>; // this should be the subset of tableColumns
        enableWorkflowOn?: EnableWorkflowOn | undefined;
    }) {
        super({
            tableColumns: data.tableColumns,
        });

        // Copy the caller's columns so appending the standard columns below
        // does not mutate the caller's array.
        const columns: Array<AnalyticsTableColumn> = [...data.tableColumns];

        this.tableName = data.tableName;

        if (data.tableEngine) {
            this.tableEngine = data.tableEngine;
        }

        // Standard columns added to every analytics table.
        columns.push(
            new AnalyticsTableColumn({
                key: '_id',
                title: 'ID',
                description: 'ID of this object',
                required: true,
                type: TableColumnType.ObjectID,
            })
        );

        columns.push(
            new AnalyticsTableColumn({
                key: 'createdAt',
                title: 'Created',
                description: 'Date and Time when the object was created.',
                required: true,
                type: TableColumnType.Date,
            })
        );

        columns.push(
            new AnalyticsTableColumn({
                key: 'updatedAt',
                title: 'Updated',
                description: 'Date and Time when the object was updated.',
                required: true,
                type: TableColumnType.Date,
            })
        );

        if (!data.primaryKeys || data.primaryKeys.length === 0) {
            throw new BadDataException('Primary keys are required');
        }

        // check if primary keys are subset of tableColumns
        data.primaryKeys.forEach((primaryKey: string) => {
            const column: AnalyticsTableColumn | undefined = columns.find(
                (column: AnalyticsTableColumn) => {
                    return column.key === primaryKey;
                }
            );

            if (!column) {
                throw new BadDataException(
                    'Primary key ' + primaryKey + ' is not part of tableColumns'
                );
            }

            // Primary-key columns must always have a value.
            if (!column.required) {
                throw new BadDataException(
                    'Primary key ' +
                        primaryKey +
                        ' is not required. Primary keys must be required.'
                );
            }
        });

        this.primaryKeys = data.primaryKeys;
        this.tableColumns = columns;
        this.singularName = data.singularName;
        this.pluralName = data.pluralName;
        this.tableBillingAccessControl = data.tableBillingAccessControl;
        this.allowAccessIfSubscriptionIsUnpaid =
            data.allowAccessIfSubscriptionIsUnpaid || false;
        this.accessControl = data.accessControl;
        this.enableWorkflowOn = data.enableWorkflowOn;

        // initialize Arrays.
        // NestedModel columns hold arrays of child models; start them empty
        // so reads never see undefined.
        for (const column of this.tableColumns) {
            if (column.type === TableColumnType.NestedModel) {
                this.setColumnValue(column.key, []);
            }
        }
    }

    // Workflow triggers enabled for this table, if any.
    private _enableWorkflowOn: EnableWorkflowOn | undefined;
    public get enableWorkflowOn(): EnableWorkflowOn | undefined {
        return this._enableWorkflowOn;
    }
    public set enableWorkflowOn(v: EnableWorkflowOn | undefined) {
        this._enableWorkflowOn = v;
    }

    // Table-level permission configuration.
    private _accessControl: TableAccessControl | undefined;
    public get accessControl(): TableAccessControl | undefined {
        return this._accessControl;
    }
    public set accessControl(v: TableAccessControl | undefined) {
        this._accessControl = v;
    }

    private _tableName: string = '';
    public get tableName(): string {
        return this._tableName;
    }
    public set tableName(v: string) {
        this._tableName = v;
    }

    // Storage engine of the table; defaults to MergeTree.
    private _tableEngine: AnalyticsTableEngine = AnalyticsTableEngine.MergeTree;
    public get tableEngine(): AnalyticsTableEngine {
        return this._tableEngine;
    }
    public set tableEngine(v: AnalyticsTableEngine) {
        this._tableEngine = v;
    }

    private _primaryKeys: Array<string> = [];
    public get primaryKeys(): Array<string> {
        return this._primaryKeys;
    }
    public set primaryKeys(v: Array<string>) {
        this._primaryKeys = v;
    }

    private _singularName: string = '';
    public get singularName(): string {
        return this._singularName;
    }
    public set singularName(v: string) {
        this._singularName = v;
    }

    private _pluralName: string = '';
    public get pluralName(): string {
        return this._pluralName;
    }
    public set pluralName(v: string) {
        this._pluralName = v;
    }

    // Billing-plan restriction for the whole table, if any.
    private _tableBillingAccessControl: TableBillingAccessControl | undefined;
    public get tableBillingAccessControl():
        | TableBillingAccessControl
        | undefined {
        return this._tableBillingAccessControl;
    }
    public set tableBillingAccessControl(
        v: TableBillingAccessControl | undefined
    ) {
        this._tableBillingAccessControl = v;
    }

    // When true, the table remains accessible even if the subscription is
    // unpaid.
    private _allowAccessIfSubscriptionIsUnpaid: boolean = false;
    public get allowAccessIfSubscriptionIsUnpaid(): boolean {
        return this._allowAccessIfSubscriptionIsUnpaid;
    }
    public set allowAccessIfSubscriptionIsUnpaid(v: boolean) {
        this._allowAccessIfSubscriptionIsUnpaid = v;
    }

    /**
     * Returns the column flagged as the tenant id (isTenantId), or null when
     * the table has no tenant column.
     */
    public getTenantColumn(): AnalyticsTableColumn | null {
        const column: AnalyticsTableColumn | undefined = this.tableColumns.find(
            (column: AnalyticsTableColumn) => {
                return column.isTenantId;
            }
        );

        if (!column) {
            return null;
        }

        return column;
    }

    /** Returns all columns marked as required. */
    public getRequiredColumns(): Array<AnalyticsTableColumn> {
        return this.tableColumns.filter((column: AnalyticsTableColumn) => {
            return column.required;
        });
    }

    /**
     * Returns true when the named column is flagged as a default-value
     * column; false when it is not, or when the column does not exist.
     */
    public isDefaultValueColumn(columnName: string): boolean {
        const column: AnalyticsTableColumn | null =
            this.getTableColumn(columnName);

        if (!column) {
            return false;
        }

        return column.isDefaultValueColumn;
    }

    /**
     * Returns the billing access control of the named column, or null when
     * the column does not exist or has none configured.
     */
    public getColumnBillingAccessControl(
        columnName: string
    ): ColumnBillingAccessControl | null {
        const column: AnalyticsTableColumn | null =
            this.getTableColumn(columnName);

        if (!column) {
            return null;
        }

        return column.billingAccessControl || null;
    }

    // Convenience accessors over the standard columns. Both `id` and `_id`
    // read/write the same '_id' column value.
    public get id(): ObjectID | undefined {
        return this.getColumnValue('_id') as ObjectID | undefined;
    }
    public set id(v: ObjectID | undefined) {
        this.setColumnValue('_id', v);
    }

    public get _id(): ObjectID | undefined {
        return this.getColumnValue('_id') as ObjectID | undefined;
    }
    public set _id(v: ObjectID | undefined) {
        this.setColumnValue('_id', v);
    }

    public get createdAt(): Date | undefined {
        return this.getColumnValue('createdAt') as Date | undefined;
    }
    public set createdAt(v: Date | undefined) {
        this.setColumnValue('createdAt', v);
    }

    public get updatedAt(): Date | undefined {
        return this.getColumnValue('updatedAt') as Date | undefined;
    }
    public set updatedAt(v: Date | undefined) {
        this.setColumnValue('updatedAt', v);
    }
}

View File

@@ -0,0 +1,178 @@
// This model will be extended by BaseModel and NestedModel.
import AnalyticsTableColumn from '../Types/AnalyticsDatabase/TableColumn';
import TableColumnType from '../Types/AnalyticsDatabase/TableColumnType';
import OneUptimeDate from '../Types/Date';
import BadDataException from '../Types/Exception/BadDataException';
import { JSONObject, JSONValue } from '../Types/JSON';
import ObjectID from '../Types/ObjectID';
export type RecordValue =
| ObjectID
| string
| number
| boolean
| Date
| Array<number>
| Array<string>
| Array<CommonModel>;
export type Record = Array<RecordValue | Record>;
export default class CommonModel {
protected data: JSONObject = {};
private _tableColumns: Array<AnalyticsTableColumn> = [];
public get tableColumns(): Array<AnalyticsTableColumn> {
return this._tableColumns;
}
public set tableColumns(v: Array<AnalyticsTableColumn>) {
this._tableColumns = v;
}
public setColumnValue(
columnName: string,
value: JSONValue | Array<CommonModel>
): void {
const column: AnalyticsTableColumn | null =
this.getTableColumn(columnName);
if (column) {
if (
column.type === TableColumnType.ObjectID &&
typeof value === 'string'
) {
value = new ObjectID(value);
}
if (
column.type === TableColumnType.Date &&
typeof value === 'string'
) {
value = OneUptimeDate.fromString(value);
}
if (
column.type === TableColumnType.JSON &&
typeof value === 'string'
) {
value = JSON.parse(value);
}
if (
column.type === TableColumnType.Number &&
typeof value === 'string'
) {
value = parseInt(value);
}
// decimal
if (
column.type === TableColumnType.Decimal &&
typeof value === 'string'
) {
value = parseFloat(value);
}
return (this.data[columnName] = value as any);
}
throw new BadDataException('Column ' + columnName + ' does not exist');
}
public constructor(data: { tableColumns: Array<AnalyticsTableColumn> }) {
this.tableColumns = data.tableColumns;
}
public getColumnValue<T extends RecordValue>(
columnName: string
): T | undefined {
if (this.getTableColumn(columnName)) {
return this.data[columnName] as T;
}
return undefined;
}
public getTableColumn(name: string): AnalyticsTableColumn | null {
const column: AnalyticsTableColumn | undefined = this.tableColumns.find(
(column: AnalyticsTableColumn) => {
return column.key === name;
}
);
if (!column) {
return null;
}
return column;
}
public getTableColumns(): Array<AnalyticsTableColumn> {
return this.tableColumns;
}
public fromJSON(json: JSONObject): CommonModel {
for (const key in json) {
this.setColumnValue(key, json[key]);
}
return this;
}
public toJSON(): JSONObject {
const json: JSONObject = {};
this.tableColumns.forEach((column: AnalyticsTableColumn) => {
const recordValue: RecordValue | undefined = this.getColumnValue(
column.key
);
if (recordValue instanceof CommonModel) {
json[column.key] = recordValue.toJSON();
return;
}
if (recordValue instanceof Array) {
if (
recordValue.length > 0 &&
recordValue[0] instanceof CommonModel
) {
json[column.key] = CommonModel.toJSONArray(
recordValue as Array<CommonModel>
);
}
return;
}
json[column.key] = recordValue;
});
return json;
}
public static fromJSONArray<TBaseModel extends CommonModel>(
modelType: { new (): CommonModel },
jsonArray: Array<JSONObject>
): Array<TBaseModel> {
const models: Array<CommonModel> = [];
jsonArray.forEach((json: JSONObject) => {
const model: CommonModel = new modelType();
model.fromJSON(json);
models.push(model);
});
return models as Array<TBaseModel>;
}
public static toJSONArray(models: Array<CommonModel>): Array<JSONObject> {
const json: Array<JSONObject> = [];
models.forEach((model: CommonModel) => {
json.push(model.toJSON());
});
return json;
}
}

View File

@@ -0,0 +1,8 @@
import AnalyticsTableColumn from '../Types/AnalyticsDatabase/TableColumn';
import CommonModel from './CommonModel';
/**
 * A model stored inside a column of another analytics model (see the
 * NestedModel column type handling in AnalyticsDataModel). Adds no behavior
 * beyond CommonModel.
 */
export default class NestedModel extends CommonModel {
    /** @param data.tableColumns - column definitions of the nested model. */
    public constructor(data: { tableColumns: Array<AnalyticsTableColumn> }) {
        super(data);
    }
}

View File

@@ -1,104 +0,0 @@
import TableColumnType from '../Types/BaseDatabase/TableColumnType';
import AnalyticsTableColumn from '../Types/AnalyticsDatabase/TableColumn';
import BadDataException from '../Types/Exception/BadDataException';
import AnalyticsTableEngine from '../Types/AnalyticsDatabase/AnalyticsTableEngine';
/**
 * Analytics data model: table metadata (name, engine, columns, primary keys)
 * with the standard _id / createdAt / updatedAt columns appended to every
 * table.
 */
export default class AnalyticsDataModel {
    private _tableColumns: Array<AnalyticsTableColumn> = [];
    public get tableColumns(): Array<AnalyticsTableColumn> {
        return this._tableColumns;
    }
    public set tableColumns(v: Array<AnalyticsTableColumn>) {
        this._tableColumns = v;
    }

    private _tableName: string = '';
    public get tableName(): string {
        return this._tableName;
    }
    public set tableName(v: string) {
        this._tableName = v;
    }

    // Storage engine of the table; defaults to MergeTree.
    private _tableEngine: AnalyticsTableEngine = AnalyticsTableEngine.MergeTree;
    public get tableEngine(): AnalyticsTableEngine {
        return this._tableEngine;
    }
    public set tableEngine(v: AnalyticsTableEngine) {
        this._tableEngine = v;
    }

    private _primaryKeys: Array<string> = [];
    public get primaryKeys(): Array<string> {
        return this._primaryKeys;
    }
    public set primaryKeys(v: Array<string>) {
        this._primaryKeys = v;
    }

    /**
     * @param data.tableName - name of the analytics table.
     * @param data.tableEngine - optional table engine; defaults to MergeTree.
     * @param data.tableColumns - user-defined columns (standard columns are
     *     appended automatically).
     * @param data.primaryKeys - keys of the primary-key columns; each must
     *     exist in tableColumns.
     * @throws BadDataException when primary keys are missing or unknown.
     */
    public constructor(data: {
        tableName: string;
        tableEngine?: AnalyticsTableEngine | undefined;
        tableColumns: Array<AnalyticsTableColumn>;
        primaryKeys: Array<string>; // this should be the subset of tableColumns
    }) {
        // Copy the caller's columns so appending the standard columns below
        // does not mutate the caller's array.
        const columns: Array<AnalyticsTableColumn> = [...data.tableColumns];

        this.tableName = data.tableName;

        if (data.tableEngine) {
            this.tableEngine = data.tableEngine;
        }

        // Standard columns added to every analytics table.
        columns.push(
            new AnalyticsTableColumn({
                key: '_id',
                title: 'ID',
                description: 'ID of this object',
                required: true,
                type: TableColumnType.ObjectID,
            })
        );

        columns.push(
            new AnalyticsTableColumn({
                key: 'createdAt',
                title: 'Created',
                description: 'Date and Time when the object was created.',
                required: true,
                type: TableColumnType.Date,
            })
        );

        columns.push(
            new AnalyticsTableColumn({
                key: 'updatedAt',
                title: 'Updated',
                description: 'Date and Time when the object was updated.',
                required: true,
                type: TableColumnType.Date,
            })
        );

        if (!data.primaryKeys || data.primaryKeys.length === 0) {
            throw new BadDataException('Primary keys are required');
        }

        // check if primary keys are subset of tableColumns
        data.primaryKeys.forEach((primaryKey: string) => {
            if (
                !columns.find((column: AnalyticsTableColumn) => {
                    return column.key === primaryKey;
                })
            ) {
                throw new BadDataException(
                    'Primary key ' + primaryKey + ' is not part of tableColumns'
                );
            }
        });

        this.primaryKeys = data.primaryKeys;
        this.tableColumns = columns;
    }
}

View File

@@ -21,24 +21,22 @@ import Email from '../Types/Email';
import Phone from '../Types/Phone';
import PositiveNumber from '../Types/PositiveNumber';
import Route from '../Types/API/Route';
import TableColumnType from '../Types/BaseDatabase/TableColumnType';
import TableColumnType from '../Types/Database/TableColumnType';
import Permission, {
instanceOfUserTenantAccessPermission,
PermissionHelper,
UserPermission,
UserTenantAccessPermission,
} from '../Types/Permission';
import {
ColumnAccessControl,
ColumnBillingAccessControl,
} from '../Types/Database/AccessControl/AccessControl';
import { ColumnAccessControl } from '../Types/BaseDatabase/AccessControl';
import { getColumnAccessControlForAllColumns } from '../Types/Database/AccessControl/ColumnAccessControl';
import BadDataException from '../Types/Exception/BadDataException';
import { PlanSelect } from '../Types/Billing/SubscriptionPlan';
import { EnableWorkflowOn } from '../Types/Model/EnableWorkflow';
import EnableWorkflowOn from '../Types/BaseDatabase/EnableWorkflowOn';
import IconProp from '../Types/Icon/IconProp';
import Text from '../Types/Text';
import { getColumnBillingAccessControlForAllColumns } from '../Types/Database/AccessControl/ColumnBillingAccessControl';
import ColumnBillingAccessControl from '../Types/BaseDatabase/ColumnBillingAccessControl';
export type DbTypes =
| string
@@ -255,6 +253,10 @@ export default class BaseModel extends BaseEntity {
(this as any)[columnName] = value;
}
public removeValue(columnName: string): void {
(this as any)[columnName] = undefined;
}
public doesPermissionHaveConditions(
permission: Permission
): JSONObject | null {

View File

@@ -4,7 +4,7 @@ import ColumnLength from '../Types/Database/ColumnLength';
import ColumnType from '../Types/Database/ColumnType';
import SlugifyColumn from '../Types/Database/SlugifyColumn';
import TableColumn from '../Types/Database/TableColumn';
import TableColumnType from '../Types/BaseDatabase/TableColumnType';
import TableColumnType from '../Types/Database/TableColumnType';
import MimeType from '../Types/File/MimeType';
import ObjectID from '../Types/ObjectID';
import Permission from '../Types/Permission';

View File

@@ -18,8 +18,6 @@ export const IntegrationRoute: Route = new Route('/integration');
export const NotificationRoute: Route = new Route('/notification');
export const HelmRoute: Route = new Route('/helm-chart');
export const AccountsRoute: Route = new Route('/accounts');
export const WorkflowRoute: Route = new Route('/workflow');
@@ -28,4 +26,4 @@ export const ApiReferenceRoute: Route = new Route('/reference');
export const AdminDashboardRoute: Route = new Route('/admin');
export const ProbeApiRoute: Route = new Route('/probe-api');
export const IngestorRoute: Route = new Route('/ingestor');

View File

@@ -0,0 +1,61 @@
import HTTPErrorResponse from '../../../Types/API/HTTPErrorResponse';
// Tests for HTTPErrorResponse.message: the accessor resolves the human
// readable message from the response body, preferring data > message > error.
describe('HTTPErrorResponse', () => {
    it('should return an empty string when data is null', () => {
        expect(new HTTPErrorResponse(404, { data: null }, {}).message).toBe('');
    });

    it('should return the message from the "data" property if present', () => {
        expect(
            new HTTPErrorResponse(200, { data: 'Data message' }, {}).message
        ).toBe('Data message');
    });

    it('should return the message from the "message" property if present', () => {
        expect(
            new HTTPErrorResponse(200, { message: 'Message message' }, {})
                .message
        ).toBe('Message message');
    });

    it('should return the message from the "error" property if no other message properties are present', () => {
        expect(
            new HTTPErrorResponse(500, { error: 'Error message' }, {}).message
        ).toBe('Error message');
    });

    it('should return an empty string when no relevant message properties are present', () => {
        expect(
            new HTTPErrorResponse(204, { otherProperty: 'Other message' }, {})
                .message
        ).toBe('');
    });

    it('should prioritize "data" > "message" > "error" when multiple message properties are present', () => {
        const body = {
            data: 'Data message',
            message: 'Message message',
            error: 'Error message',
        };

        expect(new HTTPErrorResponse(201, body, {}).message).toBe(
            'Data message'
        );
    });
});

View File

@@ -0,0 +1,82 @@
import DatabaseDate from '../../../Types/Database/Date';
import moment from 'moment';
import InBetween from '../../../Types/Database/InBetween';
import { JSONObject } from '../../../Types/JSON';
// Tests for DatabaseDate.asDateStartOfTheDayEndOfTheDayForDatabaseQuery:
// it should produce an InBetween range spanning the whole day of the input.
describe('DatabaseDate', () => {
    describe('asDateStartOfTheDayEndOfTheDayForDatabaseQuery', () => {
        it('should return InBetween object for a valid Date input', () => {
            const date: Date = new Date('2023-10-24T12:00:00Z');

            const actual: JSONObject =
                DatabaseDate.asDateStartOfTheDayEndOfTheDayForDatabaseQuery(
                    date
                ).toJSON();

            // Expected bounds computed the same way the implementation
            // formats them: local start/end of day, second precision.
            expect(actual).toEqual({
                startValue: moment(date)
                    .startOf('day')
                    .format('YYYY-MM-DD HH:mm:ss'),
                endValue: moment(date)
                    .endOf('day')
                    .format('YYYY-MM-DD HH:mm:ss'),
                _type: 'InBetween',
            });
        });

        it('should return InBetween object for a valid Date string input', () => {
            const date: string = '2023-10-24T12:00:00Z';

            const actual: JSONObject =
                DatabaseDate.asDateStartOfTheDayEndOfTheDayForDatabaseQuery(
                    date
                ).toJSON();

            expect(actual).toEqual({
                startValue: moment(date)
                    .startOf('day')
                    .format('YYYY-MM-DD HH:mm:ss'),
                endValue: moment(date)
                    .endOf('day')
                    .format('YYYY-MM-DD HH:mm:ss'),
                _type: 'InBetween',
            });
        });

        it('should handle invalid date string gracefully', () => {
            const actual: JSONObject =
                DatabaseDate.asDateStartOfTheDayEndOfTheDayForDatabaseQuery(
                    'invalid-date'
                ).toJSON();

            expect(actual).toEqual({
                startValue: 'Invalid date',
                endValue: 'Invalid date',
                _type: 'InBetween',
            });
        });

        it('should handle empty string input gracefully', () => {
            const actual: JSONObject =
                DatabaseDate.asDateStartOfTheDayEndOfTheDayForDatabaseQuery(
                    ''
                ).toJSON();

            expect(actual).toEqual({
                startValue: 'Invalid date',
                endValue: 'Invalid date',
                _type: 'InBetween',
            });
        });

        it('should be a type of InBetween', () => {
            expect(
                DatabaseDate.asDateStartOfTheDayEndOfTheDayForDatabaseQuery(
                    '2023-10-24T12:00:00Z'
                )
            ).toBeInstanceOf(InBetween);
        });
    });
});

View File

@@ -0,0 +1,70 @@
import EqualToOrNull from '../../../Types/Database/EqualToOrNull';
import BadDataException from '../../../Types/Exception/BadDataException';
import { JSONObject } from '../../../Types/JSON';
// Tests for the EqualToOrNull query wrapper: construction, value accessors,
// string/JSON round-tripping and validation.
describe('EqualToOrNull', () => {
    it('should create an EqualToOrNull object with a valid value', () => {
        expect(new EqualToOrNull('oneuptime').value).toBe('oneuptime');
    });

    it('should get the value property of an EqualToOrNull object', () => {
        expect(new EqualToOrNull('oneuptime').value).toBe('oneuptime');
    });

    it('should set the value property of an EqualToOrNull object', () => {
        const subject: EqualToOrNull = new EqualToOrNull('oldValue');
        subject.value = 'newValue';

        expect(subject.value).toBe('newValue');
    });

    it('should return the correct string representation using toString method', () => {
        expect(new EqualToOrNull('oneuptime').toString()).toBe('oneuptime');
    });

    it('should generate the correct JSON representation using toJSON method', () => {
        expect(new EqualToOrNull('oneuptime').toJSON()).toEqual({
            _type: 'EqualToOrNull',
            value: 'oneuptime',
        });
    });

    it('should create an EqualToOrNull object from valid JSON input', () => {
        const subject: EqualToOrNull = EqualToOrNull.fromJSON({
            _type: 'EqualToOrNull',
            value: 'oneuptime',
        });

        expect(subject.value).toBe('oneuptime');
    });

    it('should throw a BadDataException when using invalid JSON input', () => {
        expect(() => {
            return EqualToOrNull.fromJSON({
                _type: 'InvalidType',
                value: 'oneuptime',
            });
        }).toThrow(BadDataException);
    });

    it('should be a type of EqualToOrNull', () => {
        expect(new EqualToOrNull('oneuptime')).toBeInstanceOf(EqualToOrNull);
    });

    it('should handle null value when using fromJSON method', () => {
        const subject: EqualToOrNull = EqualToOrNull.fromJSON({
            _type: 'EqualToOrNull',
            value: null,
        });

        expect(subject.value).toBeNull();
    });
});

View File

@@ -0,0 +1,80 @@
import InBetween from '../../../Types/Database/InBetween';
import BadDataException from '../../../Types/Exception/BadDataException';
import { JSONObject } from '../../../Types/JSON';
// Tests for the InBetween query wrapper: construction, JSON round-tripping,
// validation, and its string representations.
describe('InBetween', () => {
    it('should create an InBetween object with valid start and end values', () => {
        const range: InBetween = new InBetween(10, 20);

        expect(range.startValue).toBe(10);
        expect(range.endValue).toBe(20);
    });

    it('should generate the correct JSON representation using toJSON method', () => {
        expect(new InBetween(10, 20).toJSON()).toEqual({
            _type: 'InBetween',
            startValue: 10,
            endValue: 20,
        });
    });

    it('should create an InBetween object from valid JSON input', () => {
        const range: InBetween = InBetween.fromJSON({
            _type: 'InBetween',
            startValue: 10,
            endValue: 20,
        });

        expect(range.startValue).toBe(10);
        expect(range.endValue).toBe(20);
    });

    it('should throw a BadDataException when using invalid JSON input', () => {
        expect(() => {
            return InBetween.fromJSON({
                _type: 'InvalidType',
                startValue: 10,
                endValue: 20,
            });
        }).toThrow(BadDataException);
    });

    it('should return a string with start and end values matching', () => {
        // Equal bounds collapse to a single value in the string form.
        expect(new InBetween(15, 15).toString()).toBe('15');
    });

    it('should return a string with start and end values different', () => {
        expect(new InBetween(10, 20).toString()).toBe('10 - 20');
    });

    it('should return the start value as a string', () => {
        expect(new InBetween(10, 20).toStartValueString()).toBe('10');
    });

    it('should return the end value as a string', () => {
        expect(new InBetween(10, 20).toEndValueString()).toBe('20');
    });

    it('should be a type of InBetween', () => {
        expect(new InBetween(10, 15)).toBeInstanceOf(InBetween);
    });
});

View File

@@ -0,0 +1,52 @@
import BaseModel from '../../Models/BaseModel';
import { JSONObject } from '../../Types/JSON';
import JSONFunctions from '../../Types/JSONFunctions';
describe('JSONFunctions Class', () => {
    // Fresh model instance for every test case.
    let model: BaseModel;

    beforeEach(() => {
        model = new BaseModel();
    });

    describe('isEmptyObject Method', () => {
        test('Returns true for an empty object', () => {
            expect(JSONFunctions.isEmptyObject({})).toBe(true);
        });

        test('Returns false for a non-empty object', () => {
            const populated: JSONObject = { key: 'value' };
            expect(JSONFunctions.isEmptyObject(populated)).toBe(false);
        });

        test('Returns true for null or undefined', () => {
            expect(JSONFunctions.isEmptyObject(null)).toBe(true);
            expect(JSONFunctions.isEmptyObject(undefined)).toBe(true);
        });
    });

    describe('toJSON and fromJSON Methods', () => {
        test('toJSON returns a valid JSON object', () => {
            const serialized: JSONObject = JSONFunctions.toJSON(
                model,
                BaseModel
            );
            expect(serialized).toEqual(expect.objectContaining({}));
        });

        test('toJSONObject returns a valid JSON object', () => {
            const serialized: JSONObject = JSONFunctions.toJSONObject(
                model,
                BaseModel
            );
            expect(serialized).toEqual(expect.objectContaining({}));
        });

        test('fromJSON returns a BaseModel instance', () => {
            const deserialized: BaseModel | BaseModel[] =
                JSONFunctions.fromJSON({ name: 'oneuptime' }, BaseModel);
            expect(deserialized).toBeInstanceOf(BaseModel);
        });
    });
});

View File

@@ -0,0 +1,44 @@
import NotImplementedException from '../../Types/Exception/NotImplementedException';
import { JSONObject } from '../../Types/JSON';
import SerializableObject from '../../Types/SerializableObject';
describe('SerializableObject Class', () => {
    // Instance under test, recreated before each case.
    let instance: SerializableObject;

    beforeEach(() => {
        instance = new SerializableObject();
    });

    test('Constructor initializes an instance of SerializableObject', () => {
        expect(instance).toBeInstanceOf(SerializableObject);
    });

    describe('toJSON Method', () => {
        test('Throws NotImplementedException when called', () => {
            expect(() => {
                return instance.toJSON();
            }).toThrow(NotImplementedException);
        });
    });

    describe('fromJSON Method', () => {
        test('Throws NotImplementedException when called', () => {
            expect(() => {
                return SerializableObject.fromJSON({});
            }).toThrow(NotImplementedException);
        });
    });

    describe('fromJSON Instance Method', () => {
        test('Returns the result from the static fromJSON method', () => {
            const stubbed: SerializableObject = new SerializableObject();
            // Route the static factory through a mock so we can verify the
            // instance method simply delegates to it.
            jest.spyOn(SerializableObject, 'fromJSON').mockReturnValue(
                stubbed
            );
            const payload: JSONObject = { key: 'value' };
            expect(instance.fromJSON(payload)).toBe(stubbed);
        });
    });
});

View File

@@ -349,6 +349,10 @@ const httpMethodTests: Array<HTTPMethodType> = [
name: 'delete',
method: HTTPMethod.DELETE,
},
{
name: 'head',
method: HTTPMethod.HEAD,
},
];
describe.each(httpMethodTests)('$name', ({ name, method }: HTTPMethodType) => {

View File

@@ -0,0 +1,85 @@
import Analytics from '../../Utils/Analytics';
import Email from '../../Types/Email';
import { JSONObject } from '../../Types/JSON';
import posthog from 'posthog-js';
jest.mock('posthog-js', () => {
    return {
        init: jest.fn(),
        identify: jest.fn(),
        reset: jest.fn(),
        capture: jest.fn(),
    };
});

// Shared credentials used by the "configured" test cases below.
const testApiHost: string = 'https://example.com';
const testApiKey: string = 'your-api-key';

describe('Analytics Class', () => {
    afterEach(() => {
        jest.clearAllMocks();
    });

    // Builds an Analytics client wired to the mocked posthog module.
    const newConfiguredClient = (): Analytics => {
        return new Analytics(testApiHost, testApiKey);
    };

    // Builds an Analytics client with no host/key, so it never initializes.
    const newUnconfiguredClient = (): Analytics => {
        return new Analytics('', '');
    };

    it('should initialize the Analytics class', () => {
        const client: Analytics = newConfiguredClient();
        expect(posthog.init).toHaveBeenCalledWith(testApiKey, {
            api_host: testApiHost,
            autocapture: false,
        });
        expect(client.isInitialized).toBe(true);
    });

    it('should not initialize if apiHost and apiKey are not provided', () => {
        const client: Analytics = newUnconfiguredClient();
        expect(posthog.init).not.toHaveBeenCalled();
        expect(client.isInitialized).toBe(false);
    });

    it('should authenticate a user', () => {
        const client: Analytics = newConfiguredClient();
        const email: Email = new Email('test@example.com');
        client.userAuth(email);
        expect(posthog.identify).toHaveBeenCalledWith(email.toString());
    });

    it('should not authenticate a user if not initialized', () => {
        const client: Analytics = newUnconfiguredClient();
        client.userAuth(new Email('test@example.com'));
        expect(posthog.identify).not.toHaveBeenCalled();
    });

    it('should reset the user session on logout', () => {
        newConfiguredClient().logout();
        expect(posthog.reset).toHaveBeenCalled();
    });

    it('should not reset the user session if not initialized', () => {
        newUnconfiguredClient().logout();
        expect(posthog.reset).not.toHaveBeenCalled();
    });

    it('should capture an event with optional data', () => {
        const client: Analytics = newConfiguredClient();
        const payload: JSONObject = { key: 'value' };
        client.capture('testEvent', payload);
        expect(posthog.capture).toHaveBeenCalledWith('testEvent', payload);
    });

    it('should not capture an event if not initialized', () => {
        newUnconfiguredClient().capture('testEvent');
        expect(posthog.capture).not.toHaveBeenCalled();
    });
});

View File

@@ -0,0 +1,34 @@
import {
EVERY_MINUTE,
EVERY_DAY,
EVERY_HOUR,
EVERY_FIVE_MINUTE,
EVERY_FIVE_SECONDS,
EVERY_WEEK,
} from '../../Utils/CronTime';
describe('CronTime', () => {
test('should return every minute', () => {
expect(EVERY_MINUTE).toEqual('* * * * *');
});
test('should return every day', () => {
expect(EVERY_DAY).toEqual('0 8 * * *');
});
test('should return every hour', () => {
expect(EVERY_HOUR).toEqual('1 * * * *');
});
test('should return every five minute', () => {
expect(EVERY_FIVE_MINUTE).toEqual('*/5 * * * *');
});
test('should return every five seconds', () => {
expect(EVERY_FIVE_SECONDS).toEqual('*/5 * * * * *');
});
test('should return every week', () => {
expect(EVERY_WEEK).toEqual('0 0 * * 0');
});
});

View File

@@ -0,0 +1,38 @@
import Faker from '../../Utils/Faker';
import Email from '../../Types/Email';
import Name from '../../Types/Name';
import Phone from '../../Types/Phone';
describe('Faker Class', () => {
    it('should generate a random name with alphanumeric characters', () => {
        const generated: string = Faker.generateName();
        expect(generated).toMatch(/^[a-zA-Z0-9]{10}$/);
    });

    it('should generate a random company name', () => {
        expect(Faker.generateCompanyName()).toBeTruthy();
    });

    it('should generate a string of random numbers of specified length', () => {
        const digits: string = Faker.randomNumbers(8);
        expect(digits).toMatch(/^\d{8}$/);
    });

    it('should generate a user full name', () => {
        const fullName: Name = Faker.generateUserFullName();
        expect(fullName).toHaveProperty('name');
        expect(fullName.name).toBeTruthy();
    });

    it('should generate a valid email address', () => {
        const generated: Email = Faker.generateEmail();
        const emailPattern: RegExp =
            /^[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,9}$/i;
        expect(generated.email).toMatch(emailPattern);
    });

    it('should generate a valid phone number', () => {
        const generated: Phone = Faker.generatePhone();
        const phonePattern: RegExp =
            /^[+]?[(]?[0-9]{3}[)]?[-\s.]?[0-9]{3}[-\s.]?[0-9]{4,7}$/;
        expect(generated.phone).toMatch(phonePattern);
    });
});

View File

@@ -1,9 +1,12 @@
import Slug from '../../Utils/Slug';
describe('Slug.getSlug()', () => {
test('should return empty string, if name is empty ', () => {
test('should return empty string, if name is empty ', () => {
expect(Slug.getSlug('')).toEqual('');
expect(Slug.getSlug(' ')).toEqual('');
});
test('should generate a slug from a valid name when name is null', () => {
expect(Slug.getSlug(null)).toMatch(/^[a-z0-9-]+$/);
});
test('should replaces spaces in nonEmpty with hyphen -', () => {
expect(Slug.getSlug('this is slug')).toMatch(/this-is-slug/g);
});

View File

@@ -20,6 +20,8 @@ export default class Hostname extends DatabaseProperty {
}
public set hostname(value: string) {
value = value.trim();
if (Hostname.isValid(value)) {
this._route = value;
} else {

View File

@@ -209,7 +209,15 @@ export default class URL extends DatabaseProperty {
return this;
}
public addQueryParam(paramName: string, value: string): URL {
public addQueryParam(
paramName: string,
value: string,
encode?: boolean | undefined
): URL {
if (encode) {
value = encodeURIComponent(value);
}
this.params[paramName] = value;
return this;
}

View File

@@ -1,7 +1,12 @@
import TableColumnType from '../BaseDatabase/TableColumnType';
import { ColumnAccessControl } from '../BaseDatabase/AccessControl';
import ColumnBillingAccessControl from '../BaseDatabase/ColumnBillingAccessControl';
import TableColumnType from '../AnalyticsDatabase/TableColumnType';
import { JSONValue } from '../JSON';
import NestedModel from '../../AnalyticsModels/NestedModel';
export default class AnalyticsTableColumn {
private _key: string = 'id';
public get key(): string {
return this._key;
}
@@ -33,7 +38,15 @@ export default class AnalyticsTableColumn {
this._required = v;
}
private _type: TableColumnType = TableColumnType.ShortText;
private _isTenantId: boolean = false;
public get isTenantId(): boolean {
return this._isTenantId;
}
public set isTenantId(v: boolean) {
this._isTenantId = v;
}
private _type: TableColumnType = TableColumnType.Text;
public get type(): TableColumnType {
return this._type;
}
@@ -41,17 +54,96 @@ export default class AnalyticsTableColumn {
this._type = v;
}
private _forceGetDefaultValueOnCreate?:
| (() => Date | string | number | boolean)
| undefined;
public get forceGetDefaultValueOnCreate():
| (() => Date | string | number | boolean)
| undefined {
return this._forceGetDefaultValueOnCreate;
}
public set forceGetDefaultValueOnCreate(
v: (() => Date | string | number | boolean) | undefined
) {
this._forceGetDefaultValueOnCreate = v;
}
private _defaultValue: JSONValue | undefined;
public get defaultValue(): JSONValue {
return this._defaultValue;
}
public set defaultValue(v: JSONValue) {
this._defaultValue = v;
}
public get isDefaultValueColumn(): boolean {
return Boolean(this.defaultValue !== undefined);
}
private _billingAccessControl?: ColumnBillingAccessControl | undefined;
public get billingAccessControl(): ColumnBillingAccessControl | undefined {
return this._billingAccessControl;
}
public set billingAccessControl(v: ColumnBillingAccessControl | undefined) {
this._billingAccessControl = v;
}
private _allowAccessIfSubscriptionIsUnpaid: boolean = false;
public get allowAccessIfSubscriptionIsUnpaid(): boolean {
return this._allowAccessIfSubscriptionIsUnpaid;
}
public set allowAccessIfSubscriptionIsUnpaid(v: boolean) {
this._allowAccessIfSubscriptionIsUnpaid = v;
}
private _accessControl: ColumnAccessControl | undefined;
public get accessControl(): ColumnAccessControl | undefined {
return this._accessControl;
}
public set accessControl(v: ColumnAccessControl | undefined) {
this._accessControl = v;
}
private _nestedModel?: NestedModel | undefined;
public get nestedModel(): NestedModel | undefined {
return this._nestedModel;
}
public set nestedModel(v: NestedModel | undefined) {
this._nestedModel = v;
}
public constructor(data: {
key: string;
nestedModel?: NestedModel | undefined;
title: string;
description: string;
required: boolean;
defaultValue?: JSONValue | undefined;
type: TableColumnType;
billingAccessControl?: ColumnBillingAccessControl | undefined;
isTenantId?: boolean | undefined;
accessControl?: ColumnAccessControl | undefined;
allowAccessIfSubscriptionIsUnpaid?: boolean | undefined;
forceGetDefaultValueOnCreate?:
| (() => Date | string | number | boolean)
| undefined;
}) {
if (data.type === TableColumnType.NestedModel && !data.nestedModel) {
throw new Error('NestedModel is required when type is NestedModel');
}
this.accessControl = data.accessControl;
this.key = data.key;
this.title = data.title;
this.description = data.description;
this.required = data.required;
this.type = data.type;
this.isTenantId = data.isTenantId || false;
this.forceGetDefaultValueOnCreate = data.forceGetDefaultValueOnCreate;
this.defaultValue = data.defaultValue;
this.billingAccessControl = data.billingAccessControl;
this.allowAccessIfSubscriptionIsUnpaid =
data.allowAccessIfSubscriptionIsUnpaid || false;
this.nestedModel = data.nestedModel;
}
}

View File

@@ -0,0 +1,15 @@
// Data types a table column may hold. Each member's string value is a
// human-readable label for the type.
enum ColumnType {
    ObjectID = 'Object ID',
    Date = 'Date',
    Boolean = 'Boolean',
    Number = 'Number',
    Text = 'Text',
    // Column holds a nested model (sub-document) rather than a scalar value.
    NestedModel = 'Nested Model',
    JSON = 'JSON',
    Decimal = 'Decimal',
    ArrayNumber = 'Array of Numbers',
    ArrayText = 'Array of Text',
    LongNumber = 'Long Number',
}
export default ColumnType;

View File

@@ -0,0 +1,11 @@
import Permission from '../Permission';
// Permissions required to read/create/update a single column.
export interface ColumnAccessControl {
    read: Array<Permission>;
    create: Array<Permission>;
    update: Array<Permission>;
}
// Table-level access control: the column operations plus delete, which
// only exists at the table/record level.
export interface TableAccessControl extends ColumnAccessControl {
    delete: Array<Permission>;
}

View File

@@ -0,0 +1,7 @@
import { PlanSelect } from '../Billing/SubscriptionPlan';
// Billing-plan gate for each column operation (one PlanSelect per op).
// NOTE(review): presumably each value is the minimum plan required for
// that operation — confirm against the enforcement code that reads this.
export default interface ColumnBillingAccessControl {
    create: PlanSelect;
    read: PlanSelect;
    update: PlanSelect;
}

View File

@@ -20,4 +20,5 @@ export default interface DatabaseCommonInteractionProps {
ignoreHooks?: boolean | undefined;
currentPlan?: PlanSelect | undefined;
isSubscriptionUnpaid?: boolean | undefined;
isMasterAdmin?: boolean | undefined;
}

View File

@@ -0,0 +1,73 @@
import Permission, { UserPermission } from '../Permission';
import DatabaseCommonInteractionProps from './DatabaseCommonInteractionProps';
export default class DatabaseCommonInteractionPropsUtil {
    /**
     * Flattens the caller's access grants into a single list of
     * UserPermission entries: the global permissions (with the implicit
     * Public permission, and CurrentUser when a userId is present),
     * followed by the permissions for the current tenant, if any.
     *
     * NOTE(review): this method mutates `props.userGlobalAccessPermission`
     * in place — it may create it, and it pushes Public/CurrentUser into
     * its globalPermissions array. Callers should not assume `props` is
     * left untouched.
     */
    public static getUserPermissions(
        props: DatabaseCommonInteractionProps
    ): Array<UserPermission> {
        // Check first if the user has Global Permissions.
        // Global permissions includes all the tenantId user has access to.
        // and it includes all the global permissions that applies to all the tenant, like PUBLIC.
        if (!props.userGlobalAccessPermission) {
            props.userGlobalAccessPermission = {
                globalPermissions: [Permission.Public],
                projectIds: [],
                _type: 'UserGlobalAccessPermission',
            };
        }
        // If the PUBLIC Permission is not found in global permissions, include it.
        if (
            props.userGlobalAccessPermission &&
            !props.userGlobalAccessPermission.globalPermissions.includes(
                Permission.Public
            )
        ) {
            props.userGlobalAccessPermission.globalPermissions.push(
                Permission.Public
            ); // add public permission if not already.
        }
        // If the CurrentUser Permission is not found in global permissions, include it.
        if (
            props.userId &&
            props.userGlobalAccessPermission &&
            !props.userGlobalAccessPermission.globalPermissions.includes(
                Permission.CurrentUser
            )
        ) {
            props.userGlobalAccessPermission.globalPermissions.push(
                Permission.CurrentUser
            );
        }
        let userPermissions: Array<UserPermission> = [];
        // Include global permission in userPermissions.
        if (props.userGlobalAccessPermission) {
            /// take global permissions.
            userPermissions =
                props.userGlobalAccessPermission.globalPermissions.map(
                    (permission: Permission) => {
                        return {
                            permission: permission,
                            labelIds: [],
                            _type: 'UserPermission',
                        };
                    }
                );
        }
        if (props.tenantId && props.userTenantAccessPermission) {
            // Include Tenant Permission in userPermissions.
            userPermissions = [
                ...userPermissions,
                ...(props.userTenantAccessPermission[props.tenantId.toString()]
                    ?.permissions || []),
            ];
        }
        return userPermissions;
    }
}

View File

@@ -0,0 +1,6 @@
// Flags selecting which CRUD operations on a model have workflows enabled.
// NOTE(review): each flag is optional — presumably an unset flag leaves the
// workflow disabled for that operation; confirm against the consumer.
export default interface EnableWorkflowOn {
    create?: boolean | undefined;
    update?: boolean | undefined;
    delete?: boolean | undefined;
    read?: boolean | undefined;
}

View File

@@ -0,0 +1,8 @@
import { PlanSelect } from '../Billing/SubscriptionPlan';
// Billing-plan gate for each table-level operation (one PlanSelect per op).
// NOTE(review): presumably each value is the minimum plan required for
// that operation — confirm against the enforcement code that reads this.
export default interface TableBillingAccessControl {
    create: PlanSelect;
    read: PlanSelect;
    update: PlanSelect;
    delete: PlanSelect;
}

View File

@@ -28,3 +28,14 @@ export interface CallRequestMessage {
// A call request message addressed to a specific destination phone number.
export default interface CallRequest extends CallRequestMessage {
    to: Phone;
}
// Returns true when the phone number is considered high risk.
// Currently only numbers with Pakistan's +92 country code qualify.
export const isHighRiskPhoneNumber: Function = (
    phoneNumber: Phone
): boolean => {
    const highRiskCountryCode: string = '+92'; // Pakistan
    return phoneNumber.toString().startsWith(highRiskCountryCode);
};

View File

@@ -1,25 +0,0 @@
import { PlanSelect } from '../../Billing/SubscriptionPlan';
import Permission from '../../Permission';
export interface ColumnAccessControl {
read: Array<Permission>;
create: Array<Permission>;
update: Array<Permission>;
}
export interface TableAccessControl extends ColumnAccessControl {
delete: Array<Permission>;
}
export interface BillingAccessControl {
create: PlanSelect;
read: PlanSelect;
update: PlanSelect;
delete: PlanSelect;
}
export interface ColumnBillingAccessControl {
create: PlanSelect;
read: PlanSelect;
update: PlanSelect;
}

View File

@@ -2,7 +2,7 @@ import 'reflect-metadata';
import BaseModel from '../../../Models/BaseModel';
import Dictionary from '../../Dictionary';
import { ReflectionMetadataType } from '../../Reflection';
import { ColumnAccessControl } from './AccessControl';
import { ColumnAccessControl } from '../../BaseDatabase/AccessControl';
const accessControlSymbol: Symbol = Symbol('ColumnAccessControl');

View File

@@ -2,7 +2,7 @@ import 'reflect-metadata';
import BaseModel from '../../../Models/BaseModel';
import Dictionary from '../../Dictionary';
import { ReflectionMetadataType } from '../../Reflection';
import { ColumnBillingAccessControl } from './AccessControl';
import ColumnBillingAccessControl from '../../BaseDatabase/ColumnBillingAccessControl';
const accessControlSymbol: Symbol = Symbol('ColumnBillingAccessControl');

View File

@@ -1,4 +1,4 @@
import { TableAccessControl } from './AccessControl';
import { TableAccessControl } from '../../BaseDatabase/AccessControl';
export default (accessControl: TableAccessControl) => {
return (ctr: Function) => {

View File

@@ -1,6 +1,6 @@
import { BillingAccessControl } from './AccessControl';
import TableBillingAccessControl from '../../BaseDatabase/TableBillingAccessControl';
export default (accessControl: BillingAccessControl) => {
export default (accessControl: TableBillingAccessControl) => {
return (ctr: Function) => {
if (accessControl.create) {
ctr.prototype.createBillingPlan = accessControl.create;

View File

@@ -1,4 +1,4 @@
import TableColumnType from '../BaseDatabase/TableColumnType';
import TableColumnType from './TableColumnType';
enum ColumnLength {
Version = 30,

View File

@@ -0,0 +1,7 @@
import EnableWorkflowOn from '../BaseDatabase/EnableWorkflowOn';
export default (enableWorkflowOn: EnableWorkflowOn) => {
return (ctr: Function) => {
ctr.prototype.enableWorkflowOn = enableWorkflowOn;
};
};

View File

@@ -2,7 +2,7 @@ import 'reflect-metadata';
import BaseModel from '../../Models/BaseModel';
import Dictionary from '../Dictionary';
import { ReflectionMetadataType } from '../Reflection';
import TableColumnType from '../BaseDatabase/TableColumnType';
import TableColumnType from './TableColumnType';
const tableColumn: Symbol = Symbol('TableColumn');

View File

@@ -29,6 +29,10 @@ export default class OneUptimeDate {
return this.getSomeDaysAgo(new PositiveNumber(1));
}
public static fromUnixNano(timestamp: number): Date {
return moment(timestamp / 1000000).toDate();
}
public static getSecondsTo(date: Date): number {
date = this.fromString(date);
const dif: number = date.getTime() - this.getCurrentDate().getTime();
@@ -98,6 +102,10 @@ export default class OneUptimeDate {
return days.positiveNumber * 24 * 60 * 60;
}
public static getMillisecondsInDays(days: PositiveNumber | number): number {
return this.getSecondsInDays(days) * 1000;
}
public static getSomeHoursAgo(hours: PositiveNumber | number): Date {
if (!(hours instanceof PositiveNumber)) {
hours = new PositiveNumber(hours);
@@ -221,7 +229,7 @@ export default class OneUptimeDate {
let hasMins: boolean = false;
if (hours !== '00') {
hasHours = true;
text += hours + ' hours';
text += hours + ' hours ';
}
if (mins !== '00' || hasHours) {
@@ -231,7 +239,7 @@ export default class OneUptimeDate {
text += ', ';
}
text += mins + ' minutes';
text += mins + ' minutes ';
}
if (!(hasHours && hasMins)) {
@@ -351,6 +359,12 @@ export default class OneUptimeDate {
return moment(date).isAfter(startDate);
}
public static isEqualBySeconds(date: Date, startDate: Date): boolean {
date = this.fromString(date);
startDate = this.fromString(startDate);
return moment(date).isSame(startDate, 'seconds');
}
public static hasExpired(expirationDate: Date): boolean {
expirationDate = this.fromString(expirationDate);
return !moment(this.getCurrentDate()).isBefore(expirationDate);

View File

@@ -10,12 +10,15 @@ enum IconProp {
Settings = 'Settings',
Criteria = 'Criteria',
Notification = 'Notification',
Squares = 'Squares',
Help = 'Help',
JSON = 'JSON',
Signal = 'Signal',
Database = 'Database',
ChevronDown = 'ChevronDown',
Pencil = 'Pencil',
Flag = 'Flag',
Copy = 'Copy',
ChevronRight = 'ChevronRight',
ChevronUp = 'ChevronUp',
Play = 'Play',
@@ -25,6 +28,7 @@ enum IconProp {
Home = 'Home',
Graph = 'Graph',
Variable = 'Variable',
ListBullet = 'ListBullet',
Image = 'Image',
Grid = 'Grid',
More = 'More',

View File

@@ -123,6 +123,7 @@ export type JSONValue =
| Array<JSONValue>
| Array<Permission>
| Array<JSONValue>
| Array<ObjectID>
| CallRequest
| undefined
| null;

View File

@@ -4,7 +4,7 @@ import OneUptimeDate from './Date';
import BaseModel from '../Models/BaseModel';
import { JSONArray, JSONObject, JSONValue, ObjectType } from './JSON';
import { TableColumnMetadata } from '../Types/Database/TableColumn';
import TableColumnType from './BaseDatabase/TableColumnType';
import TableColumnType from './Database/TableColumnType';
import SerializableObject from './SerializableObject';
import SerializableObjectDictionary from './SerializableObjectDictionary';
import JSON5 from 'json5';

View File

@@ -1,12 +0,0 @@
export interface EnableWorkflowOn {
create?: boolean | undefined;
update?: boolean | undefined;
delete?: boolean | undefined;
read?: boolean | undefined;
}
export default (enableWorkflowOn: EnableWorkflowOn) => {
return (ctr: Function) => {
ctr.prototype.enableWorkflowOn = enableWorkflowOn;
};
};

View File

@@ -9,6 +9,7 @@ export enum CheckOn {
RequestBody = 'Request Body',
RequestHeader = 'Request Header',
RequestHeaderValue = 'Request Header Value',
JavaScriptExpression = 'JavaScript Expression',
}
export interface CriteriaFilter {
@@ -34,6 +35,7 @@ export enum FilterType {
False = 'False',
NotRecievedInMinutes = 'Not Recieved In Minutes',
RecievedInMinutes = 'Recieved In Minutes',
EvaluatesToTrue = 'Evaluates To True',
}
export enum FilterCondition {

View File

@@ -76,7 +76,7 @@ export default class MonitorCriteriaInstance extends DatabaseProperty {
changeMonitorStatus: true,
createIncidents: false,
name: 'Check if online',
description: 'This criteria checks if the monitor is online',
description: `This criteria checks if the ${arg.monitorType} is online`,
};
if (
@@ -118,8 +118,8 @@ export default class MonitorCriteriaInstance extends DatabaseProperty {
],
incidents: [
{
title: `${arg.monitorType} monitor is offline`,
description: `${arg.monitorType} monitor is currently offline.`,
title: `${arg.monitorType} is offline`,
description: `${arg.monitorType} is currently offline.`,
incidentSeverityId: arg.incidentSeverityId,
autoResolveIncident: true,
id: ObjectID.generate().toString(),
@@ -129,7 +129,7 @@ export default class MonitorCriteriaInstance extends DatabaseProperty {
changeMonitorStatus: true,
createIncidents: true,
name: 'Check if offline',
description: 'This criteria checks if the monitor is offline',
description: `This criteria checks if the ${arg.monitorType} is offline`,
};
}
@@ -155,8 +155,8 @@ export default class MonitorCriteriaInstance extends DatabaseProperty {
],
incidents: [
{
title: `${arg.monitorType} monitor is offline`,
description: `${arg.monitorType} monitor is currently offline.`,
title: `${arg.monitorType} is offline`,
description: `${arg.monitorType} is currently offline.`,
incidentSeverityId: arg.incidentSeverityId,
autoResolveIncident: true,
id: ObjectID.generate().toString(),
@@ -166,7 +166,7 @@ export default class MonitorCriteriaInstance extends DatabaseProperty {
changeMonitorStatus: true,
createIncidents: true,
name: 'Check if offline',
description: 'This criteria checks if the monitor is offline',
description: `This criteria checks if the ${arg.monitorType} is offline`,
};
}
@@ -184,8 +184,8 @@ export default class MonitorCriteriaInstance extends DatabaseProperty {
],
incidents: [
{
title: `${arg.monitorType} monitor is offline`,
description: `${arg.monitorType} monitor is currently offline.`,
title: `${arg.monitorType} is offline`,
description: `${arg.monitorType} is currently offline.`,
incidentSeverityId: arg.incidentSeverityId,
autoResolveIncident: true,
id: ObjectID.generate().toString(),
@@ -195,7 +195,7 @@ export default class MonitorCriteriaInstance extends DatabaseProperty {
changeMonitorStatus: true,
createIncidents: true,
name: 'Check if offline',
description: 'This criteria checks if the monitor is offline',
description: `This criteria checks if the ${arg.monitorType} is offline`,
};
}

Some files were not shown because too many files have changed in this diff Show More