Compare commits

...

94 Commits

Author SHA1 Message Date
Owen
f2d4c2f83c Remove duplicate target 2025-12-13 12:16:11 -05:00
Owen
25fed23758 Speed up build 2025-12-13 12:13:33 -05:00
Owen Schwartz
5cb3fa1127 Merge pull request #2066 from fosrl/dev
Dev
2025-12-13 12:09:22 -05:00
Owen
deac26bad2 Bump version 2025-12-13 12:07:35 -05:00
miloschwartz
c7747fd4b4 add license watermark 2025-12-13 11:45:15 -05:00
Owen
1aaad43871 Format 2025-12-13 11:36:53 -05:00
Owen
143175bde7 Update react-dom 2025-12-13 11:34:58 -05:00
Owen
9f55d6b20a Try to fix issue not sending newt commands 2025-12-13 11:19:42 -05:00
miloschwartz
4366ca5836 add spacing to delete modal 2025-12-13 10:57:24 -05:00
miloschwartz
d5307adef0 fix bug preventing saving resource priority, closes #2063 2025-12-12 22:52:00 -05:00
miloschwartz
3d857c3b52 fix client side pagination issue 2025-12-12 22:41:10 -05:00
Owen
a012369f83 Make sure to always check retention first
Fixes #2061
2025-12-12 18:39:13 -05:00
Owen
ba99614d58 Merge branch 'dev' of github.com:fosrl/pangolin into dev 2025-12-12 14:54:59 -05:00
Owen
27db77bca4 Format 2025-12-12 14:53:26 -05:00
miloschwartz
29b924230f add runner restart action 2025-12-12 14:48:49 -05:00
Owen
8eb3f6aacc Bump next and react again
CVE-2025-55184 and CVE-2025-67779
2025-12-12 09:55:52 -05:00
Owen
5d6ee45125 Merge branch 'dev' 2025-12-11 16:49:40 -05:00
Owen Schwartz
fceaedfcd8 Merge pull request #2045 from Fredkiss3/fix/update-full-domain-on-resource-page
fix: full domain should be updated when the form is saved on resource proxy page
2025-12-11 16:49:22 -05:00
Fred KISSIE
181612ce25 🐛 full domain should be updated when the form is saved 2025-12-11 22:26:38 +01:00
Owen
224b78fc64 Update consts 2025-12-11 16:13:33 -05:00
Owen
757e540be6 Merge branch 'main' into dev 2025-12-11 16:12:08 -05:00
Milo Schwartz
bf1675686c Update README.md 2025-12-11 15:44:47 -05:00
miloschwartz
f81909489a add client telemetry and fix missing openapi on prefault 2025-12-11 10:38:48 -05:00
miloschwartz
963468d7fa remove top border from dialog 2025-12-11 10:17:17 -05:00
miloschwartz
f67f4f8834 update screenshots and readme 2025-12-10 21:13:09 -05:00
Owen
4c819d264b Only permit ipv4 for now 2025-12-10 20:40:22 -05:00
Owen Schwartz
cbcb23ccea Merge pull request #2036 from fosrl/dependabot/npm_and_yarn/dev-minor-updates-316ddb12fb
Bump @types/pg from 8.15.6 to 8.16.0 in the dev-minor-updates group
2025-12-10 20:35:49 -05:00
Owen Schwartz
d8b27de5ac Merge pull request #2038 from fosrl/dependabot/npm_and_yarn/prod-minor-updates-0a59212de9
Bump the prod-minor-updates group across 1 directory with 4 updates
2025-12-10 20:35:43 -05:00
Owen
01f7842fd5 Fix function rename issue 2025-12-10 20:34:40 -05:00
Owen Schwartz
d409e58186 Merge pull request #2020 from Fredkiss3/fix/log-analytics-adjustments
refactor: adjustments for logs pages
2025-12-10 20:33:12 -05:00
Owen Schwartz
c9e1c4da1c Merge pull request #2026 from fosrl/crowdin_dev
New Crowdin updates
2025-12-10 20:25:31 -05:00
dependabot[bot]
9c38f65ad4 Bump the prod-minor-updates group across 1 directory with 4 updates
Bumps the prod-minor-updates group with 4 updates in the / directory: [@aws-sdk/client-s3](https://github.com/aws/aws-sdk-js-v3/tree/HEAD/clients/client-s3), [lucide-react](https://github.com/lucide-icons/lucide/tree/HEAD/packages/lucide-react), [npm](https://github.com/npm/cli) and [resend](https://github.com/resend/resend-node).


Updates `@aws-sdk/client-s3` from 3.947.0 to 3.948.0
- [Release notes](https://github.com/aws/aws-sdk-js-v3/releases)
- [Changelog](https://github.com/aws/aws-sdk-js-v3/blob/main/clients/client-s3/CHANGELOG.md)
- [Commits](https://github.com/aws/aws-sdk-js-v3/commits/v3.948.0/clients/client-s3)

Updates `lucide-react` from 0.556.0 to 0.559.0
- [Release notes](https://github.com/lucide-icons/lucide/releases)
- [Commits](https://github.com/lucide-icons/lucide/commits/0.559.0/packages/lucide-react)

Updates `npm` from 11.6.4 to 11.7.0
- [Release notes](https://github.com/npm/cli/releases)
- [Changelog](https://github.com/npm/cli/blob/latest/CHANGELOG.md)
- [Commits](https://github.com/npm/cli/compare/v11.6.4...v11.7.0)

Updates `resend` from 6.5.2 to 6.6.0
- [Release notes](https://github.com/resend/resend-node/releases)
- [Commits](https://github.com/resend/resend-node/compare/v6.5.2...v6.6.0)

---
updated-dependencies:
- dependency-name: "@aws-sdk/client-s3"
  dependency-version: 3.948.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
- dependency-name: lucide-react
  dependency-version: 0.559.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
- dependency-name: npm
  dependency-version: 11.7.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
- dependency-name: resend
  dependency-version: 6.6.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-11 01:21:19 +00:00
dependabot[bot]
2316462721 Bump @types/pg from 8.15.6 to 8.16.0 in the dev-minor-updates group
Bumps the dev-minor-updates group with 1 update: [@types/pg](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/HEAD/types/pg).


Updates `@types/pg` from 8.15.6 to 8.16.0
- [Release notes](https://github.com/DefinitelyTyped/DefinitelyTyped/releases)
- [Commits](https://github.com/DefinitelyTyped/DefinitelyTyped/commits/HEAD/types/pg)

---
updated-dependencies:
- dependency-name: "@types/pg"
  dependency-version: 8.16.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: dev-minor-updates
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-11 01:19:20 +00:00
Owen Schwartz
7cc990107a Merge pull request #2035 from fosrl/dependabot/npm_and_yarn/dev-patch-updates-3ea7ca757b
Bump react-email from 5.0.6 to 5.0.7 in the dev-patch-updates group
2025-12-10 20:19:00 -05:00
dependabot[bot]
9917a569ac Bump react-email from 5.0.6 to 5.0.7 in the dev-patch-updates group
Bumps the dev-patch-updates group with 1 update: [react-email](https://github.com/resend/react-email/tree/HEAD/packages/react-email).


Updates `react-email` from 5.0.6 to 5.0.7
- [Release notes](https://github.com/resend/react-email/releases)
- [Changelog](https://github.com/resend/react-email/blob/canary/packages/react-email/CHANGELOG.md)
- [Commits](https://github.com/resend/react-email/commits/react-email@5.0.7/packages/react-email)

---
updated-dependencies:
- dependency-name: react-email
  dependency-version: 5.0.7
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: dev-patch-updates
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-11 01:18:06 +00:00
Owen
c56574e431 Send site add in case the client does not have the site 2025-12-10 11:57:45 -05:00
Fred KISSIE
f9c0e0ec3d 💬 updated text 2025-12-10 03:56:08 +01:00
Fred KISSIE
85986dcccb Merge branch 'dev' into fix/log-analytics-adjustments 2025-12-10 03:49:40 +01:00
Fred KISSIE
c9779254c3 💄 add time range tooltip to explain it better 2025-12-10 03:44:46 +01:00
Fred KISSIE
5b620469c7 ♻️ set export logs limits to 50 000 everywhere 2025-12-10 03:42:53 +01:00
Fred KISSIE
df4b9de334 🚧 wip: export limits 2025-12-10 03:24:32 +01:00
Fred KISSIE
d490cab48c Merge dev into fix/log-analytics-adjustments 2025-12-10 03:19:14 +01:00
miloschwartz
b68c0962c6 visual enhancements 2025-12-09 20:58:45 -05:00
Owen
ee2a438602 Merge branch 'main' into dev 2025-12-09 16:26:21 -05:00
Owen
74dd3fdc9f Update packages 2025-12-09 16:18:20 -05:00
Owen
314da3ee3e Update formatting to work with ipv6 2025-12-09 16:11:12 -05:00
Owen Schwartz
68cfc84249 New translations en-us.json (German) 2025-12-09 14:09:22 -05:00
Owen
0bcf5c2b42 Update packages 2025-12-09 12:09:23 -05:00
Owen
9210e005e9 Merge branch 'main' into dev 2025-12-09 12:08:32 -05:00
Owen
f245632371 Fix expires at not updating 2025-12-09 11:50:48 -05:00
miloschwartz
6453b070bb add more resiliency to the license check 2025-12-09 11:26:11 -05:00
Owen Schwartz
8c4db93a93 Merge pull request #2024 from fosrl/dependabot/npm_and_yarn/multi-1eaea4558a
Bump next and @react-email/preview-server
2025-12-09 10:57:54 -05:00
Owen
f9b03943c3 Format all files 2025-12-09 10:56:14 -05:00
Owen
fa839a811f Merge branch 'Fredkiss3-chore/some-dx-changes' into dev 2025-12-09 10:54:35 -05:00
Owen
88d2c2eac8 Merge branch 'chore/some-dx-changes' of github.com:Fredkiss3/pangolin into Fredkiss3-chore/some-dx-changes 2025-12-09 10:54:28 -05:00
dependabot[bot]
c84cc1815b Bump next and @react-email/preview-server
Bumps [next](https://github.com/vercel/next.js) to 15.5.7 and updates ancestor dependency [@react-email/preview-server](https://github.com/resend/react-email/tree/HEAD/packages/preview-server). These dependencies need to be updated together.


Updates `next` from 15.5.2 to 15.5.7
- [Release notes](https://github.com/vercel/next.js/releases)
- [Changelog](https://github.com/vercel/next.js/blob/canary/release.js)
- [Commits](https://github.com/vercel/next.js/compare/v15.5.2...v15.5.7)

Updates `@react-email/preview-server` from 4.3.2 to 5.0.6
- [Release notes](https://github.com/resend/react-email/releases)
- [Changelog](https://github.com/resend/react-email/blob/canary/packages/preview-server/CHANGELOG.md)
- [Commits](https://github.com/resend/react-email/commits/@react-email/preview-server@5.0.6/packages/preview-server)

---
updated-dependencies:
- dependency-name: next
  dependency-version: 15.5.7
  dependency-type: indirect
- dependency-name: "@react-email/preview-server"
  dependency-version: 5.0.6
  dependency-type: direct:development
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-09 15:50:48 +00:00
Owen
2c23ffd178 Merge branch 'dev' of github.com:fosrl/pangolin into dev 2025-12-09 10:50:33 -05:00
Owen Schwartz
da3f7ae404 Merge pull request #2018 from fosrl/dependabot/npm_and_yarn/prod-minor-updates-a5bcaae1b2
Bump the prod-minor-updates group across 1 directory with 4 updates
2025-12-09 10:49:02 -05:00
Owen Schwartz
f460559a4b Merge pull request #2021 from fosrl/dependabot/npm_and_yarn/dev-patch-updates-cd2d8c1767
Bump @types/node from 24.10.1 to 24.10.2 in the dev-patch-updates group
2025-12-09 10:48:10 -05:00
Owen Schwartz
0c9deeb2d7 Merge pull request #2022 from fosrl/dependabot/go_modules/install/prod-minor-updates-f9eb33633d
Bump golang.org/x/term from 0.37.0 to 0.38.0 in /install in the prod-minor-updates group
2025-12-09 10:48:02 -05:00
Owen Schwartz
1289b99f14 Merge pull request #2009 from fosrl/dependabot/npm_and_yarn/stripe-20.0.0
Bump stripe from 18.2.1 to 20.0.0
2025-12-09 10:47:54 -05:00
Owen Schwartz
1a7a6e5b6f Merge pull request #2007 from fosrl/dependabot/npm_and_yarn/react-email/render-2.0.0
Bump @react-email/render from 1.4.0 to 2.0.0
2025-12-09 10:47:00 -05:00
Owen Schwartz
f56135eed3 Merge pull request #2011 from Lokowitz/fix-dev
Update packages
2025-12-09 10:46:32 -05:00
Owen
23e9a61f3e Fixing various bugs 2025-12-09 10:31:43 -05:00
Lokowitz
5428ad1009 merge upstream 2025-12-09 11:40:57 +00:00
Lokowitz
bba28bc5f2 Merge remote-tracking branch 'upstream/dev' into fix-dev 2025-12-09 11:40:04 +00:00
Owen
18498a32ce Quiet log messages 2025-12-08 22:07:17 -05:00
Owen
887af85db1 Fix removing remote subnet on remove site resource 2025-12-08 22:06:37 -05:00
Owen
a306aa971b Pick client endpoint as part of the transaction 2025-12-08 21:37:17 -05:00
Owen
0a9b19ecfc Try to fix deadlocks again
Fixes FOU-284
2025-12-08 21:26:23 -05:00
Owen
e011580b96 Update and add server version 2025-12-08 21:26:23 -05:00
miloschwartz
048ce850a8 get country using maxmind and clear stale device codes 2025-12-08 21:12:19 -05:00
dependabot[bot]
2ca1f15add Bump the prod-minor-updates group across 1 directory with 4 updates
Bumps the prod-minor-updates group with 4 updates in the / directory: [@asteasolutions/zod-to-openapi](https://github.com/asteasolutions/zod-to-openapi), [@aws-sdk/client-s3](https://github.com/aws/aws-sdk-js-v3/tree/HEAD/clients/client-s3), [react-day-picker](https://github.com/gpbl/react-day-picker) and [winston](https://github.com/winstonjs/winston).


Updates `@asteasolutions/zod-to-openapi` from 8.1.0 to 8.2.0
- [Release notes](https://github.com/asteasolutions/zod-to-openapi/releases)
- [Commits](https://github.com/asteasolutions/zod-to-openapi/compare/v8.1.0...v8.2.0)

Updates `@aws-sdk/client-s3` from 3.943.0 to 3.946.0
- [Release notes](https://github.com/aws/aws-sdk-js-v3/releases)
- [Changelog](https://github.com/aws/aws-sdk-js-v3/blob/main/clients/client-s3/CHANGELOG.md)
- [Commits](https://github.com/aws/aws-sdk-js-v3/commits/v3.946.0/clients/client-s3)

Updates `react-day-picker` from 9.11.3 to 9.12.0
- [Release notes](https://github.com/gpbl/react-day-picker/releases)
- [Changelog](https://github.com/gpbl/react-day-picker/blob/main/CHANGELOG.md)
- [Commits](https://github.com/gpbl/react-day-picker/compare/v9.11.3...v9.12.0)

Updates `winston` from 3.18.3 to 3.19.0
- [Release notes](https://github.com/winstonjs/winston/releases)
- [Changelog](https://github.com/winstonjs/winston/blob/master/CHANGELOG.md)
- [Commits](https://github.com/winstonjs/winston/compare/v3.18.3...v3.19.0)

---
updated-dependencies:
- dependency-name: "@asteasolutions/zod-to-openapi"
  dependency-version: 8.2.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
- dependency-name: "@aws-sdk/client-s3"
  dependency-version: 3.946.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
- dependency-name: react-day-picker
  dependency-version: 9.12.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
- dependency-name: winston
  dependency-version: 3.19.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-09 01:23:08 +00:00
dependabot[bot]
05ebd547b5 Bump golang.org/x/term in /install in the prod-minor-updates group
Bumps the prod-minor-updates group in /install with 1 update: [golang.org/x/term](https://github.com/golang/term).


Updates `golang.org/x/term` from 0.37.0 to 0.38.0
- [Commits](https://github.com/golang/term/compare/v0.37.0...v0.38.0)

---
updated-dependencies:
- dependency-name: golang.org/x/term
  dependency-version: 0.38.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-09 01:21:11 +00:00
dependabot[bot]
5a8b1383a4 Bump @types/node from 24.10.1 to 24.10.2 in the dev-patch-updates group
Bumps the dev-patch-updates group with 1 update: [@types/node](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/HEAD/types/node).


Updates `@types/node` from 24.10.1 to 24.10.2
- [Release notes](https://github.com/DefinitelyTyped/DefinitelyTyped/releases)
- [Commits](https://github.com/DefinitelyTyped/DefinitelyTyped/commits/HEAD/types/node)

---
updated-dependencies:
- dependency-name: "@types/node"
  dependency-version: 24.10.2
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: dev-patch-updates
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-09 01:20:03 +00:00
miloschwartz
ede51bebb5 use semver to compare versions in product updates 2025-12-08 19:51:32 -05:00
Owen Schwartz
fd29071d57 Merge pull request #2004 from fosrl/dependabot/github_actions/actions/checkout-6.0.1
Bump actions/checkout from 6.0.0 to 6.0.1
2025-12-08 19:48:01 -05:00
Owen Schwartz
8e1af79dc4 Merge pull request #2003 from fosrl/dependabot/github_actions/actions/setup-node-6.1.0
Bump actions/setup-node from 6.0.0 to 6.1.0
2025-12-08 19:47:48 -05:00
Owen Schwartz
dc8c28626d Merge pull request #2002 from fosrl/dependabot/github_actions/actions/stale-10.1.1
Bump actions/stale from 10.1.0 to 10.1.1
2025-12-08 19:47:40 -05:00
Fred KISSIE
9db2feff77 ♻️ set default time to 7 days ago in API too 2025-12-09 00:17:34 +01:00
Fred KISSIE
adf76bfb53 ♻️ set default start time to 7 days ago 2025-12-08 23:56:28 +01:00
Fred KISSIE
e0a79b7d4d ♻️ set default log analytics time range to 7 days ago 2025-12-08 22:57:05 +01:00
dependabot[bot]
9ea3914a93 Bump @react-email/render from 1.4.0 to 2.0.0
Bumps [@react-email/render](https://github.com/resend/react-email/tree/HEAD/packages/render) from 1.4.0 to 2.0.0.
- [Release notes](https://github.com/resend/react-email/releases)
- [Changelog](https://github.com/resend/react-email/blob/canary/packages/render/CHANGELOG.md)
- [Commits](https://github.com/resend/react-email/commits/@react-email/render@2.0.0/packages/render)

---
updated-dependencies:
- dependency-name: "@react-email/render"
  dependency-version: 2.0.0
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-08 19:13:04 +00:00
miloschwartz
1aeb31be04 remove file 2025-12-08 14:12:10 -05:00
Fred KISSIE
64120ea878 🔨 Add format script and install prettier 2025-12-08 19:57:08 +01:00
Fred KISSIE
0003ec021b 🔨 add default vscode options for new contributors 2025-12-08 19:56:53 +01:00
Lokowitz
c9a1da210f revert my fix 2025-12-08 08:27:05 +00:00
Lokowitz
ace402af2d update packages 2025-12-08 08:23:32 +00:00
Lokowitz
e60dce25c9 Merge remote-tracking branch 'upstream/dev' into fix-dev
merge dev
2025-12-08 08:21:19 +00:00
dependabot[bot]
ccfff030e5 Bump stripe from 18.2.1 to 20.0.0
Bumps [stripe](https://github.com/stripe/stripe-node) from 18.2.1 to 20.0.0.
- [Release notes](https://github.com/stripe/stripe-node/releases)
- [Changelog](https://github.com/stripe/stripe-node/blob/master/CHANGELOG.md)
- [Commits](https://github.com/stripe/stripe-node/compare/v18.2.1...v20.0.0)

---
updated-dependencies:
- dependency-name: stripe
  dependency-version: 20.0.0
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-08 01:25:10 +00:00
dependabot[bot]
00765c1faf Bump actions/checkout from 6.0.0 to 6.0.1
Bumps [actions/checkout](https://github.com/actions/checkout) from 6.0.0 to 6.0.1.
- [Release notes](https://github.com/actions/checkout/releases)
- [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md)
- [Commits](1af3b93b68...8e8c483db8)

---
updated-dependencies:
- dependency-name: actions/checkout
  dependency-version: 6.0.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-08 01:16:59 +00:00
dependabot[bot]
f6bbdeadb9 Bump actions/setup-node from 6.0.0 to 6.1.0
Bumps [actions/setup-node](https://github.com/actions/setup-node) from 6.0.0 to 6.1.0.
- [Release notes](https://github.com/actions/setup-node/releases)
- [Commits](2028fbc5c2...395ad32622)

---
updated-dependencies:
- dependency-name: actions/setup-node
  dependency-version: 6.1.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-08 01:16:53 +00:00
dependabot[bot]
9cf520574a Bump actions/stale from 10.1.0 to 10.1.1
Bumps [actions/stale](https://github.com/actions/stale) from 10.1.0 to 10.1.1.
- [Release notes](https://github.com/actions/stale/releases)
- [Changelog](https://github.com/actions/stale/blob/main/CHANGELOG.md)
- [Commits](5f858e3efb...997185467f)

---
updated-dependencies:
- dependency-name: actions/stale
  dependency-version: 10.1.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-08 01:16:48 +00:00
Lokowitz
f8ab5b7af7 update packages 2025-12-07 14:03:34 +00:00
596 changed files with 10112 additions and 9759 deletions


@@ -1,6 +1,3 @@
{
"extends": [
"next/core-web-vitals",
"next/typescript"
]
"extends": ["next/core-web-vitals", "next/typescript"]
}


@@ -36,7 +36,7 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up QEMU
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
@@ -107,7 +107,7 @@ jobs:
- name: Build and push Docker images (Docker Hub)
run: |
TAG=${{ env.TAG }}
make build-release tag=$TAG
make -j4 build-release tag=$TAG
echo "Built & pushed to: ${{ env.DOCKERHUB_IMAGE }}:${TAG}"
shell: bash


@@ -21,10 +21,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Node.js
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: '22'

.github/workflows/restart-runners.yml (new file, 39 lines)

@@ -0,0 +1,39 @@
name: Restart Runners
on:
schedule:
- cron: '0 0 */7 * *'
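# note: a */7 step in the day-of-month field fires at 00:00 UTC on the 1st, 8th, 15th, 22nd, and 29th, not strictly every 7 days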
permissions:
id-token: write
contents: read
jobs:
ec2-maintenance-prod:
runs-on: ubuntu-latest
permissions: write-all
steps:
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v2
with:
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_ROLE_NAME }}
role-duration-seconds: 3600
aws-region: ${{ secrets.AWS_REGION }}
- name: Verify AWS identity
run: aws sts get-caller-identity
- name: Start EC2 instance
run: |
aws ec2 start-instances --instance-ids ${{ secrets.EC2_INSTANCE_ID_ARM_RUNNER }}
aws ec2 start-instances --instance-ids ${{ secrets.EC2_INSTANCE_ID_AMD_RUNNER }}
echo "EC2 instances started"
- name: Wait
run: sleep 600
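# 600 seconds keeps the runners up for 10 minutes before they are stopped again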
- name: Stop EC2 instance
run: |
aws ec2 stop-instances --instance-ids ${{ secrets.EC2_INSTANCE_ID_ARM_RUNNER }}
aws ec2 stop-instances --instance-ids ${{ secrets.EC2_INSTANCE_ID_AMD_RUNNER }}
echo "EC2 instances stopped"


@@ -14,7 +14,7 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
- uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
with:
days-before-stale: 14
days-before-close: 14


@@ -14,9 +14,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: '22'

.prettierignore (new file, 12 lines)

@@ -0,0 +1,12 @@
.github/
bruno/
cli/
config/
messages/
next.config.mjs/
public/
tailwind.config.js/
test/
**/*.yml
**/*.yaml
**/*.md

.vscode/extensions.json (new file, 3 lines)

@@ -0,0 +1,3 @@
{
"recommendations": ["esbenp.prettier-vscode"]
}

.vscode/settings.json (new file, 22 lines)

@@ -0,0 +1,22 @@
{
"editor.codeActionsOnSave": {
"source.addMissingImports.ts": "always"
},
"editor.defaultFormatter": "esbenp.prettier-vscode",
"[jsonc]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[typescriptreact]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"editor.formatOnSave": true
}


@@ -1,8 +1,13 @@
.PHONY: build build-pg build-release build-arm build-x86 test clean
.PHONY: build dev-build-sqlite dev-build-pg build-release build-arm build-x86 test clean
major_tag := $(shell echo $(tag) | cut -d. -f1)
minor_tag := $(shell echo $(tag) | cut -d. -f1,2)
build-release:
.PHONY: build-release build-sqlite build-postgresql build-ee-sqlite build-ee-postgresql
build-release: build-sqlite build-postgresql build-ee-sqlite build-ee-postgresql
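# aggregate target; the four builds are independent, so CI can run them concurrently via `make -j4 build-release tag=<tag>`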
build-sqlite:
@if [ -z "$(tag)" ]; then \
echo "Error: tag is required. Usage: make build-release tag=<tag>"; \
exit 1; \
@@ -16,6 +21,12 @@ build-release:
--tag fosrl/pangolin:$(minor_tag) \
--tag fosrl/pangolin:$(tag) \
--push .
build-postgresql:
@if [ -z "$(tag)" ]; then \
echo "Error: tag is required. Usage: make build-release tag=<tag>"; \
exit 1; \
fi
docker buildx build \
--build-arg BUILD=oss \
--build-arg DATABASE=pg \
@@ -25,6 +36,12 @@ build-release:
--tag fosrl/pangolin:postgresql-$(minor_tag) \
--tag fosrl/pangolin:postgresql-$(tag) \
--push .
build-ee-sqlite:
@if [ -z "$(tag)" ]; then \
echo "Error: tag is required. Usage: make build-release tag=<tag>"; \
exit 1; \
fi
docker buildx build \
--build-arg BUILD=enterprise \
--build-arg DATABASE=sqlite \
@@ -34,6 +51,12 @@ build-release:
--tag fosrl/pangolin:ee-$(minor_tag) \
--tag fosrl/pangolin:ee-$(tag) \
--push .
build-ee-postgresql:
@if [ -z "$(tag)" ]; then \
echo "Error: tag is required. Usage: make build-release tag=<tag>"; \
exit 1; \
fi
docker buildx build \
--build-arg BUILD=enterprise \
--build-arg DATABASE=pg \
@@ -80,10 +103,10 @@ build-arm:
build-x86:
docker buildx build --platform linux/amd64 -t fosrl/pangolin:latest .
build-sqlite:
dev-build-sqlite:
docker build --build-arg DATABASE=sqlite -t fosrl/pangolin:latest .
build-pg:
dev-build-pg:
docker build --build-arg DATABASE=pg -t fosrl/pangolin:postgresql-latest .
test:


@@ -41,7 +41,7 @@
</strong>
</p>
Pangolin is a self-hosted tunneled reverse proxy server with identity and context aware access control, designed to easily expose and protect applications running anywhere. Pangolin acts as a central hub and connects isolated networks — even those behind restrictive firewalls — through encrypted tunnels, enabling easy access to remote services without opening ports or requiring a VPN.
Pangolin is an open-source, identity-based remote access platform built on WireGuard that enables secure, seamless connectivity to private and public resources. Pangolin combines reverse proxy and VPN capabilities into one platform, providing browser-based access to web applications and client-based access to any private resources, all with zero-trust security and granular access control.
## Installation
@@ -60,14 +60,20 @@ Pangolin is a self-hosted tunneled reverse proxy server with identity and contex
## Key Features
Pangolin packages everything you need for seamless application access and exposure into one cohesive platform.
| <img width=500 /> | <img width=500 /> |
|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------|
| **Manage applications in one place**<br /><br /> Pangolin provides a unified dashboard where you can monitor, configure, and secure all of your services regardless of where they are hosted. | <img src="public/screenshots/hero.png" width=500 /><tr></tr> |
| **Reverse proxy across networks anywhere**<br /><br />Route traffic via tunnels to any private network. Pangolin works like a reverse proxy that spans multiple networks and handles routing, load balancing, health checking, and more to the right services on the other end. | <img src="public/screenshots/sites.png" width=500 /><tr></tr> |
| **Enforce identity and context aware rules**<br /><br />Protect your applications with identity and context aware rules such as SSO, OIDC, PIN, password, temporary share links, geolocation, IP, and more. | <img src="public/auth-diagram1.png" width=500 /><tr></tr> |
| **Quickly connect Pangolin sites**<br /><br />Pangolin's lightweight [Newt](https://github.com/fosrl/newt) client runs in userspace and can run anywhere. Use it as a site connector to route traffic to backends across all of your environments. | <img src="public/clip.gif" width=500 /><tr></tr> |
| **Connect remote networks with sites**<br /><br />Pangolin's lightweight site connectors create secure tunnels from remote networks without requiring public IP addresses or open ports. Sites make any network anywhere available for authorized access. | <img src="public/screenshots/sites.png" width=500 /><tr></tr> |
| **Browser-based reverse proxy access**<br /><br />Expose web applications through identity and context-aware tunneled reverse proxies. Pangolin handles routing, load balancing, health checking, and automatic SSL certificates without exposing your network directly to the internet. Users access applications through any web browser with authentication and granular access control. | <img src="public/clip.gif" width=500 /><tr></tr> |
| **Client-based private resource access**<br /><br />Access private resources like SSH servers, databases, RDP, and entire network ranges through Pangolin clients. Intelligent NAT traversal enables connections even through restrictive firewalls, while DNS aliases provide friendly names and fast connections to resources across all your sites. | <img src="public/screenshots/private-resources.png" width=500 /><tr></tr> |
| **Zero-trust granular access**<br /><br />Grant users access to specific resources, not entire networks. Unlike traditional VPNs that expose full network access, Pangolin's zero-trust model ensures users can only reach the applications and services you explicitly define, reducing security risk and attack surface. | <img src="public/screenshots/user-devices.png" width=500 /><tr></tr> |
## Download Clients
Download the Pangolin client for your platform:
- [Mac](https://pangolin.net/downloads/mac)
- [Windows](https://pangolin.net/downloads/windows)
- [Linux](https://pangolin.net/downloads/linux)
## Get Started


@@ -17,4 +17,4 @@
"lib": "@/lib",
"hooks": "@/hooks"
}
}
}


@@ -1,9 +1,7 @@
import { defineConfig } from "drizzle-kit";
import path from "path";
const schema = [
path.join("server", "db", "pg", "schema"),
];
const schema = [path.join("server", "db", "pg", "schema")];
export default defineConfig({
dialect: "postgresql",


@@ -2,9 +2,7 @@ import { APP_PATH } from "@server/lib/consts";
import { defineConfig } from "drizzle-kit";
import path from "path";
const schema = [
path.join("server", "db", "sqlite", "schema"),
];
const schema = [path.join("server", "db", "sqlite", "schema")];
export default defineConfig({
dialect: "sqlite",


@@ -24,20 +24,20 @@ const argv = yargs(hideBin(process.argv))
alias: "e",
describe: "Entry point file",
type: "string",
demandOption: true,
demandOption: true
})
.option("out", {
alias: "o",
describe: "Output file path",
type: "string",
demandOption: true,
demandOption: true
})
.option("build", {
alias: "b",
describe: "Build type (oss, saas, enterprise)",
type: "string",
choices: ["oss", "saas", "enterprise"],
default: "oss",
default: "oss"
})
.help()
.alias("help", "h").argv;
@@ -66,7 +66,9 @@ function privateImportGuardPlugin() {
// Check if the importing file is NOT in server/private
const normalizedImporter = path.normalize(importingFile);
const isInServerPrivate = normalizedImporter.includes(path.normalize("server/private"));
const isInServerPrivate = normalizedImporter.includes(
path.normalize("server/private")
);
if (!isInServerPrivate) {
const violation = {
@@ -79,8 +81,8 @@ function privateImportGuardPlugin() {
console.log(`PRIVATE IMPORT VIOLATION:`);
console.log(` File: ${importingFile}`);
console.log(` Import: ${args.path}`);
console.log(` Resolve dir: ${args.resolveDir || 'N/A'}`);
console.log('');
console.log(` Resolve dir: ${args.resolveDir || "N/A"}`);
console.log("");
}
// Return null to let the default resolver handle it
@@ -89,16 +91,20 @@ function privateImportGuardPlugin() {
build.onEnd((result) => {
if (violations.length > 0) {
console.log(`\nSUMMARY: Found ${violations.length} private import violation(s):`);
console.log(
`\nSUMMARY: Found ${violations.length} private import violation(s):`
);
violations.forEach((v, i) => {
console.log(` ${i + 1}. ${path.relative(process.cwd(), v.file)} imports ${v.importPath}`);
console.log(
` ${i + 1}. ${path.relative(process.cwd(), v.file)} imports ${v.importPath}`
);
});
console.log('');
console.log("");
result.errors.push({
text: `Private import violations detected: ${violations.length} violation(s) found`,
location: null,
notes: violations.map(v => ({
notes: violations.map((v) => ({
text: `${path.relative(process.cwd(), v.file)} imports ${v.importPath}`,
location: null
}))
@@ -121,7 +127,9 @@ function dynamicImportGuardPlugin() {
// Check if the importing file is NOT in server/private
const normalizedImporter = path.normalize(importingFile);
const isInServerPrivate = normalizedImporter.includes(path.normalize("server/private"));
const isInServerPrivate = normalizedImporter.includes(
path.normalize("server/private")
);
if (isInServerPrivate) {
const violation = {
@@ -134,8 +142,8 @@ function dynamicImportGuardPlugin() {
console.log(`DYNAMIC IMPORT VIOLATION:`);
console.log(` File: ${importingFile}`);
console.log(` Import: ${args.path}`);
console.log(` Resolve dir: ${args.resolveDir || 'N/A'}`);
console.log('');
console.log(` Resolve dir: ${args.resolveDir || "N/A"}`);
console.log("");
}
// Return null to let the default resolver handle it
@@ -144,16 +152,20 @@ function dynamicImportGuardPlugin() {
build.onEnd((result) => {
if (violations.length > 0) {
console.log(`\nSUMMARY: Found ${violations.length} dynamic import violation(s):`);
console.log(
`\nSUMMARY: Found ${violations.length} dynamic import violation(s):`
);
violations.forEach((v, i) => {
console.log(` ${i + 1}. ${path.relative(process.cwd(), v.file)} imports ${v.importPath}`);
console.log(
` ${i + 1}. ${path.relative(process.cwd(), v.file)} imports ${v.importPath}`
);
});
console.log('');
console.log("");
result.errors.push({
text: `Dynamic import violations detected: ${violations.length} violation(s) found`,
location: null,
notes: violations.map(v => ({
notes: violations.map((v) => ({
text: `${path.relative(process.cwd(), v.file)} imports ${v.importPath}`,
location: null
}))
@@ -172,21 +184,28 @@ function dynamicImportSwitcherPlugin(buildValue) {
const switches = [];
build.onStart(() => {
console.log(`Dynamic import switcher using build type: ${buildValue}`);
console.log(
`Dynamic import switcher using build type: ${buildValue}`
);
});
build.onResolve({ filter: /^#dynamic\// }, (args) => {
// Extract the path after #dynamic/
const dynamicPath = args.path.replace(/^#dynamic\//, '');
const dynamicPath = args.path.replace(/^#dynamic\//, "");
// Determine the replacement based on build type
let replacement;
if (buildValue === "oss") {
replacement = `#open/${dynamicPath}`;
} else if (buildValue === "saas" || buildValue === "enterprise") {
} else if (
buildValue === "saas" ||
buildValue === "enterprise"
) {
replacement = `#closed/${dynamicPath}`; // We use #closed here so that the route guards don't complain after it's been changed, but this is the same as #private
} else {
console.warn(`Unknown build type '${buildValue}', defaulting to #open/`);
console.warn(
`Unknown build type '${buildValue}', defaulting to #open/`
);
replacement = `#open/${dynamicPath}`;
}
@@ -201,8 +220,10 @@ function dynamicImportSwitcherPlugin(buildValue) {
console.log(`DYNAMIC IMPORT SWITCH:`);
console.log(` File: ${args.importer}`);
console.log(` Original: ${args.path}`);
console.log(` Switched to: ${replacement} (build: ${buildValue})`);
console.log('');
console.log(
` Switched to: ${replacement} (build: ${buildValue})`
);
console.log("");
// Rewrite the import path and let the normal resolution continue
return build.resolve(replacement, {
@@ -215,12 +236,18 @@ function dynamicImportSwitcherPlugin(buildValue) {
build.onEnd((result) => {
if (switches.length > 0) {
console.log(`\nDYNAMIC IMPORT SUMMARY: Switched ${switches.length} import(s) for build type '${buildValue}':`);
console.log(
`\nDYNAMIC IMPORT SUMMARY: Switched ${switches.length} import(s) for build type '${buildValue}':`
);
switches.forEach((s, i) => {
console.log(` ${i + 1}. ${path.relative(process.cwd(), s.file)}`);
console.log(` ${s.originalPath} ${s.replacementPath}`);
console.log(
` ${i + 1}. ${path.relative(process.cwd(), s.file)}`
);
console.log(
` ${s.originalPath}${s.replacementPath}`
);
});
console.log('');
console.log("");
}
});
}
@@ -235,7 +262,7 @@ esbuild
format: "esm",
minify: false,
banner: {
js: banner,
js: banner
},
platform: "node",
external: ["body-parser"],
@@ -244,20 +271,22 @@ esbuild
dynamicImportGuardPlugin(),
dynamicImportSwitcherPlugin(argv.build),
nodeExternalsPlugin({
packagePath: getPackagePaths(),
}),
packagePath: getPackagePaths()
})
],
sourcemap: "inline",
target: "node22",
target: "node22"
})
.then((result) => {
// Check if there were any errors in the build result
if (result.errors && result.errors.length > 0) {
console.error(`Build failed with ${result.errors.length} error(s):`);
console.error(
`Build failed with ${result.errors.length} error(s):`
);
result.errors.forEach((error, i) => {
console.error(`${i + 1}. ${error.text}`);
if (error.notes) {
error.notes.forEach(note => {
error.notes.forEach((note) => {
console.error(` - ${note.text}`);
});
}
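
Distilled from the switcher plugin above, the rewrite rule itself is small. A sketch of the mapping in isolation (illustrative names; the real plugin performs this rewrite inside esbuild's resolver):

// Sketch of the path rewrite performed by dynamicImportSwitcherPlugin.
function switchDynamicImport(specifier: string, build: string): string {
    const dynamicPath = specifier.replace(/^#dynamic\//, "");
    if (build === "saas" || build === "enterprise") {
        // #closed is equivalent to #private but passes the route guards.
        return `#closed/${dynamicPath}`;
    }
    // "oss" and unknown build types fall back to the open implementation.
    return `#open/${dynamicPath}`;
}

For example, switchDynamicImport("#dynamic/billing", "enterprise") yields "#closed/billing" (the "billing" module name is hypothetical).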


@@ -1,19 +1,19 @@
import tseslint from 'typescript-eslint';
import tseslint from "typescript-eslint";
export default tseslint.config({
files: ["**/*.{ts,tsx,js,jsx}"],
languageOptions: {
parser: tseslint.parser,
parserOptions: {
ecmaVersion: "latest",
sourceType: "module",
ecmaFeatures: {
jsx: true
}
files: ["**/*.{ts,tsx,js,jsx}"],
languageOptions: {
parser: tseslint.parser,
parserOptions: {
ecmaVersion: "latest",
sourceType: "module",
ecmaFeatures: {
jsx: true
}
}
},
rules: {
semi: "error",
"prefer-const": "warn"
}
},
rules: {
"semi": "error",
"prefer-const": "warn"
}
});
});


@@ -3,8 +3,8 @@ module installer
go 1.24.0
require (
golang.org/x/term v0.37.0
golang.org/x/term v0.38.0
gopkg.in/yaml.v3 v3.0.1
)
require golang.org/x/sys v0.38.0 // indirect
require golang.org/x/sys v0.39.0 // indirect


@@ -1,7 +1,7 @@
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU=
golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254=
golang.org/x/sys v0.39.0 h1:CvCKL8MeisomCi6qNZ+wbb0DN9E5AATixKsvNtMoMFk=
golang.org/x/sys v0.39.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/term v0.38.0 h1:PQ5pkm/rLO6HnxFR7N2lJHOZX6Kez5Y1gDSJla6jo7Q=
golang.org/x/term v0.38.0/go.mod h1:bSEAKrOT1W+VSu9TSCMtoGEOUcKxOKgl3LE5QEF/xVg=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=


@@ -1043,7 +1043,7 @@
"actionDeleteSite": "Standort löschen",
"actionGetSite": "Standort abrufen",
"actionListSites": "Standorte auflisten",
"actionApplyBlueprint": "Blaupause anwenden",
"actionApplyBlueprint": "Blueprint anwenden",
"setupToken": "Setup-Token",
"setupTokenDescription": "Geben Sie das Setup-Token von der Serverkonsole ein.",
"setupTokenRequired": "Setup-Token ist erforderlich",
@@ -1102,7 +1102,7 @@
"actionDeleteIdpOrg": "IDP-Organisationsrichtlinie löschen",
"actionListIdpOrgs": "IDP-Organisationen auflisten",
"actionUpdateIdpOrg": "IDP-Organisation aktualisieren",
"actionCreateClient": "Endgerät anlegen",
"actionCreateClient": "Client erstellen",
"actionDeleteClient": "Client löschen",
"actionUpdateClient": "Client aktualisieren",
"actionListClients": "Clients auflisten",
@@ -1201,24 +1201,24 @@
"sidebarLogsAnalytics": "Analytik",
"blueprints": "Baupläne",
"blueprintsDescription": "Deklarative Konfigurationen anwenden und vorherige Abläufe anzeigen",
"blueprintAdd": "Blaupause hinzufügen",
"blueprintGoBack": "Alle Blaupausen ansehen",
"blueprintCreate": "Blaupause erstellen",
"blueprintCreateDescription2": "Folge den Schritten unten, um eine neue Blaupause zu erstellen und anzuwenden",
"blueprintDetails": "Blaupausendetails",
"blueprintDetailsDescription": "Siehe das Ergebnis der angewendeten Blaupause und alle aufgetretenen Fehler",
"blueprintInfo": "Blaupauseninformation",
"blueprintAdd": "Blueprint hinzufügen",
"blueprintGoBack": "Alle Blueprints ansehen",
"blueprintCreate": "Blueprint erstellen",
"blueprintCreateDescription2": "Folge den unten aufgeführten Schritten, um einen neuen Blueprint zu erstellen und anzuwenden",
"blueprintDetails": "Blueprint Detailinformationen",
"blueprintDetailsDescription": "Siehe das Ergebnis des angewendeten Blueprints und alle aufgetretenen Fehler",
"blueprintInfo": "Blueprint Informationen",
"message": "Nachricht",
"blueprintContentsDescription": "Den YAML-Inhalt definieren, der die Infrastruktur beschreibt",
"blueprintErrorCreateDescription": "Fehler beim Anwenden der Blaupause",
"blueprintErrorCreate": "Fehler beim Erstellen der Blaupause",
"searchBlueprintProgress": "Blaupausen suchen...",
"blueprintErrorCreateDescription": "Fehler beim Anwenden des Blueprints",
"blueprintErrorCreate": "Fehler beim Erstellen des Blueprints",
"searchBlueprintProgress": "Blueprints suchen...",
"appliedAt": "Angewandt am",
"source": "Quelle",
"contents": "Inhalt",
"parsedContents": "Analysierte Inhalte (Nur lesen)",
"enableDockerSocket": "Docker Blaupause aktivieren",
"enableDockerSocketDescription": "Aktiviere Docker-Socket-Label-Scraping für Blaupausenbeschriftungen. Der Socket-Pfad muss neu angegeben werden.",
"enableDockerSocket": "Docker Blueprint aktivieren",
"enableDockerSocketDescription": "Aktiviere Docker-Socket-Label-Scraping für Blueprintbeschriftungen. Der Socket-Pfad muss neu angegeben werden.",
"enableDockerSocketLink": "Mehr erfahren",
"viewDockerContainers": "Docker Container anzeigen",
"containersIn": "Container in {siteName}",
@@ -1543,7 +1543,7 @@
"healthCheckPathRequired": "Gesundheits-Check-Pfad ist erforderlich",
"healthCheckMethodRequired": "HTTP-Methode ist erforderlich",
"healthCheckIntervalMin": "Prüfintervall muss mindestens 5 Sekunden betragen",
"healthCheckTimeoutMin": "Timeout muss mindestens 1 Sekunde betragen",
"healthCheckTimeoutMin": "Zeitüberschreitung muss mindestens 1 Sekunde betragen",
"healthCheckRetryMin": "Wiederholungsversuche müssen mindestens 1 betragen",
"httpMethod": "HTTP-Methode",
"selectHttpMethod": "HTTP-Methode auswählen",


@@ -2067,6 +2067,8 @@
"timestamp": "Timestamp",
"accessLogs": "Access Logs",
"exportCsv": "Export CSV",
"exportError": "Unknown error when exporting CSV",
"exportCsvTooltip": "Within Time Range",
"actorId": "Actor ID",
"allowedByRule": "Allowed by Rule",
"allowedNoAuth": "Allowed No Auth",
@@ -2270,5 +2272,8 @@
"remoteExitNodeRegenerateAndDisconnectWarning": "This will regenerate the credentials and immediately disconnect the remote exit node. The remote exit node will need to be restarted with the new credentials.",
"remoteExitNodeRegenerateCredentialsConfirmation": "Are you sure you want to regenerate the credentials for this remote exit node?",
"remoteExitNodeRegenerateCredentialsWarning": "This will regenerate the credentials. The remote exit node will stay connected until you manually restart it and use the new credentials.",
"agent": "Agent"
"agent": "Agent",
"personalUseOnly": "Personal Use Only",
"loginPageLicenseWatermark": "This instance is licensed for personal use only.",
"instanceIsUnlicensed": "This instance is unlicensed."
}

package-lock.json (generated, 4431 lines changed; diff suppressed because it is too large)


@@ -29,16 +29,17 @@
"build:pg": "mkdir -p dist && next build && node esbuild.mjs -e server/index.ts -o dist/server.mjs && node esbuild.mjs -e server/setup/migrationsPg.ts -o dist/migrations.mjs",
"start": "ENVIRONMENT=prod node dist/migrations.mjs && ENVIRONMENT=prod NODE_ENV=development node --enable-source-maps dist/server.mjs",
"email": "email dev --dir server/emails/templates --port 3005",
"build:cli": "node esbuild.mjs -e cli/index.ts -o dist/cli.mjs"
"build:cli": "node esbuild.mjs -e cli/index.ts -o dist/cli.mjs",
"format": "prettier --write ."
},
"dependencies": {
"@asteasolutions/zod-to-openapi": "8.1.0",
"@faker-js/faker": "^10.1.0",
"@headlessui/react": "^2.2.9",
"@aws-sdk/client-s3": "3.943.0",
"@asteasolutions/zod-to-openapi": "8.2.0",
"@aws-sdk/client-s3": "3.948.0",
"@faker-js/faker": "10.1.0",
"@headlessui/react": "2.2.9",
"@hookform/resolvers": "5.2.2",
"@monaco-editor/react": "^4.7.0",
"@node-rs/argon2": "^2.0.2",
"@monaco-editor/react": "4.7.0",
"@node-rs/argon2": "2.0.2",
"@oslojs/crypto": "1.0.1",
"@oslojs/encoding": "1.1.0",
"@radix-ui/react-avatar": "1.1.11",
@@ -49,138 +50,132 @@
"@radix-ui/react-icons": "1.3.2",
"@radix-ui/react-label": "2.1.8",
"@radix-ui/react-popover": "1.1.15",
"@radix-ui/react-progress": "^1.1.8",
"@radix-ui/react-progress": "1.1.8",
"@radix-ui/react-radio-group": "1.3.8",
"@radix-ui/react-scroll-area": "^1.2.10",
"@radix-ui/react-scroll-area": "1.2.10",
"@radix-ui/react-select": "2.2.6",
"@radix-ui/react-separator": "1.1.8",
"@radix-ui/react-slot": "1.2.4",
"@radix-ui/react-switch": "1.2.6",
"@radix-ui/react-tabs": "1.1.13",
"@radix-ui/react-toast": "1.2.15",
"@radix-ui/react-tooltip": "^1.2.8",
"@react-email/components": "0.5.7",
"@react-email/render": "^1.3.2",
"@react-email/tailwind": "1.2.2",
"@simplewebauthn/browser": "^13.2.2",
"@simplewebauthn/server": "^13.2.2",
"@tailwindcss/forms": "^0.5.10",
"@tanstack/react-query": "^5.90.6",
"@radix-ui/react-tooltip": "1.2.8",
"@react-email/components": "1.0.1",
"@react-email/render": "2.0.0",
"@react-email/tailwind": "2.0.1",
"@simplewebauthn/browser": "13.2.2",
"@simplewebauthn/server": "13.2.2",
"@tailwindcss/forms": "0.5.10",
"@tanstack/react-query": "5.90.12",
"@tanstack/react-table": "8.21.3",
"arctic": "^3.7.0",
"axios": "^1.13.2",
"better-sqlite3": "11.7.0",
"arctic": "3.7.0",
"axios": "1.13.2",
"better-sqlite3": "11.9.1",
"canvas-confetti": "1.9.4",
"class-variance-authority": "^0.7.1",
"class-variance-authority": "0.7.1",
"clsx": "2.1.1",
"cmdk": "1.1.1",
"cookie": "^1.0.2",
"cookie": "1.1.1",
"cookie-parser": "1.4.7",
"cookies": "^0.9.1",
"cookies": "0.9.1",
"cors": "2.8.5",
"crypto-js": "^4.2.0",
"d3": "^7.9.0",
"crypto-js": "4.2.0",
"d3": "7.9.0",
"date-fns": "4.1.0",
"drizzle-orm": "0.45.0",
"eslint": "9.39.1",
"eslint-config-next": "16.0.7",
"eslint-config-next": "16.0.8",
"express": "5.2.1",
"express-rate-limit": "8.2.1",
"glob": "11.1.0",
"glob": "13.0.0",
"helmet": "8.1.0",
"http-errors": "2.0.1",
"i": "^0.3.7",
"i": "0.3.7",
"input-otp": "1.4.2",
"ioredis": "5.8.2",
"jmespath": "^0.16.0",
"jmespath": "0.16.0",
"js-yaml": "4.1.1",
"jsonwebtoken": "^9.0.2",
"lucide-react": "^0.556.0",
"jsonwebtoken": "9.0.3",
"lucide-react": "0.559.0",
"maxmind": "5.0.1",
"moment": "2.30.1",
"next": "15.5.7",
"next-intl": "^4.4.0",
"next": "15.5.9",
"next-intl": "4.5.8",
"next-themes": "0.4.6",
"nextjs-toploader": "^3.9.17",
"nextjs-toploader": "3.9.17",
"node-cache": "5.1.2",
"node-fetch": "3.3.2",
"nodemailer": "7.0.11",
"npm": "^11.6.4",
"nprogress": "^0.2.0",
"npm": "11.7.0",
"nprogress": "0.2.0",
"oslo": "1.2.1",
"pg": "^8.16.2",
"posthog-node": "^5.11.2",
"pg": "8.16.3",
"posthog-node": "5.17.2",
"qrcode.react": "4.2.0",
"react": "19.2.1",
"react-day-picker": "9.11.3",
"react-dom": "19.2.1",
"react-easy-sort": "^1.8.0",
"react": "19.2.3",
"react-day-picker": "9.12.0",
"react-dom": "19.2.3",
"react-easy-sort": "1.8.0",
"react-hook-form": "7.68.0",
"react-icons": "^5.5.0",
"react-icons": "5.5.0",
"rebuild": "0.1.2",
"recharts": "^2.15.4",
"reodotdev": "^1.0.0",
"resend": "^6.4.2",
"semver": "^7.7.3",
"stripe": "18.2.1",
"swagger-ui-express": "^5.0.1",
"topojson-client": "^3.1.0",
"recharts": "2.15.4",
"reodotdev": "1.0.0",
"resend": "6.6.0",
"semver": "7.7.3",
"stripe": "20.0.0",
"swagger-ui-express": "5.0.1",
"tailwind-merge": "3.4.0",
"tw-animate-css": "^1.3.8",
"uuid": "^13.0.0",
"topojson-client": "3.1.0",
"tw-animate-css": "1.4.0",
"uuid": "13.0.0",
"vaul": "1.1.2",
"visionscarto-world-atlas": "^1.0.0",
"winston": "3.18.3",
"visionscarto-world-atlas": "1.0.0",
"winston": "3.19.0",
"winston-daily-rotate-file": "5.0.0",
"ws": "8.18.3",
"yaml": "^2.8.1",
"yaml": "2.8.2",
"yargs": "18.0.0",
"zod": "4.1.12",
"zod": "4.1.13",
"zod-validation-error": "5.0.0"
},
"devDependencies": {
"@dotenvx/dotenvx": "1.51.1",
"@esbuild-plugins/tsconfig-paths": "0.1.2",
"@react-email/preview-server": "4.3.2",
"@tailwindcss/postcss": "^4.1.17",
"@tanstack/react-query-devtools": "^5.90.2",
"@types/better-sqlite3": "7.6.12",
"@tailwindcss/postcss": "4.1.17",
"@tanstack/react-query-devtools": "5.91.1",
"@types/better-sqlite3": "7.6.13",
"@types/cookie-parser": "1.4.10",
"@types/cors": "2.8.19",
"@types/crypto-js": "^4.2.2",
"@types/d3": "^7.4.3",
"@types/crypto-js": "4.2.2",
"@types/d3": "7.4.3",
"@types/express": "5.0.6",
"@types/express-session": "^1.18.2",
"@types/jmespath": "^0.15.2",
"@types/js-yaml": "4.0.9",
"@types/jsonwebtoken": "^9.0.10",
"@types/node": "24.10.1",
"@types/nprogress": "^0.2.3",
"@types/express-session": "1.18.2",
"@types/jmespath": "0.15.2",
"@types/jsonwebtoken": "9.0.10",
"@types/node": "24.10.2",
"@types/nodemailer": "7.0.4",
"@types/pg": "8.15.6",
"@types/nprogress": "0.2.3",
"@types/pg": "8.16.0",
"@types/react": "19.2.7",
"@types/react-dom": "19.2.3",
"@types/semver": "^7.7.1",
"@types/swagger-ui-express": "^4.1.8",
"@types/topojson-client": "^3.1.5",
"@types/semver": "7.7.1",
"@types/swagger-ui-express": "4.1.8",
"@types/topojson-client": "3.1.5",
"@types/ws": "8.18.1",
"babel-plugin-react-compiler": "^1.0.0",
"@types/yargs": "17.0.35",
"@types/js-yaml": "4.0.9",
"babel-plugin-react-compiler": "1.0.0",
"drizzle-kit": "0.31.8",
"esbuild": "0.27.1",
"esbuild-node-externals": "1.20.1",
"postcss": "^8",
"react-email": "4.3.2",
"tailwindcss": "^4.1.4",
"postcss": "8.5.6",
"prettier": "3.7.4",
"react-email": "5.0.7",
"tailwindcss": "4.1.17",
"tsc-alias": "1.8.16",
"tsx": "4.21.0",
"typescript": "^5",
"typescript-eslint": "^8.46.3"
},
"overrides": {
"emblor": {
"react": "19.0.0",
"react-dom": "19.0.0"
}
"typescript": "5.9.3",
"typescript-eslint": "8.49.0"
}
}
}


@@ -1,8 +1,8 @@
/** @type {import('postcss-load-config').Config} */
const config = {
plugins: {
"@tailwindcss/postcss": {},
},
"@tailwindcss/postcss": {}
}
};
export default config;

(10 binary image files not shown: screenshot images were added, removed, or replaced; sizes before and after range from 396 KiB to 713 KiB.)


@@ -2,13 +2,13 @@ import { hash, verify } from "@node-rs/argon2";
export async function verifyPassword(
password: string,
hash: string,
hash: string
): Promise<boolean> {
const validPassword = await verify(hash, password, {
memoryCost: 19456,
timeCost: 2,
outputLen: 32,
parallelism: 1,
parallelism: 1
});
return validPassword;
}
@@ -18,7 +18,7 @@ export async function hashPassword(password: string): Promise<string> {
memoryCost: 19456,
timeCost: 2,
outputLen: 32,
parallelism: 1,
parallelism: 1
});
return passwordHash;


@@ -4,10 +4,13 @@ export const passwordSchema = z
.string()
.min(8, { message: "Password must be at least 8 characters long" })
.max(128, { message: "Password must be at most 128 characters long" })
.regex(/^(?=.*?[A-Z])(?=.*?[a-z])(?=.*?[0-9])(?=.*?[~!`@#$%^&*()_\-+={}[\]|\\:;"'<>,.\/?]).*$/, {
message: `Your password must meet the following conditions:
.regex(
/^(?=.*?[A-Z])(?=.*?[a-z])(?=.*?[0-9])(?=.*?[~!`@#$%^&*()_\-+={}[\]|\\:;"'<>,.\/?]).*$/,
{
message: `Your password must meet the following conditions:
at least one uppercase English letter,
at least one lowercase English letter,
at least one digit,
at least one special character.`
});
}
);
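
A minimal usage sketch for the schema above (the import path is hypothetical; the sample password satisfies all four conditions):

import { passwordSchema } from "@server/auth/passwordSchema"; // hypothetical path

const result = passwordSchema.safeParse("Sup3r$ecret");
if (!result.success) {
    // Each failed rule surfaces as a zod issue with its message.
    console.log(result.error.issues.map((issue) => issue.message));
}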


@@ -1,6 +1,4 @@
import {
encodeHexLowerCase,
} from "@oslojs/encoding";
import { encodeHexLowerCase } from "@oslojs/encoding";
import { sha256 } from "@oslojs/crypto/sha2";
import { Newt, newts, newtSessions, NewtSession } from "@server/db";
import { db } from "@server/db";
@@ -10,25 +8,25 @@ export const EXPIRES = 1000 * 60 * 60 * 24 * 30;
export async function createNewtSession(
token: string,
newtId: string,
newtId: string
): Promise<NewtSession> {
const sessionId = encodeHexLowerCase(
sha256(new TextEncoder().encode(token)),
sha256(new TextEncoder().encode(token))
);
const session: NewtSession = {
sessionId: sessionId,
newtId,
expiresAt: new Date(Date.now() + EXPIRES).getTime(),
expiresAt: new Date(Date.now() + EXPIRES).getTime()
};
await db.insert(newtSessions).values(session);
return session;
}
export async function validateNewtSessionToken(
token: string,
token: string
): Promise<SessionValidationResult> {
const sessionId = encodeHexLowerCase(
sha256(new TextEncoder().encode(token)),
sha256(new TextEncoder().encode(token))
);
const result = await db
.select({ newt: newts, session: newtSessions })
@@ -45,14 +43,12 @@ export async function validateNewtSessionToken(
.where(eq(newtSessions.sessionId, session.sessionId));
return { session: null, newt: null };
}
if (Date.now() >= session.expiresAt - (EXPIRES / 2)) {
session.expiresAt = new Date(
Date.now() + EXPIRES,
).getTime();
if (Date.now() >= session.expiresAt - EXPIRES / 2) {
session.expiresAt = new Date(Date.now() + EXPIRES).getTime();
await db
.update(newtSessions)
.set({
expiresAt: session.expiresAt,
expiresAt: session.expiresAt
})
.where(eq(newtSessions.sessionId, session.sessionId));
}
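
The renewal branch above gives Newt sessions a sliding 30-day window: validation extends a session once it is past the halfway point of its lifetime. The rule in isolation (a sketch, names illustrative):

const EXPIRES = 1000 * 60 * 60 * 24 * 30; // 30 days in milliseconds

function shouldRenew(expiresAt: number, now: number = Date.now()): boolean {
    // Renew once less than half of the session lifetime remains.
    return now >= expiresAt - EXPIRES / 2;
}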


@@ -1,6 +1,4 @@
import {
encodeHexLowerCase,
} from "@oslojs/encoding";
import { encodeHexLowerCase } from "@oslojs/encoding";
import { sha256 } from "@oslojs/crypto/sha2";
import { Olm, olms, olmSessions, OlmSession } from "@server/db";
import { db } from "@server/db";
@@ -10,25 +8,25 @@ export const EXPIRES = 1000 * 60 * 60 * 24 * 30;
export async function createOlmSession(
token: string,
olmId: string,
olmId: string
): Promise<OlmSession> {
const sessionId = encodeHexLowerCase(
sha256(new TextEncoder().encode(token)),
sha256(new TextEncoder().encode(token))
);
const session: OlmSession = {
sessionId: sessionId,
olmId,
expiresAt: new Date(Date.now() + EXPIRES).getTime(),
expiresAt: new Date(Date.now() + EXPIRES).getTime()
};
await db.insert(olmSessions).values(session);
return session;
}
export async function validateOlmSessionToken(
token: string,
token: string
): Promise<SessionValidationResult> {
const sessionId = encodeHexLowerCase(
sha256(new TextEncoder().encode(token)),
sha256(new TextEncoder().encode(token))
);
const result = await db
.select({ olm: olms, session: olmSessions })
@@ -45,14 +43,12 @@ export async function validateOlmSessionToken(
.where(eq(olmSessions.sessionId, session.sessionId));
return { session: null, olm: null };
}
if (Date.now() >= session.expiresAt - (EXPIRES / 2)) {
session.expiresAt = new Date(
Date.now() + EXPIRES,
).getTime();
if (Date.now() >= session.expiresAt - EXPIRES / 2) {
session.expiresAt = new Date(Date.now() + EXPIRES).getTime();
await db
.update(olmSessions)
.set({
expiresAt: session.expiresAt,
expiresAt: session.expiresAt
})
.where(eq(olmSessions.sessionId, session.sessionId));
}


@@ -10,4 +10,4 @@ export async function initCleanup() {
// Handle process termination
process.on("SIGTERM", () => cleanup());
process.on("SIGINT", () => cleanup());
}
}

File diff suppressed because it is too large.


@@ -1708,4 +1708,4 @@
"Desert Box Turtle",
"African Striped Weasel"
]
}
}


@@ -215,42 +215,56 @@ export const sessionTransferToken = pgTable("sessionTransferToken", {
expiresAt: bigint("expiresAt", { mode: "number" }).notNull()
});
export const actionAuditLog = pgTable("actionAuditLog", {
id: serial("id").primaryKey(),
timestamp: bigint("timestamp", { mode: "number" }).notNull(), // this is EPOCH time in seconds
orgId: varchar("orgId")
.notNull()
.references(() => orgs.orgId, { onDelete: "cascade" }),
actorType: varchar("actorType", { length: 50 }).notNull(),
actor: varchar("actor", { length: 255 }).notNull(),
actorId: varchar("actorId", { length: 255 }).notNull(),
action: varchar("action", { length: 100 }).notNull(),
metadata: text("metadata")
}, (table) => ([
index("idx_actionAuditLog_timestamp").on(table.timestamp),
index("idx_actionAuditLog_org_timestamp").on(table.orgId, table.timestamp)
]));
export const actionAuditLog = pgTable(
"actionAuditLog",
{
id: serial("id").primaryKey(),
timestamp: bigint("timestamp", { mode: "number" }).notNull(), // this is EPOCH time in seconds
orgId: varchar("orgId")
.notNull()
.references(() => orgs.orgId, { onDelete: "cascade" }),
actorType: varchar("actorType", { length: 50 }).notNull(),
actor: varchar("actor", { length: 255 }).notNull(),
actorId: varchar("actorId", { length: 255 }).notNull(),
action: varchar("action", { length: 100 }).notNull(),
metadata: text("metadata")
},
(table) => [
index("idx_actionAuditLog_timestamp").on(table.timestamp),
index("idx_actionAuditLog_org_timestamp").on(
table.orgId,
table.timestamp
)
]
);
export const accessAuditLog = pgTable("accessAuditLog", {
id: serial("id").primaryKey(),
timestamp: bigint("timestamp", { mode: "number" }).notNull(), // this is EPOCH time in seconds
orgId: varchar("orgId")
.notNull()
.references(() => orgs.orgId, { onDelete: "cascade" }),
actorType: varchar("actorType", { length: 50 }),
actor: varchar("actor", { length: 255 }),
actorId: varchar("actorId", { length: 255 }),
resourceId: integer("resourceId"),
ip: varchar("ip", { length: 45 }),
type: varchar("type", { length: 100 }).notNull(),
action: boolean("action").notNull(),
location: text("location"),
userAgent: text("userAgent"),
metadata: text("metadata")
}, (table) => ([
index("idx_identityAuditLog_timestamp").on(table.timestamp),
index("idx_identityAuditLog_org_timestamp").on(table.orgId, table.timestamp)
]));
export const accessAuditLog = pgTable(
"accessAuditLog",
{
id: serial("id").primaryKey(),
timestamp: bigint("timestamp", { mode: "number" }).notNull(), // this is EPOCH time in seconds
orgId: varchar("orgId")
.notNull()
.references(() => orgs.orgId, { onDelete: "cascade" }),
actorType: varchar("actorType", { length: 50 }),
actor: varchar("actor", { length: 255 }),
actorId: varchar("actorId", { length: 255 }),
resourceId: integer("resourceId"),
ip: varchar("ip", { length: 45 }),
type: varchar("type", { length: 100 }).notNull(),
action: boolean("action").notNull(),
location: text("location"),
userAgent: text("userAgent"),
metadata: text("metadata")
},
(table) => [
index("idx_identityAuditLog_timestamp").on(table.timestamp),
index("idx_identityAuditLog_org_timestamp").on(
table.orgId,
table.timestamp
)
]
);
export type Limit = InferSelectModel<typeof limits>;
export type Account = InferSelectModel<typeof account>;
@@ -270,4 +284,4 @@ export type RemoteExitNodeSession = InferSelectModel<
export type ExitNodeOrg = InferSelectModel<typeof exitNodeOrgs>;
export type LoginPage = InferSelectModel<typeof loginPage>;
export type ActionAuditLog = InferSelectModel<typeof actionAuditLog>;
export type AccessAuditLog = InferSelectModel<typeof accessAuditLog>;
export type AccessAuditLog = InferSelectModel<typeof accessAuditLog>;


@@ -177,7 +177,7 @@ export const targetHealthCheck = pgTable("targetHealthCheck", {
hcMethod: varchar("hcMethod").default("GET"),
hcStatus: integer("hcStatus"), // http code
hcHealth: text("hcHealth").default("unknown"), // "unknown", "healthy", "unhealthy"
hcTlsServerName: text("hcTlsServerName"),
hcTlsServerName: text("hcTlsServerName")
});
export const exitNodes = pgTable("exitNodes", {


@@ -52,10 +52,7 @@ export async function getResourceByDomain(
resourceHeaderAuth,
eq(resourceHeaderAuth.resourceId, resources.resourceId)
)
.innerJoin(
orgs,
eq(orgs.orgId, resources.orgId)
)
.innerJoin(orgs, eq(orgs.orgId, resources.orgId))
.where(eq(resources.fullDomain, domain))
.limit(1);


@@ -8,7 +8,7 @@ const runMigrations = async () => {
console.log("Running migrations...");
try {
migrate(db as any, {
migrationsFolder: migrationsFolder,
migrationsFolder: migrationsFolder
});
console.log("Migrations completed successfully.");
} catch (error) {


@@ -29,7 +29,9 @@ export const certificates = sqliteTable("certificates", {
});
export const dnsChallenge = sqliteTable("dnsChallenges", {
dnsChallengeId: integer("dnsChallengeId").primaryKey({ autoIncrement: true }),
dnsChallengeId: integer("dnsChallengeId").primaryKey({
autoIncrement: true
}),
domain: text("domain").notNull(),
token: text("token").notNull(),
keyAuthorization: text("keyAuthorization").notNull(),
@@ -61,9 +63,7 @@ export const customers = sqliteTable("customers", {
});
export const subscriptions = sqliteTable("subscriptions", {
subscriptionId: text("subscriptionId")
.primaryKey()
.notNull(),
subscriptionId: text("subscriptionId").primaryKey().notNull(),
customerId: text("customerId")
.notNull()
.references(() => customers.customerId, { onDelete: "cascade" }),
@@ -75,7 +75,9 @@ export const subscriptions = sqliteTable("subscriptions", {
});
export const subscriptionItems = sqliteTable("subscriptionItems", {
subscriptionItemId: integer("subscriptionItemId").primaryKey({ autoIncrement: true }),
subscriptionItemId: integer("subscriptionItemId").primaryKey({
autoIncrement: true
}),
subscriptionId: text("subscriptionId")
.notNull()
.references(() => subscriptions.subscriptionId, {
@@ -129,7 +131,9 @@ export const limits = sqliteTable("limits", {
});
export const usageNotifications = sqliteTable("usageNotifications", {
notificationId: integer("notificationId").primaryKey({ autoIncrement: true }),
notificationId: integer("notificationId").primaryKey({
autoIncrement: true
}),
orgId: text("orgId")
.notNull()
.references(() => orgs.orgId, { onDelete: "cascade" }),
@@ -210,42 +214,56 @@ export const sessionTransferToken = sqliteTable("sessionTransferToken", {
expiresAt: integer("expiresAt").notNull()
});
export const actionAuditLog = sqliteTable("actionAuditLog", {
id: integer("id").primaryKey({ autoIncrement: true }),
timestamp: integer("timestamp").notNull(), // this is EPOCH time in seconds
orgId: text("orgId")
.notNull()
.references(() => orgs.orgId, { onDelete: "cascade" }),
actorType: text("actorType").notNull(),
actor: text("actor").notNull(),
actorId: text("actorId").notNull(),
action: text("action").notNull(),
metadata: text("metadata")
}, (table) => ([
index("idx_actionAuditLog_timestamp").on(table.timestamp),
index("idx_actionAuditLog_org_timestamp").on(table.orgId, table.timestamp)
]));
export const actionAuditLog = sqliteTable(
"actionAuditLog",
{
id: integer("id").primaryKey({ autoIncrement: true }),
timestamp: integer("timestamp").notNull(), // this is EPOCH time in seconds
orgId: text("orgId")
.notNull()
.references(() => orgs.orgId, { onDelete: "cascade" }),
actorType: text("actorType").notNull(),
actor: text("actor").notNull(),
actorId: text("actorId").notNull(),
action: text("action").notNull(),
metadata: text("metadata")
},
(table) => [
index("idx_actionAuditLog_timestamp").on(table.timestamp),
index("idx_actionAuditLog_org_timestamp").on(
table.orgId,
table.timestamp
)
]
);
export const accessAuditLog = sqliteTable("accessAuditLog", {
id: integer("id").primaryKey({ autoIncrement: true }),
timestamp: integer("timestamp").notNull(), // this is EPOCH time in seconds
orgId: text("orgId")
.notNull()
.references(() => orgs.orgId, { onDelete: "cascade" }),
actorType: text("actorType"),
actor: text("actor"),
actorId: text("actorId"),
resourceId: integer("resourceId"),
ip: text("ip"),
location: text("location"),
type: text("type").notNull(),
action: integer("action", { mode: "boolean" }).notNull(),
userAgent: text("userAgent"),
metadata: text("metadata")
}, (table) => ([
index("idx_identityAuditLog_timestamp").on(table.timestamp),
index("idx_identityAuditLog_org_timestamp").on(table.orgId, table.timestamp)
]));
export const accessAuditLog = sqliteTable(
"accessAuditLog",
{
id: integer("id").primaryKey({ autoIncrement: true }),
timestamp: integer("timestamp").notNull(), // this is EPOCH time in seconds
orgId: text("orgId")
.notNull()
.references(() => orgs.orgId, { onDelete: "cascade" }),
actorType: text("actorType"),
actor: text("actor"),
actorId: text("actorId"),
resourceId: integer("resourceId"),
ip: text("ip"),
location: text("location"),
type: text("type").notNull(),
action: integer("action", { mode: "boolean" }).notNull(),
userAgent: text("userAgent"),
metadata: text("metadata")
},
(table) => [
index("idx_identityAuditLog_timestamp").on(table.timestamp),
index("idx_identityAuditLog_org_timestamp").on(
table.orgId,
table.timestamp
)
]
);
export type Limit = InferSelectModel<typeof limits>;
export type Account = InferSelectModel<typeof account>;
@@ -265,4 +283,4 @@ export type RemoteExitNodeSession = InferSelectModel<
export type ExitNodeOrg = InferSelectModel<typeof exitNodeOrgs>;
export type LoginPage = InferSelectModel<typeof loginPage>;
export type ActionAuditLog = InferSelectModel<typeof actionAuditLog>;
export type AccessAuditLog = InferSelectModel<typeof accessAuditLog>;
export type AccessAuditLog = InferSelectModel<typeof accessAuditLog>;


@@ -18,10 +18,13 @@ function createEmailClient() {
host: emailConfig.smtp_host,
port: emailConfig.smtp_port,
secure: emailConfig.smtp_secure || false,
auth: (emailConfig.smtp_user && emailConfig.smtp_pass) ? {
user: emailConfig.smtp_user,
pass: emailConfig.smtp_pass
} : null
auth:
emailConfig.smtp_user && emailConfig.smtp_pass
? {
user: emailConfig.smtp_user,
pass: emailConfig.smtp_pass
}
: null
} as SMTPTransport.Options;
if (emailConfig.smtp_tls_reject_unauthorized !== undefined) {
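
The auth ternary above keeps SMTP credentials optional: nodemailer only receives an auth block when both a user and a password are configured, so unauthenticated relays keep working. A hedged sketch of the same pattern in isolation (host and port are illustrative):

import nodemailer from "nodemailer";
import type SMTPTransport from "nodemailer/lib/smtp-transport";

function buildTransport(user?: string, pass?: string) {
    const options = {
        host: "smtp.example.com",
        port: 587,
        secure: false,
        auth: user && pass ? { user, pass } : null // no credentials, no auth block
    } as SMTPTransport.Options;
    return nodemailer.createTransport(options);
}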


@@ -19,7 +19,13 @@ interface Props {
billingLink: string; // Link to billing page
}
export const NotifyUsageLimitApproaching = ({ email, limitName, currentUsage, usageLimit, billingLink }: Props) => {
export const NotifyUsageLimitApproaching = ({
email,
limitName,
currentUsage,
usageLimit,
billingLink
}: Props) => {
const previewText = `Your usage for ${limitName} is approaching the limit.`;
const usagePercentage = Math.round((currentUsage / usageLimit) * 100);
@@ -37,23 +43,32 @@ export const NotifyUsageLimitApproaching = ({ email, limitName, currentUsage, us
<EmailGreeting>Hi there,</EmailGreeting>
<EmailText>
We wanted to let you know that your usage for <strong>{limitName}</strong> is approaching your plan limit.
We wanted to let you know that your usage for{" "}
<strong>{limitName}</strong> is approaching your
plan limit.
</EmailText>
<EmailText>
<strong>Current Usage:</strong> {currentUsage} of {usageLimit} ({usagePercentage}%)
<strong>Current Usage:</strong> {currentUsage} of{" "}
{usageLimit} ({usagePercentage}%)
</EmailText>
<EmailText>
Once you reach your limit, some functionality may be restricted or your sites may disconnect until you upgrade your plan or your usage resets.
Once you reach your limit, some functionality may be
restricted or your sites may disconnect until you
upgrade your plan or your usage resets.
</EmailText>
<EmailText>
To avoid any interruption to your service, we recommend upgrading your plan or monitoring your usage closely. You can <a href={billingLink}>upgrade your plan here</a>.
To avoid any interruption to your service, we
recommend upgrading your plan or monitoring your
usage closely. You can{" "}
<a href={billingLink}>upgrade your plan here</a>.
</EmailText>
<EmailText>
If you have any questions or need assistance, please don't hesitate to reach out to our support team.
If you have any questions or need assistance, please
don't hesitate to reach out to our support team.
</EmailText>
<EmailFooter>


@@ -19,7 +19,13 @@ interface Props {
billingLink: string; // Link to billing page
}
export const NotifyUsageLimitReached = ({ email, limitName, currentUsage, usageLimit, billingLink }: Props) => {
export const NotifyUsageLimitReached = ({
email,
limitName,
currentUsage,
usageLimit,
billingLink
}: Props) => {
const previewText = `You've reached your ${limitName} usage limit - Action required`;
const usagePercentage = Math.round((currentUsage / usageLimit) * 100);
@@ -32,30 +38,48 @@ export const NotifyUsageLimitReached = ({ email, limitName, currentUsage, usageL
<EmailContainer>
<EmailLetterHead />
<EmailHeading>Usage Limit Reached - Action Required</EmailHeading>
<EmailHeading>
Usage Limit Reached - Action Required
</EmailHeading>
<EmailGreeting>Hi there,</EmailGreeting>
<EmailText>
You have reached your usage limit for <strong>{limitName}</strong>.
You have reached your usage limit for{" "}
<strong>{limitName}</strong>.
</EmailText>
<EmailText>
<strong>Current Usage:</strong> {currentUsage} of {usageLimit} ({usagePercentage}%)
<strong>Current Usage:</strong> {currentUsage} of{" "}
{usageLimit} ({usagePercentage}%)
</EmailText>
<EmailText>
<strong>Important:</strong> Your functionality may now be restricted and your sites may disconnect until you either upgrade your plan or your usage resets. To prevent any service interruption, immediate action is recommended.
<strong>Important:</strong> Your functionality may
now be restricted and your sites may disconnect
until you either upgrade your plan or your usage
resets. To prevent any service interruption,
immediate action is recommended.
</EmailText>
<EmailText>
<strong>What you can do:</strong>
<br /> <a href={billingLink} style={{ color: '#2563eb', fontWeight: 'bold' }}>Upgrade your plan immediately</a> to restore full functionality
<br /> Monitor your usage to stay within limits in the future
<br />{" "}
<a
href={billingLink}
style={{ color: "#2563eb", fontWeight: "bold" }}
>
Upgrade your plan immediately
</a>{" "}
to restore full functionality
<br /> Monitor your usage to stay within limits in
the future
</EmailText>
<EmailText>
If you have any questions or need immediate assistance, please contact our support team right away.
If you have any questions or need immediate
assistance, please contact our support team right
away.
</EmailText>
<EmailFooter>


@@ -5,7 +5,7 @@ import config from "@server/lib/config";
import logger from "@server/logger";
import {
errorHandlerMiddleware,
notFoundMiddleware,
notFoundMiddleware
} from "@server/middlewares";
import { authenticated, unauthenticated } from "#dynamic/routers/integration";
import { logIncomingMiddleware } from "./middlewares/logIncoming";


@@ -25,16 +25,22 @@ export const FeatureMeterIdsSandbox: Record<FeatureId, string> = {
};
export function getFeatureMeterId(featureId: FeatureId): string {
if (process.env.ENVIRONMENT == "prod" && process.env.SANDBOX_MODE !== "true") {
if (
process.env.ENVIRONMENT == "prod" &&
process.env.SANDBOX_MODE !== "true"
) {
return FeatureMeterIds[featureId];
} else {
return FeatureMeterIdsSandbox[featureId];
}
}
export function getFeatureIdByMetricId(metricId: string): FeatureId | undefined {
return (Object.entries(FeatureMeterIds) as [FeatureId, string][])
.find(([_, v]) => v === metricId)?.[0];
export function getFeatureIdByMetricId(
metricId: string
): FeatureId | undefined {
return (Object.entries(FeatureMeterIds) as [FeatureId, string][]).find(
([_, v]) => v === metricId
)?.[0];
}
export type FeaturePriceSet = {
@@ -43,7 +49,8 @@ export type FeaturePriceSet = {
[FeatureId.DOMAINS]?: string; // Optional since domains are not billed
};
export const standardFeaturePriceSet: FeaturePriceSet = { // Free tier matches the freeLimitSet
export const standardFeaturePriceSet: FeaturePriceSet = {
// Free tier matches the freeLimitSet
[FeatureId.SITE_UPTIME]: "price_1RrQc4D3Ee2Ir7WmaJGZ3MtF",
[FeatureId.USERS]: "price_1RrQeJD3Ee2Ir7WmgveP3xea",
[FeatureId.EGRESS_DATA_MB]: "price_1RrQXFD3Ee2Ir7WmvGDlgxQk",
@@ -51,7 +58,8 @@ export const standardFeaturePriceSet: FeaturePriceSet = { // Free tier matches t
[FeatureId.REMOTE_EXIT_NODES]: "price_1S46weD3Ee2Ir7Wm94KEHI4h"
};
export const standardFeaturePriceSetSandbox: FeaturePriceSet = { // Free tier matches the freeLimitSet
export const standardFeaturePriceSetSandbox: FeaturePriceSet = {
// Free tier matches the freeLimitSet
[FeatureId.SITE_UPTIME]: "price_1RefFBDCpkOb237BPrKZ8IEU",
[FeatureId.USERS]: "price_1ReNa4DCpkOb237Bc67G5muF",
[FeatureId.EGRESS_DATA_MB]: "price_1Rfp9LDCpkOb237BwuN5Oiu0",
@@ -60,15 +68,20 @@ export const standardFeaturePriceSetSandbox: FeaturePriceSet = { // Free tier ma
};
export function getStandardFeaturePriceSet(): FeaturePriceSet {
if (process.env.ENVIRONMENT == "prod" && process.env.SANDBOX_MODE !== "true") {
if (
process.env.ENVIRONMENT == "prod" &&
process.env.SANDBOX_MODE !== "true"
) {
return standardFeaturePriceSet;
} else {
return standardFeaturePriceSetSandbox;
}
}
export function getLineItems(featurePriceSet: FeaturePriceSet): Stripe.Checkout.SessionCreateParams.LineItem[] {
export function getLineItems(
featurePriceSet: FeaturePriceSet
): Stripe.Checkout.SessionCreateParams.LineItem[] {
return Object.entries(featurePriceSet).map(([featureId, priceId]) => ({
price: priceId,
price: priceId
}));
}
}
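
getFeatureIdByMetricId above is a reverse lookup over the FeatureMeterIds record. A self-contained illustration of the Object.entries().find() pattern it uses, with hypothetical feature and meter IDs:

enum DemoFeatureId {
    USERS = "users",
    DOMAINS = "domains"
}

const demoMeterIds: Record<DemoFeatureId, string> = {
    [DemoFeatureId.USERS]: "mtr_users",
    [DemoFeatureId.DOMAINS]: "mtr_domains"
};

function findFeatureByMeter(metricId: string): DemoFeatureId | undefined {
    // Object.entries() loses the key type, so the cast restores [key, value] pairs
    return (Object.entries(demoMeterIds) as [DemoFeatureId, string][]).find(
        ([_, v]) => v === metricId
    )?.[0];
}

findFeatureByMeter("mtr_users"); // DemoFeatureId.USERS
findFeatureByMeter("missing");   // undefined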


@@ -2,4 +2,4 @@ export * from "./limitSet";
export * from "./features";
export * from "./limitsService";
export * from "./getOrgTierData";
export * from "./createCustomer";
export * from "./createCustomer";


@@ -12,7 +12,7 @@ export const sandboxLimitSet: LimitSet = {
[FeatureId.USERS]: { value: 1, description: "Sandbox limit" },
[FeatureId.EGRESS_DATA_MB]: { value: 1000, description: "Sandbox limit" }, // 1 GB
[FeatureId.DOMAINS]: { value: 0, description: "Sandbox limit" },
[FeatureId.REMOTE_EXIT_NODES]: { value: 0, description: "Sandbox limit" },
[FeatureId.REMOTE_EXIT_NODES]: { value: 0, description: "Sandbox limit" }
};
export const freeLimitSet: LimitSet = {
@@ -29,7 +29,7 @@ export const freeLimitSet: LimitSet = {
export const subscribedLimitSet: LimitSet = {
[FeatureId.SITE_UPTIME]: {
value: 2232000,
description: "Contact us to increase soft limit.",
description: "Contact us to increase soft limit."
}, // 50 sites up for 31 days
[FeatureId.USERS]: {
value: 150,
@@ -38,7 +38,7 @@ export const subscribedLimitSet: LimitSet = {
[FeatureId.EGRESS_DATA_MB]: {
value: 12000000,
description: "Contact us to increase soft limit."
}, // 12000 GB
}, // 12000 GB
[FeatureId.DOMAINS]: {
value: 25,
description: "Contact us to increase soft limit."


@@ -1,22 +1,32 @@
export enum TierId {
STANDARD = "standard",
STANDARD = "standard"
}
export type TierPriceSet = {
[key in TierId]: string;
};
export const tierPriceSet: TierPriceSet = { // Free tier matches the freeLimitSet
[TierId.STANDARD]: "price_1RrQ9cD3Ee2Ir7Wmqdy3KBa0",
export const tierPriceSet: TierPriceSet = {
// Free tier matches the freeLimitSet
[TierId.STANDARD]: "price_1RrQ9cD3Ee2Ir7Wmqdy3KBa0"
};
export const tierPriceSetSandbox: TierPriceSet = { // Free tier matches the freeLimitSet
export const tierPriceSetSandbox: TierPriceSet = {
// Free tier matches the freeLimitSet
// when matching tier the keys closer to 0 index are matched first so list the tiers in descending order of value
[TierId.STANDARD]: "price_1RrAYJDCpkOb237By2s1P32m",
[TierId.STANDARD]: "price_1RrAYJDCpkOb237By2s1P32m"
};
export function getTierPriceSet(environment?: string, sandbox_mode?: boolean): TierPriceSet {
if ((process.env.ENVIRONMENT == "prod" && process.env.SANDBOX_MODE !== "true") || (environment === "prod" && sandbox_mode !== true)) { // THIS GETS LOADED CLIENT SIDE AND SERVER SIDE
export function getTierPriceSet(
environment?: string,
sandbox_mode?: boolean
): TierPriceSet {
if (
(process.env.ENVIRONMENT == "prod" &&
process.env.SANDBOX_MODE !== "true") ||
(environment === "prod" && sandbox_mode !== true)
) {
// THIS GETS LOADED CLIENT SIDE AND SERVER SIDE
return tierPriceSet;
} else {
return tierPriceSetSandbox;


@@ -19,7 +19,7 @@ import logger from "@server/logger";
import { sendToClient } from "#dynamic/routers/ws";
import { build } from "@server/build";
import { s3Client } from "@server/lib/s3";
import cache from "@server/lib/cache";
import cache from "@server/lib/cache";
interface StripeEvent {
identifier?: string;


@@ -34,7 +34,10 @@ export async function applyNewtDockerBlueprint(
return;
}
if (isEmptyObject(blueprint["proxy-resources"]) && isEmptyObject(blueprint["client-resources"])) {
if (
isEmptyObject(blueprint["proxy-resources"]) &&
isEmptyObject(blueprint["client-resources"])
) {
return;
}


@@ -84,12 +84,20 @@ export function processContainerLabels(containers: Container[]): {
// Process proxy resources
if (Object.keys(proxyResourceLabels).length > 0) {
processResourceLabels(proxyResourceLabels, container, result["proxy-resources"]);
processResourceLabels(
proxyResourceLabels,
container,
result["proxy-resources"]
);
}
// Process client resources
if (Object.keys(clientResourceLabels).length > 0) {
processResourceLabels(clientResourceLabels, container, result["client-resources"]);
processResourceLabels(
clientResourceLabels,
container,
result["client-resources"]
);
}
});
@@ -161,8 +169,7 @@ function processResourceLabels(
const finalTarget = { ...target };
if (!finalTarget.hostname) {
finalTarget.hostname =
container.name ||
container.hostname;
container.name || container.hostname;
}
if (!finalTarget.port) {
const containerPort =


@@ -1086,10 +1086,8 @@ async function getDomainId(
// remove the base domain of the domain
let subdomain = null;
if (domainSelection.type == "ns" || domainSelection.type == "wildcard") {
if (fullDomain != baseDomain) {
subdomain = fullDomain.replace(`.${baseDomain}`, "");
}
if (fullDomain != baseDomain) {
subdomain = fullDomain.replace(`.${baseDomain}`, "");
}
// Return the first valid domain


@@ -312,7 +312,7 @@ export const ConfigSchema = z
};
delete (data as any)["public-resources"];
}
// Merge private-resources into client-resources
if (data["private-resources"]) {
data["client-resources"] = {
@@ -321,10 +321,13 @@ export const ConfigSchema = z
};
delete (data as any)["private-resources"];
}
return data as {
"proxy-resources": Record<string, z.infer<typeof ResourceSchema>>;
"client-resources": Record<string, z.infer<typeof ClientResourceSchema>>;
"client-resources": Record<
string,
z.infer<typeof ClientResourceSchema>
>;
sites: Record<string, z.infer<typeof SiteSchema>>;
};
})


@@ -2,4 +2,4 @@ import NodeCache from "node-cache";
export const cache = new NodeCache({ stdTTL: 3600, checkperiod: 120 });
export default cache;
export default cache;
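
For reference, the shared cache above uses the node-cache API: entries default to a one-hour TTL (stdTTL: 3600) and expired keys are swept every 120 seconds (checkperiod). A small usage sketch with an illustrative key:

cache.set("org:123:tier", "standard");
cache.get<string>("org:123:tier"); // "standard" until the TTL lapses
cache.del("org:123:tier");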


@@ -166,7 +166,10 @@ export async function calculateUserClientsForOrgs(
];
// Get next available subnet
const newSubnet = await getNextAvailableClientSubnet(orgId);
const newSubnet = await getNextAvailableClientSubnet(
orgId,
transaction
);
if (!newSubnet) {
logger.warn(
`Skipping org ${orgId} for OLM ${olm.olmId} (user ${userId}): no available subnet found`


@@ -1,4 +1,6 @@
export async function getValidCertificatesForDomains(domains: Set<string>): Promise<
export async function getValidCertificatesForDomains(
domains: Set<string>
): Promise<
Array<{
id: number;
domain: string;
@@ -10,4 +12,4 @@ export async function getValidCertificatesForDomains(domains: Set<string>): Prom
}>
> {
return []; // stub
}
}


@@ -7,7 +7,10 @@ function dateToTimestamp(dateStr: string): number {
// Testable version of calculateCutoffTimestamp that accepts a "now" timestamp
// This matches the logic in cleanupLogs.ts but allows injecting the current time
function calculateCutoffTimestampWithNow(retentionDays: number, nowTimestamp: number): number {
function calculateCutoffTimestampWithNow(
retentionDays: number,
nowTimestamp: number
): number {
if (retentionDays === 9001) {
// Special case: data is erased at the end of the year following the year it was generated
// This means we delete logs from 2 years ago or older (logs from year Y are deleted after Dec 31 of year Y+1)
@@ -28,7 +31,7 @@ function testCalculateCutoffTimestamp() {
{
const now = dateToTimestamp("2025-12-06T12:00:00Z");
const result = calculateCutoffTimestampWithNow(30, now);
const expected = now - (30 * 24 * 60 * 60);
const expected = now - 30 * 24 * 60 * 60;
assertEquals(result, expected, "30 days retention calculation failed");
}
@@ -36,7 +39,7 @@ function testCalculateCutoffTimestamp() {
{
const now = dateToTimestamp("2025-06-15T00:00:00Z");
const result = calculateCutoffTimestampWithNow(90, now);
const expected = now - (90 * 24 * 60 * 60);
const expected = now - 90 * 24 * 60 * 60;
assertEquals(result, expected, "90 days retention calculation failed");
}
@@ -48,7 +51,11 @@ function testCalculateCutoffTimestamp() {
const now = dateToTimestamp("2025-12-06T12:00:00Z");
const result = calculateCutoffTimestampWithNow(9001, now);
const expected = dateToTimestamp("2024-01-01T00:00:00Z");
assertEquals(result, expected, "9001 retention (Dec 2025) - should cutoff at Jan 1, 2024");
assertEquals(
result,
expected,
"9001 retention (Dec 2025) - should cutoff at Jan 1, 2024"
);
}
// Test 4: Special case 9001 - January 2026
@@ -58,7 +65,11 @@ function testCalculateCutoffTimestamp() {
const now = dateToTimestamp("2026-01-15T12:00:00Z");
const result = calculateCutoffTimestampWithNow(9001, now);
const expected = dateToTimestamp("2025-01-01T00:00:00Z");
assertEquals(result, expected, "9001 retention (Jan 2026) - should cutoff at Jan 1, 2025");
assertEquals(
result,
expected,
"9001 retention (Jan 2026) - should cutoff at Jan 1, 2025"
);
}
// Test 5: Special case 9001 - December 31, 2025 at 23:59:59 UTC
@@ -68,7 +79,11 @@ function testCalculateCutoffTimestamp() {
const now = dateToTimestamp("2025-12-31T23:59:59Z");
const result = calculateCutoffTimestampWithNow(9001, now);
const expected = dateToTimestamp("2024-01-01T00:00:00Z");
assertEquals(result, expected, "9001 retention (Dec 31, 2025 23:59:59) - should cutoff at Jan 1, 2024");
assertEquals(
result,
expected,
"9001 retention (Dec 31, 2025 23:59:59) - should cutoff at Jan 1, 2024"
);
}
// Test 6: Special case 9001 - January 1, 2026 at 00:00:01 UTC
@@ -78,7 +93,11 @@ function testCalculateCutoffTimestamp() {
const now = dateToTimestamp("2026-01-01T00:00:01Z");
const result = calculateCutoffTimestampWithNow(9001, now);
const expected = dateToTimestamp("2025-01-01T00:00:00Z");
assertEquals(result, expected, "9001 retention (Jan 1, 2026 00:00:01) - should cutoff at Jan 1, 2025");
assertEquals(
result,
expected,
"9001 retention (Jan 1, 2026 00:00:01) - should cutoff at Jan 1, 2025"
);
}
// Test 7: Special case 9001 - Mid year 2025
@@ -87,7 +106,11 @@ function testCalculateCutoffTimestamp() {
const now = dateToTimestamp("2025-06-15T12:00:00Z");
const result = calculateCutoffTimestampWithNow(9001, now);
const expected = dateToTimestamp("2024-01-01T00:00:00Z");
assertEquals(result, expected, "9001 retention (mid 2025) - should cutoff at Jan 1, 2024");
assertEquals(
result,
expected,
"9001 retention (mid 2025) - should cutoff at Jan 1, 2024"
);
}
// Test 8: Special case 9001 - Early 2024
@@ -96,14 +119,18 @@ function testCalculateCutoffTimestamp() {
const now = dateToTimestamp("2024-02-01T12:00:00Z");
const result = calculateCutoffTimestampWithNow(9001, now);
const expected = dateToTimestamp("2023-01-01T00:00:00Z");
assertEquals(result, expected, "9001 retention (early 2024) - should cutoff at Jan 1, 2023");
assertEquals(
result,
expected,
"9001 retention (early 2024) - should cutoff at Jan 1, 2023"
);
}
// Test 9: 1 day retention
{
const now = dateToTimestamp("2025-12-06T12:00:00Z");
const result = calculateCutoffTimestampWithNow(1, now);
const expected = now - (1 * 24 * 60 * 60);
const expected = now - 1 * 24 * 60 * 60;
assertEquals(result, expected, "1 day retention calculation failed");
}
@@ -111,7 +138,7 @@ function testCalculateCutoffTimestamp() {
{
const now = dateToTimestamp("2025-12-06T12:00:00Z");
const result = calculateCutoffTimestampWithNow(365, now);
const expected = now - (365 * 24 * 60 * 60);
const expected = now - 365 * 24 * 60 * 60;
assertEquals(result, expected, "365 days retention calculation failed");
}
@@ -123,11 +150,19 @@ function testCalculateCutoffTimestamp() {
const cutoff = calculateCutoffTimestampWithNow(9001, now);
const logFromDec2023 = dateToTimestamp("2023-12-31T23:59:59Z");
const logFromJan2024 = dateToTimestamp("2024-01-01T00:00:00Z");
// Log from Dec 2023 should be before cutoff (deleted)
assertEquals(logFromDec2023 < cutoff, true, "Log from Dec 2023 should be deleted");
assertEquals(
logFromDec2023 < cutoff,
true,
"Log from Dec 2023 should be deleted"
);
// Log from Jan 2024 should be at or after cutoff (kept)
assertEquals(logFromJan2024 >= cutoff, true, "Log from Jan 2024 should be kept");
assertEquals(
logFromJan2024 >= cutoff,
true,
"Log from Jan 2024 should be kept"
);
}
// Test 12: Verify 9001 in 2026 - logs from 2024 should now be deleted
@@ -136,11 +171,19 @@ function testCalculateCutoffTimestamp() {
const cutoff = calculateCutoffTimestampWithNow(9001, now);
const logFromDec2024 = dateToTimestamp("2024-12-31T23:59:59Z");
const logFromJan2025 = dateToTimestamp("2025-01-01T00:00:00Z");
// Log from Dec 2024 should be before cutoff (deleted)
assertEquals(logFromDec2024 < cutoff, true, "Log from Dec 2024 should be deleted in 2026");
assertEquals(
logFromDec2024 < cutoff,
true,
"Log from Dec 2024 should be deleted in 2026"
);
// Log from Jan 2025 should be at or after cutoff (kept)
assertEquals(logFromJan2025 >= cutoff, true, "Log from Jan 2025 should be kept in 2026");
assertEquals(
logFromJan2025 >= cutoff,
true,
"Log from Jan 2025 should be kept in 2026"
);
}
// Test 13: Edge case - exactly at year boundary for 9001
@@ -149,7 +192,11 @@ function testCalculateCutoffTimestamp() {
const now = dateToTimestamp("2025-01-01T00:00:00Z");
const result = calculateCutoffTimestampWithNow(9001, now);
const expected = dateToTimestamp("2024-01-01T00:00:00Z");
assertEquals(result, expected, "9001 retention (Jan 1, 2025 00:00:00) - should cutoff at Jan 1, 2024");
assertEquals(
result,
expected,
"9001 retention (Jan 1, 2025 00:00:00) - should cutoff at Jan 1, 2024"
);
}
// Test 14: Verify data from 2024 is kept throughout 2025 when using 9001
@@ -157,18 +204,29 @@ function testCalculateCutoffTimestamp() {
{
// Running in June 2025
const nowJune2025 = dateToTimestamp("2025-06-15T12:00:00Z");
const cutoffJune2025 = calculateCutoffTimestampWithNow(9001, nowJune2025);
const cutoffJune2025 = calculateCutoffTimestampWithNow(
9001,
nowJune2025
);
const logFromJuly2024 = dateToTimestamp("2024-07-15T12:00:00Z");
// Log from July 2024 should be KEPT in June 2025
assertEquals(logFromJuly2024 >= cutoffJune2025, true, "Log from July 2024 should be kept in June 2025");
assertEquals(
logFromJuly2024 >= cutoffJune2025,
true,
"Log from July 2024 should be kept in June 2025"
);
// Running in January 2026
const nowJan2026 = dateToTimestamp("2026-01-15T12:00:00Z");
const cutoffJan2026 = calculateCutoffTimestampWithNow(9001, nowJan2026);
// Log from July 2024 should be DELETED in January 2026
assertEquals(logFromJuly2024 < cutoffJan2026, true, "Log from July 2024 should be deleted in Jan 2026");
assertEquals(
logFromJuly2024 < cutoffJan2026,
true,
"Log from July 2024 should be deleted in Jan 2026"
);
}
// Test 15: Verify the exact requirement - data from 2024 must be purged on December 31, 2025
@@ -176,16 +234,27 @@ function testCalculateCutoffTimestamp() {
// On Jan 1, 2026 (now 2026), data from 2024 can be deleted
{
const logFromMid2024 = dateToTimestamp("2024-06-15T12:00:00Z");
// Dec 31, 2025 23:59:59 - still 2025, log should be kept
const nowDec31_2025 = dateToTimestamp("2025-12-31T23:59:59Z");
const cutoffDec31 = calculateCutoffTimestampWithNow(9001, nowDec31_2025);
assertEquals(logFromMid2024 >= cutoffDec31, true, "Log from mid-2024 should be kept on Dec 31, 2025");
const cutoffDec31 = calculateCutoffTimestampWithNow(
9001,
nowDec31_2025
);
assertEquals(
logFromMid2024 >= cutoffDec31,
true,
"Log from mid-2024 should be kept on Dec 31, 2025"
);
// Jan 1, 2026 00:00:00 - now 2026, log can be deleted
const nowJan1_2026 = dateToTimestamp("2026-01-01T00:00:00Z");
const cutoffJan1 = calculateCutoffTimestampWithNow(9001, nowJan1_2026);
assertEquals(logFromMid2024 < cutoffJan1, true, "Log from mid-2024 should be deleted on Jan 1, 2026");
assertEquals(
logFromMid2024 < cutoffJan1,
true,
"Log from mid-2024 should be deleted on Jan 1, 2026"
);
}
console.log("All calculateCutoffTimestamp tests passed!");


@@ -2,7 +2,7 @@ import path from "path";
import { fileURLToPath } from "url";
// This is a placeholder value replaced by the build process
export const APP_VERSION = "1.13.0-rc.0";
export const APP_VERSION = "1.13.1";
export const __FILENAME = fileURLToPath(import.meta.url);
export const __DIRNAME = path.dirname(__FILENAME);


@@ -4,18 +4,20 @@ import { eq, and } from "drizzle-orm";
import { subdomainSchema } from "@server/lib/schemas";
import { fromError } from "zod-validation-error";
export type DomainValidationResult = {
success: true;
fullDomain: string;
subdomain: string | null;
} | {
success: false;
error: string;
};
export type DomainValidationResult =
| {
success: true;
fullDomain: string;
subdomain: string | null;
}
| {
success: false;
error: string;
};
/**
* Validates a domain and constructs the full domain based on domain type and subdomain.
*
*
* @param domainId - The ID of the domain to validate
* @param orgId - The organization ID to check domain access
* @param subdomain - Optional subdomain to append (for ns and wildcard domains)
@@ -34,7 +36,10 @@ export async function validateAndConstructDomain(
.where(eq(domains.domainId, domainId))
.leftJoin(
orgDomains,
and(eq(orgDomains.orgId, orgId), eq(orgDomains.domainId, domainId))
and(
eq(orgDomains.orgId, orgId),
eq(orgDomains.domainId, domainId)
)
);
// Check if domain exists
@@ -106,7 +111,7 @@ export async function validateAndConstructDomain(
} catch (error) {
return {
success: false,
error: `An error occurred while validating domain: ${error instanceof Error ? error.message : 'Unknown error'}`
error: `An error occurred while validating domain: ${error instanceof Error ? error.message : "Unknown error"}`
};
}
}
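
Because DomainValidationResult is a discriminated union on success, callers narrow it with a single check: fullDomain and subdomain exist only on the success branch, error only on the failure branch. A hedged usage sketch (argument order assumed from the doc comment above):

const result = await validateAndConstructDomain(domainId, orgId, "app");
if (result.success) {
    console.log(result.fullDomain, result.subdomain); // e.g. "app.example.com", "app"
} else {
    console.error(result.error);
}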


@@ -1,39 +1,39 @@
import crypto from 'crypto';
import crypto from "crypto";
export function encryptData(data: string, key: Buffer): string {
const algorithm = 'aes-256-gcm';
const iv = crypto.randomBytes(16);
const cipher = crypto.createCipheriv(algorithm, key, iv);
let encrypted = cipher.update(data, 'utf8', 'hex');
encrypted += cipher.final('hex');
const authTag = cipher.getAuthTag();
// Combine IV, auth tag, and encrypted data
return iv.toString('hex') + ':' + authTag.toString('hex') + ':' + encrypted;
const algorithm = "aes-256-gcm";
const iv = crypto.randomBytes(16);
const cipher = crypto.createCipheriv(algorithm, key, iv);
let encrypted = cipher.update(data, "utf8", "hex");
encrypted += cipher.final("hex");
const authTag = cipher.getAuthTag();
// Combine IV, auth tag, and encrypted data
return iv.toString("hex") + ":" + authTag.toString("hex") + ":" + encrypted;
}
// Helper function to decrypt data (you'll need this to read certificates)
export function decryptData(encryptedData: string, key: Buffer): string {
const algorithm = 'aes-256-gcm';
const parts = encryptedData.split(':');
if (parts.length !== 3) {
throw new Error('Invalid encrypted data format');
}
const iv = Buffer.from(parts[0], 'hex');
const authTag = Buffer.from(parts[1], 'hex');
const encrypted = parts[2];
const decipher = crypto.createDecipheriv(algorithm, key, iv);
decipher.setAuthTag(authTag);
let decrypted = decipher.update(encrypted, 'hex', 'utf8');
decrypted += decipher.final('utf8');
return decrypted;
const algorithm = "aes-256-gcm";
const parts = encryptedData.split(":");
if (parts.length !== 3) {
throw new Error("Invalid encrypted data format");
}
const iv = Buffer.from(parts[0], "hex");
const authTag = Buffer.from(parts[1], "hex");
const encrypted = parts[2];
const decipher = crypto.createDecipheriv(algorithm, key, iv);
decipher.setAuthTag(authTag);
let decrypted = decipher.update(encrypted, "hex", "utf8");
decrypted += decipher.final("utf8");
return decrypted;
}
// openssl rand -hex 32 > config/encryption.key
// openssl rand -hex 32 > config/encryption.key
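
A quick round-trip of the helpers above with a throwaway key (in production the 32-byte key would come from config/encryption.key, decoded from the hex that openssl writes):

const key = crypto.randomBytes(32); // aes-256-gcm requires a 32-byte key
const sealed = encryptData("hello world", key);
// sealed looks like "<iv hex>:<auth tag hex>:<ciphertext hex>"
decryptData(sealed, key); // "hello world"
// Tampering with any segment makes decipher.final() throw, via GCM's auth tag.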


@@ -30,4 +30,4 @@ export async function getCurrentExitNodeId(): Promise<number> {
}
}
return currentExitNodeId;
}
}


@@ -1,4 +1,4 @@
export * from "./exitNodes";
export * from "./exitNodeComms";
export * from "./subnet";
export * from "./getCurrentExitNodeId";
export * from "./getCurrentExitNodeId";


@@ -27,4 +27,4 @@ export async function getNextAvailableSubnet(): Promise<string> {
"/" +
subnet.split("/")[1];
return subnet;
}
}


@@ -30,4 +30,4 @@ export async function getCountryCodeForIp(
}
return;
}
}


@@ -33,7 +33,11 @@ export async function generateOidcRedirectUrl(
)
.limit(1);
if (res?.loginPage && res.loginPage.domainId && res.loginPage.fullDomain) {
if (
res?.loginPage &&
res.loginPage.domainId &&
res.loginPage.fullDomain
) {
baseUrl = `${method}://${res.loginPage.fullDomain}`;
}
}


@@ -4,7 +4,7 @@ import { assertEquals } from "@test/assert";
// Test cases
function testFindNextAvailableCidr() {
console.log("Running findNextAvailableCidr tests...");
// Test 0: Basic IPv4 allocation with a subnet in the wrong range
{
const existing = ["100.90.130.1/30", "100.90.128.4/30"];
@@ -23,7 +23,11 @@ function testFindNextAvailableCidr() {
{
const existing = ["10.0.0.0/16", "10.2.0.0/16"];
const result = findNextAvailableCidr(existing, 16, "10.0.0.0/8");
assertEquals(result, "10.1.0.0/16", "Finding gap between allocations failed");
assertEquals(
result,
"10.1.0.0/16",
"Finding gap between allocations failed"
);
}
// Test 3: No available space
@@ -33,7 +37,7 @@ function testFindNextAvailableCidr() {
assertEquals(result, null, "No available space test failed");
}
// Test 4: Empty existing
// Test 4: Empty existing
{
const existing: string[] = [];
const result = findNextAvailableCidr(existing, 30, "10.0.0.0/8");
@@ -137,4 +141,4 @@ try {
} catch (error) {
console.error("Test failed:", error);
process.exit(1);
}
}
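
findNextAvailableCidr itself is not shown in this diff; judging from the assertions above, it walks the range in blockSize-aligned steps and returns the first block that overlaps no existing allocation, or null when the range is exhausted. A simplified IPv4-only sketch consistent with those tests:

function ipToInt(ip: string): number {
    return ip
        .split(".")
        .reduce((acc, o) => ((acc << 8) | parseInt(o, 10)) >>> 0, 0);
}

function intToIp(n: number): string {
    return [24, 16, 8, 0].map((s) => (n >>> s) & 0xff).join(".");
}

// [first address, last address] of a CIDR block, aligned to its size
function cidrBounds(cidr: string): [number, number] {
    const [ip, bits] = cidr.split("/");
    const size = 2 ** (32 - parseInt(bits, 10));
    const start = Math.floor(ipToInt(ip) / size) * size;
    return [start, start + size - 1];
}

function findNextAvailableCidrSketch(
    existing: string[],
    blockSize: number,
    range: string
): string | null {
    const [rangeStart, rangeEnd] = cidrBounds(range);
    const step = 2 ** (32 - blockSize);
    const taken = existing.map(cidrBounds);
    for (let start = rangeStart; start + step - 1 <= rangeEnd; start += step) {
        const end = start + step - 1;
        // linear scan: return the first block that collides with nothing
        if (!taken.some(([s, e]) => start <= e && s <= end)) {
            return `${intToIp(start)}/${blockSize}`;
        }
    }
    return null;
}

findNextAvailableCidrSketch(["10.0.0.0/16", "10.2.0.0/16"], 16, "10.0.0.0/8"); // "10.1.0.0/16"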


@@ -116,6 +116,70 @@ function bigIntToIp(num: bigint, version: IPVersion): string {
}
}
/**
* Parses an endpoint string (ip:port) handling both IPv4 and IPv6 addresses.
* IPv6 addresses may be bracketed like [::1]:8080 or unbracketed like ::1:8080.
* For unbracketed IPv6, the last colon-separated segment is treated as the port.
*
* @param endpoint The endpoint string to parse (e.g., "192.168.1.1:8080" or "[::1]:8080" or "2607:fea8::1:8080")
* @returns An object with ip and port, or null if parsing fails
*/
export function parseEndpoint(
endpoint: string
): { ip: string; port: number } | null {
if (!endpoint) return null;
// Check for bracketed IPv6 format: [ip]:port
const bracketedMatch = endpoint.match(/^\[([^\]]+)\]:(\d+)$/);
if (bracketedMatch) {
const ip = bracketedMatch[1];
const port = parseInt(bracketedMatch[2], 10);
if (isNaN(port)) return null;
return { ip, port };
}
// Check if this looks like IPv6 (contains multiple colons)
const colonCount = (endpoint.match(/:/g) || []).length;
if (colonCount > 1) {
// This is IPv6 - the port is after the last colon
const lastColonIndex = endpoint.lastIndexOf(":");
const ip = endpoint.substring(0, lastColonIndex);
const portStr = endpoint.substring(lastColonIndex + 1);
const port = parseInt(portStr, 10);
if (isNaN(port)) return null;
return { ip, port };
}
// IPv4 format: ip:port
if (colonCount === 1) {
const [ip, portStr] = endpoint.split(":");
const port = parseInt(portStr, 10);
if (isNaN(port)) return null;
return { ip, port };
}
return null;
}
/**
* Formats an IP and port into a consistent endpoint string.
* IPv6 addresses are wrapped in brackets for proper parsing.
*
* @param ip The IP address (IPv4 or IPv6)
* @param port The port number
* @returns Formatted endpoint string
*/
export function formatEndpoint(ip: string, port: number): string {
// Check if this is IPv6 (contains colons)
if (ip.includes(":")) {
// Remove brackets if already present
const cleanIp = ip.replace(/^\[|\]$/g, "");
return `[${cleanIp}]:${port}`;
}
return `${ip}:${port}`;
}
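
Example round-trips for the two helpers above, using the formats their doc comments describe:

parseEndpoint("192.168.1.1:8080");  // { ip: "192.168.1.1", port: 8080 }
parseEndpoint("[::1]:8080");        // { ip: "::1", port: 8080 }
parseEndpoint("2607:fea8::1:8080"); // { ip: "2607:fea8::1", port: 8080 }
parseEndpoint("not-an-endpoint");   // null (no colon at all)

formatEndpoint("10.0.0.2", 51820);  // "10.0.0.2:51820"
formatEndpoint("::1", 51820);       // "[::1]:51820" (bracketed so it re-parses cleanly)
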
/**
* Converts CIDR to IP range
*/
@@ -244,9 +308,13 @@ export function isIpInCidr(ip: string, cidr: string): boolean {
}
export async function getNextAvailableClientSubnet(
orgId: string
orgId: string,
transaction: Transaction | typeof db = db
): Promise<string> {
const [org] = await db.select().from(orgs).where(eq(orgs.orgId, orgId));
const [org] = await transaction
.select()
.from(orgs)
.where(eq(orgs.orgId, orgId));
if (!org) {
throw new Error(`Organization with ID ${orgId} not found`);
@@ -256,14 +324,14 @@ export async function getNextAvailableClientSubnet(
throw new Error(`Organization with ID ${orgId} has no subnet defined`);
}
const existingAddressesSites = await db
const existingAddressesSites = await transaction
.select({
address: sites.address
})
.from(sites)
.where(and(isNotNull(sites.address), eq(sites.orgId, orgId)));
const existingAddressesClients = await db
const existingAddressesClients = await transaction
.select({
address: clients.subnet
})
@@ -359,10 +427,17 @@ export async function getNextAvailableOrgSubnet(): Promise<string> {
return subnet;
}
export function generateRemoteSubnets(allSiteResources: SiteResource[]): string[] {
export function generateRemoteSubnets(
allSiteResources: SiteResource[]
): string[] {
const remoteSubnets = allSiteResources
.filter((sr) => {
if (sr.mode === "cidr") return true;
if (sr.mode === "cidr") {
// check if its a valid CIDR using zod
const cidrSchema = z.union([z.cidrv4(), z.cidrv6()]);
const parseResult = cidrSchema.safeParse(sr.destination);
return parseResult.success;
}
if (sr.mode === "host") {
// check if its a valid IP using zod
const ipSchema = z.union([z.ipv4(), z.ipv6()]);
@@ -386,13 +461,12 @@ export function generateRemoteSubnets(allSiteResources: SiteResource[]): string[
export type Alias = { alias: string | null; aliasAddress: string | null };
export function generateAliasConfig(allSiteResources: SiteResource[]): Alias[] {
let aliasConfigs = allSiteResources
return allSiteResources
.filter((sr) => sr.alias && sr.aliasAddress && sr.mode == "host")
.map((sr) => ({
alias: sr.alias,
aliasAddress: sr.aliasAddress
}));
return aliasConfigs;
}
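
Note the transaction parameter threaded into getNextAvailableClientSubnet above (defaulting to db). A hypothetical caller showing why that matters: running the subnet lookup and the subsequent write inside one drizzle transaction keeps two concurrent requests from being handed the same subnet.

async function allocateClientSubnet(orgId: string): Promise<string> {
    return db.transaction(async (trx) => {
        const subnet = await getNextAvailableClientSubnet(orgId, trx);
        // ...insert or update the client row with this subnet via trx here...
        return subnet;
    });
}
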
export type SubnetProxyTarget = {


@@ -14,4 +14,4 @@ export async function logAccessAudit(data: {
requestIp?: string;
}) {
return;
}
}


@@ -14,7 +14,8 @@ export const configSchema = z
.object({
app: z
.object({
dashboard_url: z.url()
dashboard_url: z
.url()
.pipe(z.url())
.transform((url) => url.toLowerCase())
.optional(),
@@ -255,7 +256,10 @@ export const configSchema = z
.object({
block_size: z.number().positive().gt(0).optional().default(24),
subnet_group: z.string().optional().default("100.90.128.0/24"),
utility_subnet_group: z.string().optional().default("100.96.128.0/24") //just hardcode this for now as well
utility_subnet_group: z
.string()
.optional()
.default("100.96.128.0/24") //just hardcode this for now as well
})
.optional()
.default({


@@ -24,7 +24,7 @@ import {
deletePeer as newtDeletePeer
} from "@server/routers/newt/peers";
import {
initPeerAddHandshake as holepunchSiteAdd,
initPeerAddHandshake,
deletePeer as olmDeletePeer
} from "@server/routers/olm/peers";
import { sendToExitNode } from "#dynamic/lib/exitNodes";
@@ -33,6 +33,8 @@ import {
generateAliasConfig,
generateRemoteSubnets,
generateSubnetProxyTargets,
parseEndpoint,
formatEndpoint
} from "@server/lib/ip";
import {
addPeerData,
@@ -109,21 +111,22 @@ export async function getClientSiteResourceAccess(
const directClientIds = allClientSiteResources.map((row) => row.clientId);
// Get full client details for directly associated clients
const directClients = directClientIds.length > 0
? await trx
.select({
clientId: clients.clientId,
pubKey: clients.pubKey,
subnet: clients.subnet
})
.from(clients)
.where(
and(
inArray(clients.clientId, directClientIds),
eq(clients.orgId, siteResource.orgId) // filter by org to prevent cross-org associations
const directClients =
directClientIds.length > 0
? await trx
.select({
clientId: clients.clientId,
pubKey: clients.pubKey,
subnet: clients.subnet
})
.from(clients)
.where(
and(
inArray(clients.clientId, directClientIds),
eq(clients.orgId, siteResource.orgId) // filter by org to prevent cross-org associations
)
)
)
: [];
: [];
// Merge user-based clients with directly associated clients
const allClientsMap = new Map(
@@ -474,7 +477,7 @@ async function handleMessagesForSiteClients(
}
if (isAdd) {
await holepunchSiteAdd(
await initPeerAddHandshake(
// this will kick off the add peer process for the client
client.clientId,
{
@@ -541,6 +544,17 @@ export async function updateClientSiteDestinations(
continue;
}
// Parse the endpoint properly for both IPv4 and IPv6
const parsedEndpoint = parseEndpoint(
site.clientSitesAssociationsCache.endpoint
);
if (!parsedEndpoint) {
logger.warn(
`Failed to parse endpoint ${site.clientSitesAssociationsCache.endpoint}, skipping`
);
continue;
}
// find the destinations in the array
let destinations = exitNodeDestinations.find(
(d) => d.reachableAt === site.exitNodes?.reachableAt
@@ -552,13 +566,8 @@ export async function updateClientSiteDestinations(
exitNodeId: site.exitNodes?.exitNodeId || 0,
type: site.exitNodes?.type || "",
name: site.exitNodes?.name || "",
sourceIp:
site.clientSitesAssociationsCache.endpoint.split(":")[0] ||
"",
sourcePort:
parseInt(
site.clientSitesAssociationsCache.endpoint.split(":")[1]
) || 0,
sourceIp: parsedEndpoint.ip,
sourcePort: parsedEndpoint.port,
destinations: [
{
destinationIP: site.sites.subnet.split("/")[0],
@@ -701,11 +710,46 @@ async function handleSubnetProxyTargetUpdates(
}
for (const client of removedClients) {
// Check if this client still has access to another resource on this site with the same destination
const destinationStillInUse = await trx
.select()
.from(siteResources)
.innerJoin(
clientSiteResourcesAssociationsCache,
eq(
clientSiteResourcesAssociationsCache.siteResourceId,
siteResources.siteResourceId
)
)
.where(
and(
eq(
clientSiteResourcesAssociationsCache.clientId,
client.clientId
),
eq(siteResources.siteId, siteResource.siteId),
eq(
siteResources.destination,
siteResource.destination
),
ne(
siteResources.siteResourceId,
siteResource.siteResourceId
)
)
);
// Only remove remote subnet if no other resource uses the same destination
const remoteSubnetsToRemove =
destinationStillInUse.length > 0
? []
: generateRemoteSubnets([siteResource]);
olmJobs.push(
removePeerData(
client.clientId,
siteResource.siteId,
generateRemoteSubnets([siteResource]),
remoteSubnetsToRemove,
generateAliasConfig([siteResource])
)
);
@@ -783,7 +827,10 @@ export async function rebuildClientAssociationsFromClient(
.from(roleSiteResources)
.innerJoin(
siteResources,
eq(siteResources.siteResourceId, roleSiteResources.siteResourceId)
eq(
siteResources.siteResourceId,
roleSiteResources.siteResourceId
)
)
.where(
and(
@@ -908,28 +955,8 @@ export async function rebuildClientAssociationsFromClient(
/////////// Send messages ///////////
// Get the olm for this client
const [olm] = await trx
.select({ olmId: olms.olmId })
.from(olms)
.where(eq(olms.clientId, client.clientId))
.limit(1);
if (!olm) {
logger.warn(
`Olm not found for client ${client.clientId}, skipping peer updates`
);
return;
}
// Handle messages for sites being added
await handleMessagesForClientSites(
client,
olm.olmId,
sitesToAdd,
sitesToRemove,
trx
);
await handleMessagesForClientSites(client, sitesToAdd, sitesToRemove, trx);
// Handle subnet proxy target updates for resources
await handleMessagesForClientResources(
@@ -949,11 +976,26 @@ async function handleMessagesForClientSites(
userId: string | null;
orgId: string;
},
olmId: string,
sitesToAdd: number[],
sitesToRemove: number[],
trx: Transaction | typeof db = db
): Promise<void> {
// Get the olm for this client
const [olm] = await trx
.select({ olmId: olms.olmId })
.from(olms)
.where(eq(olms.clientId, client.clientId))
.limit(1);
if (!olm) {
logger.warn(
`Olm not found for client ${client.clientId}, skipping peer updates`
);
return;
}
const olmId = olm.olmId;
if (!client.subnet || !client.pubKey) {
logger.warn(
`Client ${client.clientId} missing subnet or pubKey, skipping peer updates`
@@ -974,9 +1016,9 @@ async function handleMessagesForClientSites(
.leftJoin(newts, eq(sites.siteId, newts.siteId))
.where(inArray(sites.siteId, allSiteIds));
let newtJobs: Promise<any>[] = [];
let olmJobs: Promise<any>[] = [];
let exitNodeJobs: Promise<any>[] = [];
const newtJobs: Promise<any>[] = [];
const olmJobs: Promise<any>[] = [];
const exitNodeJobs: Promise<any>[] = [];
for (const siteData of sitesData) {
const site = siteData.sites;
@@ -1038,7 +1080,7 @@ async function handleMessagesForClientSites(
continue;
}
await holepunchSiteAdd(
await initPeerAddHandshake(
// this will kick off the add peer process for the client
client.clientId,
{
@@ -1083,18 +1125,8 @@ async function handleMessagesForClientResources(
resourcesToRemove: number[],
trx: Transaction | typeof db = db
): Promise<void> {
// Group resources by site
const resourcesBySite = new Map<number, SiteResource[]>();
for (const resource of allNewResources) {
if (!resourcesBySite.has(resource.siteId)) {
resourcesBySite.set(resource.siteId, []);
}
resourcesBySite.get(resource.siteId)!.push(resource);
}
let proxyJobs: Promise<any>[] = [];
let olmJobs: Promise<any>[] = [];
const proxyJobs: Promise<any>[] = [];
const olmJobs: Promise<any>[] = [];
// Handle additions
if (resourcesToAdd.length > 0) {
@@ -1213,12 +1245,47 @@ async function handleMessagesForClientResources(
}
try {
// Check if this client still has access to another resource on this site with the same destination
const destinationStillInUse = await trx
.select()
.from(siteResources)
.innerJoin(
clientSiteResourcesAssociationsCache,
eq(
clientSiteResourcesAssociationsCache.siteResourceId,
siteResources.siteResourceId
)
)
.where(
and(
eq(
clientSiteResourcesAssociationsCache.clientId,
client.clientId
),
eq(siteResources.siteId, resource.siteId),
eq(
siteResources.destination,
resource.destination
),
ne(
siteResources.siteResourceId,
resource.siteResourceId
)
)
);
// Only remove remote subnet if no other resource uses the same destination
const remoteSubnetsToRemove =
destinationStillInUse.length > 0
? []
: generateRemoteSubnets([resource]);
// Remove peer data from olm
olmJobs.push(
removePeerData(
client.clientId,
resource.siteId,
generateRemoteSubnets([resource]),
remoteSubnetsToRemove,
generateAliasConfig([resource])
)
);


@@ -1,8 +1,8 @@
export enum AudienceIds {
SignUps = "",
Subscribed = "",
Churned = "",
Newsletter = ""
SignUps = "",
Subscribed = "",
Churned = "",
Newsletter = ""
}
let resend;


@@ -3,14 +3,14 @@ import { Response } from "express";
export const response = <T>(
res: Response,
{ data, success, error, message, status }: ResponseT<T>,
{ data, success, error, message, status }: ResponseT<T>
) => {
return res.status(status).send({
data,
success,
error,
message,
status,
status
});
};


@@ -1,5 +1,5 @@
import { S3Client } from "@aws-sdk/client-s3";
export const s3Client = new S3Client({
region: process.env.S3_REGION || "us-east-1",
region: process.env.S3_REGION || "us-east-1"
});


@@ -6,7 +6,7 @@ let serverIp: string | null = null;
const services = [
"https://checkip.amazonaws.com",
"https://ifconfig.io/ip",
"https://api.ipify.org",
"https://api.ipify.org"
];
export async function fetchServerIp() {
@@ -17,7 +17,9 @@ export async function fetchServerIp() {
logger.debug("Detected public IP: " + serverIp);
return;
} catch (err: any) {
console.warn(`Failed to fetch server IP from ${url}: ${err.message || err.code}`);
console.warn(
`Failed to fetch server IP from ${url}: ${err.message || err.code}`
);
}
}


@@ -1,8 +1,7 @@
export default function stoi(val: any) {
if (typeof val === "string") {
return parseInt(val);
return parseInt(val);
} else {
return val;
}
else {
return val;
}
}
}


@@ -2,9 +2,9 @@ import { PostHog } from "posthog-node";
import config from "./config";
import { getHostMeta } from "./hostMeta";
import logger from "@server/logger";
import { apiKeys, db, roles } from "@server/db";
import { apiKeys, db, roles, siteResources } from "@server/db";
import { sites, users, orgs, resources, clients, idp } from "@server/db";
import { eq, count, notInArray, and } from "drizzle-orm";
import { eq, count, notInArray, and, isNotNull, isNull } from "drizzle-orm";
import { APP_VERSION } from "./consts";
import crypto from "crypto";
import { UserType } from "@server/types/UserTypes";
@@ -25,7 +25,7 @@ class TelemetryClient {
return;
}
if (build !== "oss") {
if (build === "saas") {
return;
}
@@ -41,14 +41,18 @@ class TelemetryClient {
this.client?.shutdown();
});
this.sendStartupEvents().catch((err) => {
logger.error("Failed to send startup telemetry:", err);
});
this.sendStartupEvents()
.catch((err) => {
logger.error("Failed to send startup telemetry:", err);
})
.then(() => {
logger.debug("Successfully sent startup telemetry data");
});
this.startAnalyticsInterval();
logger.info(
"Pangolin now gathers anonymous usage data to help us better understand how the software is used and guide future improvements and feature development. You can find more details, including instructions for opting out of this anonymous data collection, at: https://docs.pangolin.net/telemetry"
"Pangolin gathers anonymous usage data to help us better understand how the software is used and guide future improvements and feature development. You can find more details, including instructions for opting out of this anonymous data collection, at: https://docs.pangolin.net/telemetry"
);
} else if (!this.enabled) {
logger.info(
@@ -60,9 +64,13 @@ class TelemetryClient {
private startAnalyticsInterval() {
this.intervalId = setInterval(
() => {
this.collectAndSendAnalytics().catch((err) => {
logger.error("Failed to collect analytics:", err);
});
this.collectAndSendAnalytics()
.catch((err) => {
logger.error("Failed to collect analytics:", err);
})
.then(() => {
logger.debug("Successfully sent analytics data");
});
},
48 * 60 * 60 * 1000
);
@@ -99,9 +107,14 @@ class TelemetryClient {
const [resourcesCount] = await db
.select({ count: count() })
.from(resources);
const [clientsCount] = await db
const [userDevicesCount] = await db
.select({ count: count() })
.from(clients);
.from(clients)
.where(isNotNull(clients.userId));
const [machineClients] = await db
.select({ count: count() })
.from(clients)
.where(isNull(clients.userId));
const [idpCount] = await db.select({ count: count() }).from(idp);
const [onlineSitesCount] = await db
.select({ count: count() })
@@ -146,6 +159,24 @@ class TelemetryClient {
const supporterKey = config.getSupporterData();
const allPrivateResources = await db.select().from(siteResources);
const numPrivResources = allPrivateResources.length;
let numPrivResourceAliases = 0;
let numPrivResourceHosts = 0;
let numPrivResourceCidr = 0;
for (const res of allPrivateResources) {
if (res.mode === "host") {
numPrivResourceHosts += 1;
} else if (res.mode === "cidr") {
numPrivResourceCidr += 1;
}
if (res.alias) {
numPrivResourceAliases += 1;
}
}
return {
numSites: sitesCount.count,
numUsers: usersCount.count,
@@ -153,7 +184,11 @@ class TelemetryClient {
numUsersOidc: usersOidcCount.count,
numOrganizations: orgsCount.count,
numResources: resourcesCount.count,
numClients: clientsCount.count,
numPrivateResources: numPrivResources,
numPrivateResourceAliases: numPrivResourceAliases,
numPrivateResourceHosts: numPrivResourceHosts,
numUserDevices: userDevicesCount.count,
numMachineClients: machineClients.count,
numIdentityProviders: idpCount.count,
numSitesOnline: onlineSitesCount.count,
resources: resourceDetails,
@@ -196,7 +231,7 @@ class TelemetryClient {
logger.debug("Sending enterprise startup telemetry payload:", {
payload
});
// this.client.capture(payload);
this.client.capture(payload);
}
if (build === "oss") {
@@ -246,7 +281,12 @@ class TelemetryClient {
num_users_oidc: stats.numUsersOidc,
num_organizations: stats.numOrganizations,
num_resources: stats.numResources,
num_clients: stats.numClients,
num_private_resources: stats.numPrivateResources,
num_private_resource_aliases:
stats.numPrivateResourceAliases,
num_private_resource_hosts: stats.numPrivateResourceHosts,
num_user_devices: stats.numUserDevices,
num_machine_clients: stats.numMachineClients,
num_identity_providers: stats.numIdentityProviders,
num_sites_online: stats.numSitesOnline,
num_resources_sso_enabled: stats.resources.filter(


@@ -195,7 +195,9 @@ export class TraefikConfigManager {
state.set(domain, {
exists: certExists && keyExists,
lastModified: lastModified
? Math.floor(lastModified.getTime() / 1000)
: null,
expiresAt,
wildcard
});
@@ -464,7 +466,9 @@ export class TraefikConfigManager {
config.getRawConfig().traefik.site_types,
build == "oss", // filter out the namespace domains in open source
build != "oss", // generate the login pages on the cloud and hybrid,
build == "saas" ? false : config.getRawConfig().traefik.allow_raw_resources // dont allow raw resources on saas otherwise use config
build == "saas"
? false
: config.getRawConfig().traefik.allow_raw_resources // dont allow raw resources on saas otherwise use config
);
const domains = new Set<string>();
@@ -786,29 +790,30 @@ export class TraefikConfigManager {
"utf8"
);
logger.info(
`Certificate updated for domain: ${cert.domain}${cert.wildcard ? " (wildcard)" : ""}`
);
}
// Always update expiry tracking when we fetch a certificate,
// even if the cert content didn't change
if (cert.expiresAt) {
const expiresAtPath = path.join(domainDir, ".expires_at");
fs.writeFileSync(
expiresAtPath,
cert.expiresAt.toString(),
"utf8"
);
}
// Update local state tracking
this.lastLocalCertificateState.set(cert.domain, {
exists: true,
lastModified: Math.floor(Date.now() / 1000),
expiresAt: cert.expiresAt,
wildcard: cert.wildcard
});
// Always ensure the config entry exists and is up to date
const certEntry = {
certFile: certPath,

View File
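
The TraefikConfigManager hunk moves the `.expires_at` write and the local state update out of the "certificate content changed" branch, so expiry tracking is refreshed on every fetch even when the PEM bytes are identical. A condensed sketch of that pattern, with the surrounding class and fetch logic omitted:

```typescript
// Sketch of the "always refresh expiry tracking" pattern from the hunk.
// The state-map shape follows the diff; expiresAt is treated as a unix
// timestamp here, which is an assumption.
import fs from "fs";
import path from "path";

type CertState = {
    exists: boolean;
    lastModified: number | null;
    expiresAt: number | null;
    wildcard: boolean;
};

function recordCertificate(
    state: Map<string, CertState>,
    domainDir: string,
    cert: { domain: string; expiresAt: number | null; wildcard: boolean }
) {
    // Runs on every fetch, not only when the PEM content changed, so a
    // renewed-but-identical certificate still gets its expiry updated.
    if (cert.expiresAt) {
        const expiresAtPath = path.join(domainDir, ".expires_at");
        fs.writeFileSync(expiresAtPath, cert.expiresAt.toString(), "utf8");
    }
    state.set(cert.domain, {
        exists: true,
        lastModified: Math.floor(Date.now() / 1000), // unix seconds, as in the diff
        expiresAt: cert.expiresAt,
        wildcard: cert.wildcard
    });
}
```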

@@ -1 +1 @@
export * from "./getTraefikConfig";
export * from "./getTraefikConfig";

View File

@@ -2,234 +2,249 @@ import { assertEquals } from "@test/assert";
import { isDomainCoveredByWildcard } from "./TraefikConfigManager";
function runTests() {
console.log("Running wildcard domain coverage tests...");
// Test case 1: Basic wildcard certificate at example.com
const basicWildcardCerts = new Map([
["example.com", { exists: true, wildcard: true }]
]);
// Should match first-level subdomains
assertEquals(
isDomainCoveredByWildcard("level1.example.com", basicWildcardCerts),
true,
"Wildcard cert at example.com should match level1.example.com"
);
assertEquals(
isDomainCoveredByWildcard("api.example.com", basicWildcardCerts),
true,
"Wildcard cert at example.com should match api.example.com"
);
assertEquals(
isDomainCoveredByWildcard("www.example.com", basicWildcardCerts),
true,
"Wildcard cert at example.com should match www.example.com"
);
// Should match the root domain (exact match)
assertEquals(
isDomainCoveredByWildcard("example.com", basicWildcardCerts),
true,
"Wildcard cert at example.com should match example.com itself"
);
// Should NOT match second-level subdomains
assertEquals(
isDomainCoveredByWildcard(
"level2.level1.example.com",
basicWildcardCerts
),
false,
"Wildcard cert at example.com should NOT match level2.level1.example.com"
);
assertEquals(
isDomainCoveredByWildcard(
"deep.nested.subdomain.example.com",
basicWildcardCerts
),
false,
"Wildcard cert at example.com should NOT match deep.nested.subdomain.example.com"
);
// Should NOT match different domains
assertEquals(
isDomainCoveredByWildcard("test.otherdomain.com", basicWildcardCerts),
false,
"Wildcard cert at example.com should NOT match test.otherdomain.com"
);
assertEquals(
isDomainCoveredByWildcard("notexample.com", basicWildcardCerts),
false,
"Wildcard cert at example.com should NOT match notexample.com"
);
// Test case 2: Multiple wildcard certificates
const multipleWildcardCerts = new Map([
["example.com", { exists: true, wildcard: true }],
["test.org", { exists: true, wildcard: true }],
["api.service.net", { exists: true, wildcard: true }]
]);
assertEquals(
isDomainCoveredByWildcard("app.example.com", multipleWildcardCerts),
true,
"Should match subdomain of first wildcard cert"
);
assertEquals(
isDomainCoveredByWildcard("staging.test.org", multipleWildcardCerts),
true,
"Should match subdomain of second wildcard cert"
);
assertEquals(
isDomainCoveredByWildcard("v1.api.service.net", multipleWildcardCerts),
true,
"Should match subdomain of third wildcard cert"
);
assertEquals(
isDomainCoveredByWildcard(
"deep.nested.api.service.net",
multipleWildcardCerts
),
false,
"Should NOT match multi-level subdomain of third wildcard cert"
);
// Test exact domain matches for multiple certs
assertEquals(
isDomainCoveredByWildcard("example.com", multipleWildcardCerts),
true,
"Should match exact domain of first wildcard cert"
);
assertEquals(
isDomainCoveredByWildcard("test.org", multipleWildcardCerts),
true,
"Should match exact domain of second wildcard cert"
);
assertEquals(
isDomainCoveredByWildcard("api.service.net", multipleWildcardCerts),
true,
"Should match exact domain of third wildcard cert"
);
// Test case 3: Non-wildcard certificates (should not match anything)
const nonWildcardCerts = new Map([
["example.com", { exists: true, wildcard: false }],
["specific.domain.com", { exists: true, wildcard: false }]
]);
assertEquals(
isDomainCoveredByWildcard("sub.example.com", nonWildcardCerts),
false,
"Non-wildcard cert should not match subdomains"
);
assertEquals(
isDomainCoveredByWildcard("example.com", nonWildcardCerts),
false,
"Non-wildcard cert should not match even exact domain via this function"
);
// Test case 4: Non-existent certificates (should not match)
const nonExistentCerts = new Map([
["example.com", { exists: false, wildcard: true }],
["missing.com", { exists: false, wildcard: true }]
]);
assertEquals(
isDomainCoveredByWildcard("sub.example.com", nonExistentCerts),
false,
"Non-existent wildcard cert should not match"
);
// Test case 5: Edge cases with special domain names
const specialDomainCerts = new Map([
["localhost", { exists: true, wildcard: true }],
["127-0-0-1.nip.io", { exists: true, wildcard: true }],
["xn--e1afmkfd.xn--p1ai", { exists: true, wildcard: true }] // IDN domain
]);
assertEquals(
isDomainCoveredByWildcard("app.localhost", specialDomainCerts),
true,
"Should match subdomain of localhost wildcard"
);
assertEquals(
isDomainCoveredByWildcard("test.127-0-0-1.nip.io", specialDomainCerts),
true,
"Should match subdomain of nip.io wildcard"
);
assertEquals(
isDomainCoveredByWildcard(
"sub.xn--e1afmkfd.xn--p1ai",
specialDomainCerts
),
true,
"Should match subdomain of IDN wildcard"
);
// Test case 6: Empty input and edge cases
const emptyCerts = new Map();
assertEquals(
isDomainCoveredByWildcard("any.domain.com", emptyCerts),
false,
"Empty certificate map should not match any domain"
);
// Test case 7: Domains with single character components
const singleCharCerts = new Map([
["a.com", { exists: true, wildcard: true }],
["x.y.z", { exists: true, wildcard: true }]
]);
assertEquals(
isDomainCoveredByWildcard("b.a.com", singleCharCerts),
true,
"Should match single character subdomain"
);
assertEquals(
isDomainCoveredByWildcard("w.x.y.z", singleCharCerts),
true,
"Should match single character subdomain of multi-part domain"
);
assertEquals(
isDomainCoveredByWildcard("v.w.x.y.z", singleCharCerts),
false,
"Should NOT match multi-level subdomain of single char domain"
);
// Test case 8: Domains with numbers and hyphens
const numericCerts = new Map([
["api-v2.service-1.com", { exists: true, wildcard: true }],
["123.456.net", { exists: true, wildcard: true }]
]);
assertEquals(
isDomainCoveredByWildcard("staging.api-v2.service-1.com", numericCerts),
true,
"Should match subdomain with hyphens and numbers"
);
assertEquals(
isDomainCoveredByWildcard("test.123.456.net", numericCerts),
true,
"Should match subdomain with numeric components"
);
assertEquals(
isDomainCoveredByWildcard(
"deep.staging.api-v2.service-1.com",
numericCerts
),
false,
"Should NOT match multi-level subdomain with hyphens and numbers"
);
console.log("All wildcard domain coverage tests passed!");
}
// Run all tests
try {
runTests();
} catch (error) {
console.error("Test failed:", error);
process.exit(1);
}

View File
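
The assertions above pin down the semantics of `isDomainCoveredByWildcard`: a certificate counts only if it exists and is a wildcard, it covers its apex domain exactly, and it covers exactly one additional subdomain label. The implementation itself is not part of this diff; a hypothetical version that satisfies every test above:

```typescript
// Hypothetical implementation consistent with the tests; the real function
// lives in TraefikConfigManager and may differ in detail.
type CertInfo = { exists: boolean; wildcard: boolean };

export function isDomainCoveredByWildcard(
    domain: string,
    certs: Map<string, CertInfo>
): boolean {
    for (const [certDomain, info] of certs) {
        if (!info.exists || !info.wildcard) continue; // must be a live wildcard cert
        if (domain === certDomain) return true; // apex matches exactly
        // *.example.com covers exactly one extra label:
        // level1.example.com matches, level2.level1.example.com does not.
        if (
            domain.endsWith("." + certDomain) &&
            domain.slice(0, -(certDomain.length + 1)).split(".").length === 1
        ) {
            return true;
        }
    }
    return false;
}
```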

@@ -31,12 +31,17 @@ export function validatePathRewriteConfig(
}
if (rewritePathType !== "stripPrefix") {
if (
(rewritePath && !rewritePathType) ||
(!rewritePath && rewritePathType)
) {
return {
isValid: false,
error: "Both rewritePath and rewritePathType must be specified together"
};
}
}
if (!rewritePath || !rewritePathType) {
return { isValid: true };
}
@@ -68,14 +73,14 @@ export function validatePathRewriteConfig(
}
}
// Additional validation for stripPrefix
if (rewritePathType === "stripPrefix") {
if (pathMatchType !== "prefix") {
logger.warn(
`stripPrefix rewrite type is most effective with prefix path matching. Current match type: ${pathMatchType}`
);
}
}
return { isValid: true };
}

View File
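
The reformatted validator enforces that, outside stripPrefix mode, `rewritePath` and `rewritePathType` are all-or-nothing, and it warns when stripPrefix is combined with a non-prefix match type. A condensed sketch of the visible guard logic; the real function takes more parameters than these hunks show:

```typescript
type Result = { isValid: boolean; error?: string };

// Condensed from the hunks; the parameter types are assumptions.
function checkRewriteConfig(
    rewritePath: string | undefined,
    rewritePathType: string | undefined,
    pathMatchType: string
): Result {
    if (rewritePathType !== "stripPrefix") {
        // Outside stripPrefix mode the two fields must travel together.
        if (
            (rewritePath && !rewritePathType) ||
            (!rewritePath && rewritePathType)
        ) {
            return {
                isValid: false,
                error: "Both rewritePath and rewritePathType must be specified together"
            };
        }
    }
    if (!rewritePath || !rewritePathType) {
        return { isValid: true }; // nothing further to validate
    }
    if (rewritePathType === "stripPrefix" && pathMatchType !== "prefix") {
        // The diff logs a warning here rather than failing validation.
        console.warn(
            `stripPrefix rewrite type is most effective with prefix path matching. Current match type: ${pathMatchType}`
        );
    }
    return { isValid: true };
}
```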

@@ -1,71 +1,247 @@
import { isValidUrlGlobPattern } from "./validators";
import { isValidUrlGlobPattern } from "./validators";
import { assertEquals } from "@test/assert";
function runTests() {
console.log('Running URL pattern validation tests...');
console.log("Running URL pattern validation tests...");
// Test valid patterns
assertEquals(isValidUrlGlobPattern('simple'), true, 'Simple path segment should be valid');
assertEquals(isValidUrlGlobPattern('simple/path'), true, 'Simple path with slash should be valid');
assertEquals(isValidUrlGlobPattern('/leading/slash'), true, 'Path with leading slash should be valid');
assertEquals(isValidUrlGlobPattern('path/'), true, 'Path with trailing slash should be valid');
assertEquals(isValidUrlGlobPattern('path/*'), true, 'Path with wildcard segment should be valid');
assertEquals(isValidUrlGlobPattern('*'), true, 'Single wildcard should be valid');
assertEquals(isValidUrlGlobPattern('*/subpath'), true, 'Wildcard with subpath should be valid');
assertEquals(isValidUrlGlobPattern('path/*/more'), true, 'Path with wildcard in the middle should be valid');
assertEquals(
isValidUrlGlobPattern("simple"),
true,
"Simple path segment should be valid"
);
assertEquals(
isValidUrlGlobPattern("simple/path"),
true,
"Simple path with slash should be valid"
);
assertEquals(
isValidUrlGlobPattern("/leading/slash"),
true,
"Path with leading slash should be valid"
);
assertEquals(
isValidUrlGlobPattern("path/"),
true,
"Path with trailing slash should be valid"
);
assertEquals(
isValidUrlGlobPattern("path/*"),
true,
"Path with wildcard segment should be valid"
);
assertEquals(
isValidUrlGlobPattern("*"),
true,
"Single wildcard should be valid"
);
assertEquals(
isValidUrlGlobPattern("*/subpath"),
true,
"Wildcard with subpath should be valid"
);
assertEquals(
isValidUrlGlobPattern("path/*/more"),
true,
"Path with wildcard in the middle should be valid"
);
// Test with special characters
assertEquals(isValidUrlGlobPattern('path-with-dash'), true, 'Path with dash should be valid');
assertEquals(isValidUrlGlobPattern('path_with_underscore'), true, 'Path with underscore should be valid');
assertEquals(isValidUrlGlobPattern('path.with.dots'), true, 'Path with dots should be valid');
assertEquals(isValidUrlGlobPattern('path~with~tilde'), true, 'Path with tilde should be valid');
assertEquals(isValidUrlGlobPattern('path!with!exclamation'), true, 'Path with exclamation should be valid');
assertEquals(isValidUrlGlobPattern('path$with$dollar'), true, 'Path with dollar should be valid');
assertEquals(isValidUrlGlobPattern('path&with&ampersand'), true, 'Path with ampersand should be valid');
assertEquals(isValidUrlGlobPattern("path'with'quote"), true, "Path with quote should be valid");
assertEquals(isValidUrlGlobPattern('path(with)parentheses'), true, 'Path with parentheses should be valid');
assertEquals(isValidUrlGlobPattern('path+with+plus'), true, 'Path with plus should be valid');
assertEquals(isValidUrlGlobPattern('path,with,comma'), true, 'Path with comma should be valid');
assertEquals(isValidUrlGlobPattern('path;with;semicolon'), true, 'Path with semicolon should be valid');
assertEquals(isValidUrlGlobPattern('path=with=equals'), true, 'Path with equals should be valid');
assertEquals(isValidUrlGlobPattern('path:with:colon'), true, 'Path with colon should be valid');
assertEquals(isValidUrlGlobPattern('path@with@at'), true, 'Path with at should be valid');
assertEquals(
isValidUrlGlobPattern("path-with-dash"),
true,
"Path with dash should be valid"
);
assertEquals(
isValidUrlGlobPattern("path_with_underscore"),
true,
"Path with underscore should be valid"
);
assertEquals(
isValidUrlGlobPattern("path.with.dots"),
true,
"Path with dots should be valid"
);
assertEquals(
isValidUrlGlobPattern("path~with~tilde"),
true,
"Path with tilde should be valid"
);
assertEquals(
isValidUrlGlobPattern("path!with!exclamation"),
true,
"Path with exclamation should be valid"
);
assertEquals(
isValidUrlGlobPattern("path$with$dollar"),
true,
"Path with dollar should be valid"
);
assertEquals(
isValidUrlGlobPattern("path&with&ampersand"),
true,
"Path with ampersand should be valid"
);
assertEquals(
isValidUrlGlobPattern("path'with'quote"),
true,
"Path with quote should be valid"
);
assertEquals(
isValidUrlGlobPattern("path(with)parentheses"),
true,
"Path with parentheses should be valid"
);
assertEquals(
isValidUrlGlobPattern("path+with+plus"),
true,
"Path with plus should be valid"
);
assertEquals(
isValidUrlGlobPattern("path,with,comma"),
true,
"Path with comma should be valid"
);
assertEquals(
isValidUrlGlobPattern("path;with;semicolon"),
true,
"Path with semicolon should be valid"
);
assertEquals(
isValidUrlGlobPattern("path=with=equals"),
true,
"Path with equals should be valid"
);
assertEquals(
isValidUrlGlobPattern("path:with:colon"),
true,
"Path with colon should be valid"
);
assertEquals(
isValidUrlGlobPattern("path@with@at"),
true,
"Path with at should be valid"
);
// Test with percent encoding
assertEquals(isValidUrlGlobPattern('path%20with%20spaces'), true, 'Path with percent-encoded spaces should be valid');
assertEquals(isValidUrlGlobPattern('path%2Fwith%2Fencoded%2Fslashes'), true, 'Path with percent-encoded slashes should be valid');
assertEquals(
isValidUrlGlobPattern("path%20with%20spaces"),
true,
"Path with percent-encoded spaces should be valid"
);
assertEquals(
isValidUrlGlobPattern("path%2Fwith%2Fencoded%2Fslashes"),
true,
"Path with percent-encoded slashes should be valid"
);
// Test with wildcards in segments (the fixed functionality)
assertEquals(isValidUrlGlobPattern('padbootstrap*'), true, 'Path with wildcard at the end of segment should be valid');
assertEquals(isValidUrlGlobPattern('pad*bootstrap'), true, 'Path with wildcard in the middle of segment should be valid');
assertEquals(isValidUrlGlobPattern('*bootstrap'), true, 'Path with wildcard at the start of segment should be valid');
assertEquals(isValidUrlGlobPattern('multiple*wildcards*in*segment'), true, 'Path with multiple wildcards in segment should be valid');
assertEquals(isValidUrlGlobPattern('wild*/cards/in*/different/seg*ments'), true, 'Path with wildcards in different segments should be valid');
assertEquals(
isValidUrlGlobPattern("padbootstrap*"),
true,
"Path with wildcard at the end of segment should be valid"
);
assertEquals(
isValidUrlGlobPattern("pad*bootstrap"),
true,
"Path with wildcard in the middle of segment should be valid"
);
assertEquals(
isValidUrlGlobPattern("*bootstrap"),
true,
"Path with wildcard at the start of segment should be valid"
);
assertEquals(
isValidUrlGlobPattern("multiple*wildcards*in*segment"),
true,
"Path with multiple wildcards in segment should be valid"
);
assertEquals(
isValidUrlGlobPattern("wild*/cards/in*/different/seg*ments"),
true,
"Path with wildcards in different segments should be valid"
);
// Test invalid patterns
assertEquals(isValidUrlGlobPattern(''), false, 'Empty string should be invalid');
assertEquals(isValidUrlGlobPattern('//double/slash'), false, 'Path with double slash should be invalid');
assertEquals(isValidUrlGlobPattern('path//end'), false, 'Path with double slash in the middle should be invalid');
assertEquals(isValidUrlGlobPattern('invalid<char>'), false, 'Path with invalid characters should be invalid');
assertEquals(isValidUrlGlobPattern('invalid|char'), false, 'Path with invalid pipe character should be invalid');
assertEquals(isValidUrlGlobPattern('invalid"char'), false, 'Path with invalid quote character should be invalid');
assertEquals(isValidUrlGlobPattern('invalid`char'), false, 'Path with invalid backtick character should be invalid');
assertEquals(isValidUrlGlobPattern('invalid^char'), false, 'Path with invalid caret character should be invalid');
assertEquals(isValidUrlGlobPattern('invalid\\char'), false, 'Path with invalid backslash character should be invalid');
assertEquals(isValidUrlGlobPattern('invalid[char]'), false, 'Path with invalid square brackets should be invalid');
assertEquals(isValidUrlGlobPattern('invalid{char}'), false, 'Path with invalid curly braces should be invalid');
assertEquals(
isValidUrlGlobPattern(""),
false,
"Empty string should be invalid"
);
assertEquals(
isValidUrlGlobPattern("//double/slash"),
false,
"Path with double slash should be invalid"
);
assertEquals(
isValidUrlGlobPattern("path//end"),
false,
"Path with double slash in the middle should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid<char>"),
false,
"Path with invalid characters should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid|char"),
false,
"Path with invalid pipe character should be invalid"
);
assertEquals(
isValidUrlGlobPattern('invalid"char'),
false,
"Path with invalid quote character should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid`char"),
false,
"Path with invalid backtick character should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid^char"),
false,
"Path with invalid caret character should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid\\char"),
false,
"Path with invalid backslash character should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid[char]"),
false,
"Path with invalid square brackets should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid{char}"),
false,
"Path with invalid curly braces should be invalid"
);
// Test invalid percent encoding
assertEquals(isValidUrlGlobPattern('invalid%2'), false, 'Path with incomplete percent encoding should be invalid');
assertEquals(isValidUrlGlobPattern('invalid%GZ'), false, 'Path with invalid hex in percent encoding should be invalid');
assertEquals(isValidUrlGlobPattern('invalid%'), false, 'Path with isolated percent sign should be invalid');
console.log('All tests passed!');
assertEquals(
isValidUrlGlobPattern("invalid%2"),
false,
"Path with incomplete percent encoding should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid%GZ"),
false,
"Path with invalid hex in percent encoding should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid%"),
false,
"Path with isolated percent sign should be invalid"
);
console.log("All tests passed!");
}
// Run all tests
try {
runTests();
} catch (error) {
console.error('Test failed:', error);
}
console.error("Test failed:", error);
}

View File
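
Taken together, these assertions describe a validator that splits on "/", rejects empty or doubled segments, allows the RFC 3986 pchar characters plus "*" anywhere inside a segment, and requires "%" to introduce a two-digit hex escape. The function body is not in this diff; one hypothetical implementation that passes all of the tests above:

```typescript
// Hypothetical implementation consistent with the tests; the real
// validator lives in ./validators and may differ.
export function isValidUrlGlobPattern(pattern: string): boolean {
    if (pattern === "") return false;
    // Strip a single leading slash; every remaining segment must be
    // non-empty, which also rejects "//" anywhere in the pattern.
    const body = pattern.startsWith("/") ? pattern.slice(1) : pattern;
    const segments = body.split("/");
    // A trailing slash yields one empty final segment, which is allowed.
    if (segments[segments.length - 1] === "") segments.pop();
    if (segments.length === 0) return false;
    // RFC 3986 pchar set plus "*" for globbing; "%" only as %XX.
    const segmentRe = /^(?:[A-Za-z0-9\-._~!$&'()*+,;=:@]|%[0-9A-Fa-f]{2})+$/;
    return segments.every((s) => s !== "" && segmentRe.test(s));
}
```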

@@ -2,7 +2,9 @@ import z from "zod";
import ipaddr from "ipaddr.js";
export function isValidCIDR(cidr: string): boolean {
return (
z.cidrv4().safeParse(cidr).success || z.cidrv6().safeParse(cidr).success
);
}
export function isValidIP(ip: string): boolean {
@@ -69,11 +71,11 @@ export function isUrlValid(url: string | undefined) {
if (!url) return true; // the link is optional in the schema so if it's empty it's valid
var pattern = new RegExp(
"^(https?:\\/\\/)?" + // protocol
"((([a-z\\d]([a-z\\d-]*[a-z\\d])*)\\.)+[a-z]{2,}|" + // domain name
"((\\d{1,3}\\.){3}\\d{1,3}))" + // OR ip (v4) address
"(\\:\\d+)?(\\/[-a-z\\d%_.~+]*)*" + // port and path
"(\\?[;&a-z\\d%_.~+=-]*)?" + // query string
"(\\#[-a-z\\d_]*)?$",
"((([a-z\\d]([a-z\\d-]*[a-z\\d])*)\\.)+[a-z]{2,}|" + // domain name
"((\\d{1,3}\\.){3}\\d{1,3}))" + // OR ip (v4) address
"(\\:\\d+)?(\\/[-a-z\\d%_.~+]*)*" + // port and path
"(\\?[;&a-z\\d%_.~+=-]*)?" + // query string
"(\\#[-a-z\\d_]*)?$",
"i"
);
return !!pattern.test(url);
@@ -168,14 +170,14 @@ export function validateHeaders(headers: string): boolean {
}
export function isSecondLevelDomain(domain: string): boolean {
if (!domain || typeof domain !== "string") {
return false;
}
const trimmedDomain = domain.trim().toLowerCase();
// Split into parts
const parts = trimmedDomain.split(".");
// Should have exactly 2 parts for a second-level domain (e.g., "example.com")
if (parts.length !== 2) {

View File
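
For reference, a usage sketch of the two validators touched above; the import path is an assumption, and the expected results follow from the logic visible in the hunks (zod's `cidrv4`/`cidrv6` parsers for `isValidCIDR`, exactly two labels for `isSecondLevelDomain`):

```typescript
// Usage sketch; "./validators" is an assumed path, per the test file above.
import { isValidCIDR, isSecondLevelDomain } from "./validators";

console.log(isValidCIDR("10.0.0.0/8"));    // true  (IPv4 CIDR)
console.log(isValidCIDR("2001:db8::/32")); // true  (IPv6 CIDR)
console.log(isValidCIDR("10.0.0.0"));      // false (no prefix length)

console.log(isSecondLevelDomain("example.com"));     // true:  exactly two labels
console.log(isSecondLevelDomain("app.example.com")); // false: three labels
console.log(isSecondLevelDomain("localhost"));       // false: one label
```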

@@ -20,6 +20,6 @@ export const errorHandlerMiddleware: ErrorRequestHandler = (
error: true,
message: error.message || "Internal Server Error",
status: statusCode,
stack: process.env.ENVIRONMENT === "prod" ? null : error.stack,
stack: process.env.ENVIRONMENT === "prod" ? null : error.stack
});
};

View File

@@ -8,13 +8,13 @@ import HttpCode from "@server/types/HttpCode";
export async function getUserOrgs(
req: Request,
res: Response,
next: NextFunction
) {
const userId = req.user?.userId; // Assuming you have user information in the request
if (!userId) {
return next(
createHttpError(HttpCode.UNAUTHORIZED, "User not authenticated"),
createHttpError(HttpCode.UNAUTHORIZED, "User not authenticated")
);
}
@@ -22,7 +22,7 @@ export async function getUserOrgs(
const userOrganizations = await db
.select({
orgId: userOrgs.orgId,
roleId: userOrgs.roleId
})
.from(userOrgs)
.where(eq(userOrgs.userId, userId));
@@ -38,8 +38,8 @@ export async function getUserOrgs(
next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
"Error retrieving user organizations",
),
"Error retrieving user organizations"
)
);
}
}

View File

@@ -12,4 +12,4 @@ export * from "./verifyAccessTokenAccess";
export * from "./verifyApiKeyIsRoot";
export * from "./verifyApiKeyApiKeyAccess";
export * from "./verifyApiKeyClientAccess";
export * from "./verifyApiKeySiteResourceAccess";
export * from "./verifyApiKeySiteResourceAccess";

View File

@@ -97,7 +97,6 @@ export async function verifyApiKeyAccessTokenAccess(
);
}
return next();
} catch (e) {
return next(

View File

@@ -11,7 +11,7 @@ export async function verifyApiKeyApiKeyAccess(
next: NextFunction
) {
try {
const { apiKey: callerApiKey } = req;
const apiKeyId =
req.params.apiKeyId || req.body.apiKeyId || req.query.apiKeyId;
@@ -44,7 +44,10 @@ export async function verifyApiKeyApiKeyAccess(
.select()
.from(apiKeyOrg)
.where(
and(
eq(apiKeys.apiKeyId, callerApiKey.apiKeyId),
eq(apiKeyOrg.orgId, orgId)
)
)
.limit(1);

View File

@@ -11,9 +11,12 @@ export async function verifyApiKeySetResourceClients(
next: NextFunction
) {
const apiKey = req.apiKey;
const singleClientId =
req.params.clientId || req.body.clientId || req.query.clientId;
const { clientIds } = req.body;
const allClientIds =
clientIds ||
(singleClientId ? [parseInt(singleClientId as string)] : []);
if (!apiKey) {
return next(
@@ -70,4 +73,3 @@ export async function verifyApiKeySetResourceClients(
);
}
}

View File

@@ -11,7 +11,8 @@ export async function verifyApiKeySetResourceUsers(
next: NextFunction
) {
const apiKey = req.apiKey;
const singleUserId =
req.params.userId || req.body.userId || req.query.userId;
const { userIds } = req.body;
const allUserIds = userIds || (singleUserId ? [singleUserId] : []);

View File

@@ -38,17 +38,12 @@ export async function verifyApiKeySiteResourceAccess(
const [siteResource] = await db
.select()
.from(siteResources)
.where(and(eq(siteResources.siteResourceId, siteResourceId)))
.limit(1);
if (!siteResource) {
return next(
createHttpError(HttpCode.NOT_FOUND, "Site resource not found")
);
}

View File

@@ -5,7 +5,7 @@ import HttpCode from "@server/types/HttpCode";
export function notFoundMiddleware(
req: Request,
res: Response,
next: NextFunction
) {
if (req.path.startsWith("/api")) {
const message = `The requests url is not found - ${req.originalUrl}`;

View File

@@ -1,30 +1,32 @@
import { Request, Response, NextFunction } from "express";
import logger from "@server/logger";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
export function requestTimeoutMiddleware(timeoutMs: number = 30000) {
return (req: Request, res: Response, next: NextFunction) => {
// Set a timeout for the request
const timeout = setTimeout(() => {
if (!res.headersSent) {
logger.error(
`Request timeout: ${req.method} ${req.url} from ${req.ip}`
);
return next(
createHttpError(
HttpCode.REQUEST_TIMEOUT,
"Request timeout - operation took too long to complete"
)
);
}
}, timeoutMs);
// Clear timeout when response finishes
res.on("finish", () => {
clearTimeout(timeout);
});
// Clear timeout when response closes
res.on("close", () => {
clearTimeout(timeout);
});

View File
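
A usage sketch for the middleware above; the module path is an assumption, and 30000 ms mirrors the default in the signature:

```typescript
// Usage sketch; "./requestTimeout" is an assumed module path.
import express from "express";
import { requestTimeoutMiddleware } from "./requestTimeout";

const app = express();
app.use(requestTimeoutMiddleware(30000)); // fail requests that exceed 30s

app.get("/slow", async (_req, res) => {
    // If this handler outlives the timeout, the middleware forwards a
    // 408 REQUEST_TIMEOUT error instead of letting the request hang.
    res.json({ ok: true });
});
```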

@@ -76,7 +76,10 @@ export async function verifySiteAccess(
.select()
.from(userOrgs)
.where(
and(
eq(userOrgs.userId, userId),
eq(userOrgs.orgId, site.orgId)
)
)
.limit(1);
req.userOrg = userOrgRole[0];

View File

@@ -9,7 +9,10 @@ const nextPort = config.getRawConfig().server.next_port;
export async function createNextServer() {
// const app = next({ dev });
const app = next({ dev: process.env.ENVIRONMENT !== "prod", turbopack: true });
const app = next({
dev: process.env.ENVIRONMENT !== "prod",
turbopack: true
});
const handle = app.getRequestHandler();
await app.prepare();

View File
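
Only the `next()` options object changes in this hunk: dev mode is off when ENVIRONMENT is "prod", and Turbopack is enabled. For context, a sketch of how such a handler is typically wired into a plain HTTP server; the port and the wiring are assumptions, not part of the diff:

```typescript
// Sketch only; the diff shows just the next() options object, so the
// server wiring and port below are illustrative assumptions.
import { createServer } from "http";
import next from "next";

async function main() {
    const app = next({
        dev: process.env.ENVIRONMENT !== "prod",
        turbopack: true
    });
    const handle = app.getRequestHandler();
    await app.prepare();
    // Delegate every request to Next's handler.
    createServer((req, res) => handle(req, res)).listen(3000);
}

main();
```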

@@ -11,11 +11,14 @@
* This file is not licensed under the AGPLv3.
*/
import { encodeHexLowerCase } from "@oslojs/encoding";
import { sha256 } from "@oslojs/crypto/sha2";
import {
RemoteExitNode,
remoteExitNodes,
remoteExitNodeSessions,
RemoteExitNodeSession
} from "@server/db";
import { db } from "@server/db";
import { eq } from "drizzle-orm";
@@ -23,30 +26,39 @@ export const EXPIRES = 1000 * 60 * 60 * 24 * 30;
export async function createRemoteExitNodeSession(
token: string,
remoteExitNodeId: string
): Promise<RemoteExitNodeSession> {
const sessionId = encodeHexLowerCase(
sha256(new TextEncoder().encode(token))
);
const session: RemoteExitNodeSession = {
sessionId: sessionId,
remoteExitNodeId,
expiresAt: new Date(Date.now() + EXPIRES).getTime()
};
await db.insert(remoteExitNodeSessions).values(session);
return session;
}
export async function validateRemoteExitNodeSessionToken(
token: string
): Promise<SessionValidationResult> {
const sessionId = encodeHexLowerCase(
sha256(new TextEncoder().encode(token))
);
const result = await db
.select({
remoteExitNode: remoteExitNodes,
session: remoteExitNodeSessions
})
.from(remoteExitNodeSessions)
.innerJoin(
remoteExitNodes,
eq(
remoteExitNodeSessions.remoteExitNodeId,
remoteExitNodes.remoteExitNodeId
)
)
.where(eq(remoteExitNodeSessions.sessionId, sessionId));
if (result.length < 1) {
return { session: null, remoteExitNode: null };
@@ -58,26 +70,32 @@ export async function validateRemoteExitNodeSessionToken(
.where(eq(remoteExitNodeSessions.sessionId, session.sessionId));
return { session: null, remoteExitNode: null };
}
if (Date.now() >= session.expiresAt - EXPIRES / 2) {
session.expiresAt = new Date(Date.now() + EXPIRES).getTime();
await db
.update(remoteExitNodeSessions)
.set({
expiresAt: session.expiresAt
})
.where(eq(remoteExitNodeSessions.sessionId, session.sessionId));
}
return { session, remoteExitNode };
}
export async function invalidateRemoteExitNodeSession(
sessionId: string
): Promise<void> {
await db
.delete(remoteExitNodeSessions)
.where(eq(remoteExitNodeSessions.sessionId, sessionId));
}
export async function invalidateAllRemoteExitNodeSessions(
remoteExitNodeId: string
): Promise<void> {
await db
.delete(remoteExitNodeSessions)
.where(eq(remoteExitNodeSessions.remoteExitNodeId, remoteExitNodeId));
}
export type SessionValidationResult =

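The renewal branch above implements sliding expiration: sessions live for EXPIRES (30 days), and once less than half of that remains, the expiry is pushed back out to a full EXPIRES from now. A small worked sketch of just that rule:

```typescript
// Sketch of the sliding-expiration rule from validateRemoteExitNodeSessionToken.
const EXPIRES = 1000 * 60 * 60 * 24 * 30; // 30 days, as in the diff

function shouldRenew(expiresAt: number, now: number = Date.now()): boolean {
    return now >= expiresAt - EXPIRES / 2; // under 15 days of lifetime left
}

function renewedExpiry(now: number = Date.now()): number {
    return new Date(now + EXPIRES).getTime(); // a full 30 days from now
}

// Example: a session created 20 days ago has 10 days left, so it renews.
const createdAt = Date.now() - 20 * 24 * 60 * 60 * 1000;
console.log(shouldRenew(createdAt + EXPIRES)); // true
console.log(renewedExpiry() > createdAt + EXPIRES); // true: expiry moved out
```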
Some files were not shown because too many files have changed in this diff.