Format all files

Owen
2025-12-09 10:56:14 -05:00
parent fa839a811f
commit f9b03943c3
535 changed files with 7670 additions and 5626 deletions

View File

@@ -1,6 +1,3 @@
 {
-    "extends": [
-        "next/core-web-vitals",
-        "next/typescript"
-    ]
+    "extends": ["next/core-web-vitals", "next/typescript"]
 }

View File

@@ -1,3 +1,3 @@
 {
     "recommendations": ["esbenp.prettier-vscode"]
-}
\ No newline at end of file
+}

View File

@@ -19,4 +19,4 @@
         "editor.defaultFormatter": "esbenp.prettier-vscode"
     },
     "editor.formatOnSave": true
-}
\ No newline at end of file
+}

View File

@@ -17,4 +17,4 @@
         "lib": "@/lib",
         "hooks": "@/hooks"
     }
-}
\ No newline at end of file
+}

View File

@@ -1,9 +1,7 @@
 import { defineConfig } from "drizzle-kit";
 import path from "path";

-const schema = [
-    path.join("server", "db", "pg", "schema"),
-];
+const schema = [path.join("server", "db", "pg", "schema")];

 export default defineConfig({
     dialect: "postgresql",

View File

@@ -2,9 +2,7 @@ import { APP_PATH } from "@server/lib/consts";
 import { defineConfig } from "drizzle-kit";
 import path from "path";

-const schema = [
-    path.join("server", "db", "sqlite", "schema"),
-];
+const schema = [path.join("server", "db", "sqlite", "schema")];

 export default defineConfig({
     dialect: "sqlite",

View File

@@ -24,20 +24,20 @@ const argv = yargs(hideBin(process.argv))
         alias: "e",
         describe: "Entry point file",
         type: "string",
-        demandOption: true,
+        demandOption: true
     })
     .option("out", {
         alias: "o",
         describe: "Output file path",
         type: "string",
-        demandOption: true,
+        demandOption: true
     })
     .option("build", {
         alias: "b",
         describe: "Build type (oss, saas, enterprise)",
         type: "string",
         choices: ["oss", "saas", "enterprise"],
-        default: "oss",
+        default: "oss"
     })
     .help()
     .alias("help", "h").argv;
@@ -66,7 +66,9 @@ function privateImportGuardPlugin() {
             // Check if the importing file is NOT in server/private
             const normalizedImporter = path.normalize(importingFile);
-            const isInServerPrivate = normalizedImporter.includes(path.normalize("server/private"));
+            const isInServerPrivate = normalizedImporter.includes(
+                path.normalize("server/private")
+            );

             if (!isInServerPrivate) {
                 const violation = {
@@ -79,8 +81,8 @@ function privateImportGuardPlugin() {
                 console.log(`PRIVATE IMPORT VIOLATION:`);
                 console.log(`  File: ${importingFile}`);
                 console.log(`  Import: ${args.path}`);
-                console.log(`  Resolve dir: ${args.resolveDir || 'N/A'}`);
-                console.log('');
+                console.log(`  Resolve dir: ${args.resolveDir || "N/A"}`);
+                console.log("");
             }

             // Return null to let the default resolver handle it
@@ -89,16 +91,20 @@ function privateImportGuardPlugin() {
         build.onEnd((result) => {
             if (violations.length > 0) {
-                console.log(`\nSUMMARY: Found ${violations.length} private import violation(s):`);
+                console.log(
+                    `\nSUMMARY: Found ${violations.length} private import violation(s):`
+                );
                 violations.forEach((v, i) => {
-                    console.log(`  ${i + 1}. ${path.relative(process.cwd(), v.file)} imports ${v.importPath}`);
+                    console.log(
+                        `  ${i + 1}. ${path.relative(process.cwd(), v.file)} imports ${v.importPath}`
+                    );
                 });
-                console.log('');
+                console.log("");

                 result.errors.push({
                     text: `Private import violations detected: ${violations.length} violation(s) found`,
                     location: null,
-                    notes: violations.map(v => ({
+                    notes: violations.map((v) => ({
                         text: `${path.relative(process.cwd(), v.file)} imports ${v.importPath}`,
                         location: null
                     }))
@@ -121,7 +127,9 @@ function dynamicImportGuardPlugin() {
             // Check if the importing file is NOT in server/private
             const normalizedImporter = path.normalize(importingFile);
-            const isInServerPrivate = normalizedImporter.includes(path.normalize("server/private"));
+            const isInServerPrivate = normalizedImporter.includes(
+                path.normalize("server/private")
+            );

             if (isInServerPrivate) {
                 const violation = {
@@ -134,8 +142,8 @@ function dynamicImportGuardPlugin() {
                 console.log(`DYNAMIC IMPORT VIOLATION:`);
                 console.log(`  File: ${importingFile}`);
                 console.log(`  Import: ${args.path}`);
-                console.log(`  Resolve dir: ${args.resolveDir || 'N/A'}`);
-                console.log('');
+                console.log(`  Resolve dir: ${args.resolveDir || "N/A"}`);
+                console.log("");
             }

             // Return null to let the default resolver handle it
@@ -144,16 +152,20 @@ function dynamicImportGuardPlugin() {
         build.onEnd((result) => {
             if (violations.length > 0) {
-                console.log(`\nSUMMARY: Found ${violations.length} dynamic import violation(s):`);
+                console.log(
+                    `\nSUMMARY: Found ${violations.length} dynamic import violation(s):`
+                );
                 violations.forEach((v, i) => {
-                    console.log(`  ${i + 1}. ${path.relative(process.cwd(), v.file)} imports ${v.importPath}`);
+                    console.log(
+                        `  ${i + 1}. ${path.relative(process.cwd(), v.file)} imports ${v.importPath}`
+                    );
                 });
-                console.log('');
+                console.log("");

                 result.errors.push({
                     text: `Dynamic import violations detected: ${violations.length} violation(s) found`,
                     location: null,
-                    notes: violations.map(v => ({
+                    notes: violations.map((v) => ({
                         text: `${path.relative(process.cwd(), v.file)} imports ${v.importPath}`,
                         location: null
                     }))
@@ -172,21 +184,28 @@ function dynamicImportSwitcherPlugin(buildValue) {
         const switches = [];

         build.onStart(() => {
-            console.log(`Dynamic import switcher using build type: ${buildValue}`);
+            console.log(
+                `Dynamic import switcher using build type: ${buildValue}`
+            );
         });

         build.onResolve({ filter: /^#dynamic\// }, (args) => {
             // Extract the path after #dynamic/
-            const dynamicPath = args.path.replace(/^#dynamic\//, '');
+            const dynamicPath = args.path.replace(/^#dynamic\//, "");

             // Determine the replacement based on build type
             let replacement;
             if (buildValue === "oss") {
                 replacement = `#open/${dynamicPath}`;
-            } else if (buildValue === "saas" || buildValue === "enterprise") {
+            } else if (
+                buildValue === "saas" ||
+                buildValue === "enterprise"
+            ) {
                 replacement = `#closed/${dynamicPath}`; // We use #closed here so that the route guards dont complain after its been changed but this is the same as #private
             } else {
-                console.warn(`Unknown build type '${buildValue}', defaulting to #open/`);
+                console.warn(
+                    `Unknown build type '${buildValue}', defaulting to #open/`
+                );
                 replacement = `#open/${dynamicPath}`;
             }
@@ -201,8 +220,10 @@ function dynamicImportSwitcherPlugin(buildValue) {
             console.log(`DYNAMIC IMPORT SWITCH:`);
             console.log(`  File: ${args.importer}`);
             console.log(`  Original: ${args.path}`);
-            console.log(`  Switched to: ${replacement} (build: ${buildValue})`);
-            console.log('');
+            console.log(
+                `  Switched to: ${replacement} (build: ${buildValue})`
+            );
+            console.log("");

             // Rewrite the import path and let the normal resolution continue
             return build.resolve(replacement, {
@@ -215,12 +236,18 @@ function dynamicImportSwitcherPlugin(buildValue) {
         build.onEnd((result) => {
             if (switches.length > 0) {
-                console.log(`\nDYNAMIC IMPORT SUMMARY: Switched ${switches.length} import(s) for build type '${buildValue}':`);
+                console.log(
+                    `\nDYNAMIC IMPORT SUMMARY: Switched ${switches.length} import(s) for build type '${buildValue}':`
+                );
                 switches.forEach((s, i) => {
-                    console.log(`  ${i + 1}. ${path.relative(process.cwd(), s.file)}`);
-                    console.log(`     ${s.originalPath} → ${s.replacementPath}`);
+                    console.log(
+                        `  ${i + 1}. ${path.relative(process.cwd(), s.file)}`
+                    );
+                    console.log(
+                        `     ${s.originalPath} → ${s.replacementPath}`
+                    );
                 });
-                console.log('');
+                console.log("");
             }
         });
     }
@@ -235,7 +262,7 @@ esbuild
         format: "esm",
         minify: false,
         banner: {
-            js: banner,
+            js: banner
         },
         platform: "node",
         external: ["body-parser"],
@@ -244,20 +271,22 @@ esbuild
             dynamicImportGuardPlugin(),
             dynamicImportSwitcherPlugin(argv.build),
             nodeExternalsPlugin({
-                packagePath: getPackagePaths(),
-            }),
+                packagePath: getPackagePaths()
+            })
         ],
         sourcemap: "inline",
-        target: "node22",
+        target: "node22"
     })
     .then((result) => {
         // Check if there were any errors in the build result
         if (result.errors && result.errors.length > 0) {
-            console.error(`Build failed with ${result.errors.length} error(s):`);
+            console.error(
+                `Build failed with ${result.errors.length} error(s):`
+            );
             result.errors.forEach((error, i) => {
                 console.error(`${i + 1}. ${error.text}`);
                 if (error.notes) {
-                    error.notes.forEach(note => {
+                    error.notes.forEach((note) => {
                         console.error(`  - ${note.text}`);
                     });
                 }
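
The guard and switcher plugins above follow esbuild's standard plugin shape: an object with a name and a setup(build) function that registers onResolve/onEnd hooks. A minimal standalone sketch of the same guard pattern, for reference; the "#private/" filter, entry point, and output path here are illustrative, not taken from the repository:

// guard-plugin-sketch.mjs — minimal sketch of the onResolve/onEnd guard
// pattern used in the build script above.
import esbuild from "esbuild";
import path from "path";

function privateImportGuard() {
    return {
        name: "private-import-guard",
        setup(build) {
            const violations = [];
            // Record every import of the private namespace from outside server/private.
            build.onResolve({ filter: /^#private\// }, (args) => {
                const importer = path.normalize(args.importer);
                if (!importer.includes(path.normalize("server/private"))) {
                    violations.push({ file: args.importer, importPath: args.path });
                }
                return null; // defer to the default resolver
            });
            // Fail the build if any violations were recorded.
            build.onEnd((result) => {
                if (violations.length > 0) {
                    result.errors.push({
                        text: `Private import violations: ${violations.length}`,
                        location: null,
                        notes: violations.map((v) => ({
                            text: `${v.file} imports ${v.importPath}`,
                            location: null
                        }))
                    });
                }
            });
        }
    };
}

await esbuild.build({
    entryPoints: ["server/index.ts"], // hypothetical entry point
    bundle: true,
    platform: "node",
    outfile: "dist/server.mjs",
    plugins: [privateImportGuard()]
});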

View File

@@ -1,19 +1,19 @@
-import tseslint from 'typescript-eslint';
+import tseslint from "typescript-eslint";

 export default tseslint.config({
     files: ["**/*.{ts,tsx,js,jsx}"],
     languageOptions: {
         parser: tseslint.parser,
         parserOptions: {
             ecmaVersion: "latest",
             sourceType: "module",
             ecmaFeatures: {
                 jsx: true
             }
         }
     },
     rules: {
-        "semi": "error",
+        semi: "error",
         "prefer-const": "warn"
     }
 });

View File

@@ -1,8 +1,8 @@
 /** @type {import('postcss-load-config').Config} */
 const config = {
     plugins: {
-        "@tailwindcss/postcss": {},
-    },
+        "@tailwindcss/postcss": {}
+    }
 };

 export default config;

View File

@@ -2,13 +2,13 @@ import { hash, verify } from "@node-rs/argon2";

 export async function verifyPassword(
     password: string,
-    hash: string,
+    hash: string
 ): Promise<boolean> {
     const validPassword = await verify(hash, password, {
         memoryCost: 19456,
         timeCost: 2,
         outputLen: 32,
-        parallelism: 1,
+        parallelism: 1
     });
     return validPassword;
 }
@@ -18,7 +18,7 @@ export async function hashPassword(password: string): Promise<string> {
         memoryCost: 19456,
         timeCost: 2,
         outputLen: 32,
-        parallelism: 1,
+        parallelism: 1
     });

     return passwordHash;
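
The parameters above (memoryCost 19456 KiB, timeCost 2, parallelism 1, 32-byte output) match the commonly cited OWASP baseline for Argon2id. A short usage sketch of the two helpers; the import path is an assumption:

// Sketch: hash at signup, verify at login, using the helpers above.
import { hashPassword, verifyPassword } from "@server/auth/password"; // hypothetical path

async function demo() {
    const stored = await hashPassword("S3cure!password");
    const ok = await verifyPassword("S3cure!password", stored); // true
    const bad = await verifyPassword("wrong-guess", stored); // false
    console.log({ ok, bad });
}

demo();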

View File

@@ -4,10 +4,13 @@ export const passwordSchema = z
     .string()
     .min(8, { message: "Password must be at least 8 characters long" })
     .max(128, { message: "Password must be at most 128 characters long" })
-    .regex(/^(?=.*?[A-Z])(?=.*?[a-z])(?=.*?[0-9])(?=.*?[~!`@#$%^&*()_\-+={}[\]|\\:;"'<>,.\/?]).*$/, {
-        message: `Your password must meet the following conditions:
+    .regex(
+        /^(?=.*?[A-Z])(?=.*?[a-z])(?=.*?[0-9])(?=.*?[~!`@#$%^&*()_\-+={}[\]|\\:;"'<>,.\/?]).*$/,
+        {
+            message: `Your password must meet the following conditions:
 at least one uppercase English letter,
 at least one lowercase English letter,
 at least one digit,
 at least one special character.`
-    });
+        }
+    );
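
The regex is four lookaheads, one per required character class. A usage sketch of passwordSchema; the import path is an assumption:

import { passwordSchema } from "@server/auth/passwordSchema"; // hypothetical path

const weak = passwordSchema.safeParse("alllowercase"); // fails: no uppercase, digit, or special
const strong = passwordSchema.safeParse("Str0ng!passw0rd"); // satisfies all four classes

if (!weak.success) {
    // zod reports every failed rule; the regex message lists the conditions
    console.log(weak.error.issues.map((i) => i.message));
}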

View File

@@ -1,6 +1,4 @@
-import {
-    encodeHexLowerCase,
-} from "@oslojs/encoding";
+import { encodeHexLowerCase } from "@oslojs/encoding";
 import { sha256 } from "@oslojs/crypto/sha2";
 import { Newt, newts, newtSessions, NewtSession } from "@server/db";
 import { db } from "@server/db";
@@ -10,25 +8,25 @@ export const EXPIRES = 1000 * 60 * 60 * 24 * 30;

 export async function createNewtSession(
     token: string,
-    newtId: string,
+    newtId: string
 ): Promise<NewtSession> {
     const sessionId = encodeHexLowerCase(
-        sha256(new TextEncoder().encode(token)),
+        sha256(new TextEncoder().encode(token))
     );
     const session: NewtSession = {
         sessionId: sessionId,
         newtId,
-        expiresAt: new Date(Date.now() + EXPIRES).getTime(),
+        expiresAt: new Date(Date.now() + EXPIRES).getTime()
     };
     await db.insert(newtSessions).values(session);
     return session;
 }

 export async function validateNewtSessionToken(
-    token: string,
+    token: string
 ): Promise<SessionValidationResult> {
     const sessionId = encodeHexLowerCase(
-        sha256(new TextEncoder().encode(token)),
+        sha256(new TextEncoder().encode(token))
     );
     const result = await db
         .select({ newt: newts, session: newtSessions })
@@ -45,14 +43,12 @@ export async function validateNewtSessionToken(
             .where(eq(newtSessions.sessionId, session.sessionId));
         return { session: null, newt: null };
     }
-    if (Date.now() >= session.expiresAt - (EXPIRES / 2)) {
-        session.expiresAt = new Date(
-            Date.now() + EXPIRES,
-        ).getTime();
+    if (Date.now() >= session.expiresAt - EXPIRES / 2) {
+        session.expiresAt = new Date(Date.now() + EXPIRES).getTime();
         await db
             .update(newtSessions)
             .set({
-                expiresAt: session.expiresAt,
+                expiresAt: session.expiresAt
             })
             .where(eq(newtSessions.sessionId, session.sessionId));
     }
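
The pattern here never stores the raw token: the client holds token, the database holds its SHA-256 hash hex-encoded, and validation re-hashes to look up the row; sessions also slide forward once less than half their lifetime remains. A condensed sketch of just those two pieces, using the same @oslojs helpers as above:

import { encodeHexLowerCase } from "@oslojs/encoding";
import { sha256 } from "@oslojs/crypto/sha2";

export const EXPIRES = 1000 * 60 * 60 * 24 * 30; // 30 days, as above

// Derive the database key from a bearer token (the token itself is never stored).
export function tokenToSessionId(token: string): string {
    return encodeHexLowerCase(sha256(new TextEncoder().encode(token)));
}

// Sliding renewal: extend once less than half the lifetime remains.
export function shouldRenew(expiresAt: number): boolean {
    return Date.now() >= expiresAt - EXPIRES / 2;
}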

View File

@@ -1,6 +1,4 @@
-import {
-    encodeHexLowerCase,
-} from "@oslojs/encoding";
+import { encodeHexLowerCase } from "@oslojs/encoding";
 import { sha256 } from "@oslojs/crypto/sha2";
 import { Olm, olms, olmSessions, OlmSession } from "@server/db";
 import { db } from "@server/db";
@@ -10,25 +8,25 @@ export const EXPIRES = 1000 * 60 * 60 * 24 * 30;

 export async function createOlmSession(
     token: string,
-    olmId: string,
+    olmId: string
 ): Promise<OlmSession> {
     const sessionId = encodeHexLowerCase(
-        sha256(new TextEncoder().encode(token)),
+        sha256(new TextEncoder().encode(token))
     );
     const session: OlmSession = {
         sessionId: sessionId,
         olmId,
-        expiresAt: new Date(Date.now() + EXPIRES).getTime(),
+        expiresAt: new Date(Date.now() + EXPIRES).getTime()
     };
     await db.insert(olmSessions).values(session);
     return session;
 }

 export async function validateOlmSessionToken(
-    token: string,
+    token: string
 ): Promise<SessionValidationResult> {
     const sessionId = encodeHexLowerCase(
-        sha256(new TextEncoder().encode(token)),
+        sha256(new TextEncoder().encode(token))
     );
     const result = await db
         .select({ olm: olms, session: olmSessions })
@@ -45,14 +43,12 @@ export async function validateOlmSessionToken(
             .where(eq(olmSessions.sessionId, session.sessionId));
         return { session: null, olm: null };
     }
-    if (Date.now() >= session.expiresAt - (EXPIRES / 2)) {
-        session.expiresAt = new Date(
-            Date.now() + EXPIRES,
-        ).getTime();
+    if (Date.now() >= session.expiresAt - EXPIRES / 2) {
+        session.expiresAt = new Date(Date.now() + EXPIRES).getTime();
         await db
             .update(olmSessions)
             .set({
-                expiresAt: session.expiresAt,
+                expiresAt: session.expiresAt
             })
             .where(eq(olmSessions.sessionId, session.sessionId));
     }

View File

@@ -10,4 +10,4 @@ export async function initCleanup() {
     // Handle process termination
     process.on("SIGTERM", () => cleanup());
     process.on("SIGINT", () => cleanup());
-}
\ No newline at end of file
+}

File diff suppressed because it is too large

View File

@@ -1708,4 +1708,4 @@
         "Desert Box Turtle",
         "African Striped Weasel"
     ]
-}
\ No newline at end of file
+}

View File

@@ -215,42 +215,56 @@ export const sessionTransferToken = pgTable("sessionTransferToken", {
     expiresAt: bigint("expiresAt", { mode: "number" }).notNull()
 });

-export const actionAuditLog = pgTable("actionAuditLog", {
-    id: serial("id").primaryKey(),
-    timestamp: bigint("timestamp", { mode: "number" }).notNull(), // this is EPOCH time in seconds
-    orgId: varchar("orgId")
-        .notNull()
-        .references(() => orgs.orgId, { onDelete: "cascade" }),
-    actorType: varchar("actorType", { length: 50 }).notNull(),
-    actor: varchar("actor", { length: 255 }).notNull(),
-    actorId: varchar("actorId", { length: 255 }).notNull(),
-    action: varchar("action", { length: 100 }).notNull(),
-    metadata: text("metadata")
-}, (table) => ([
-    index("idx_actionAuditLog_timestamp").on(table.timestamp),
-    index("idx_actionAuditLog_org_timestamp").on(table.orgId, table.timestamp)
-]));
+export const actionAuditLog = pgTable(
+    "actionAuditLog",
+    {
+        id: serial("id").primaryKey(),
+        timestamp: bigint("timestamp", { mode: "number" }).notNull(), // this is EPOCH time in seconds
+        orgId: varchar("orgId")
+            .notNull()
+            .references(() => orgs.orgId, { onDelete: "cascade" }),
+        actorType: varchar("actorType", { length: 50 }).notNull(),
+        actor: varchar("actor", { length: 255 }).notNull(),
+        actorId: varchar("actorId", { length: 255 }).notNull(),
+        action: varchar("action", { length: 100 }).notNull(),
+        metadata: text("metadata")
+    },
+    (table) => [
+        index("idx_actionAuditLog_timestamp").on(table.timestamp),
+        index("idx_actionAuditLog_org_timestamp").on(
+            table.orgId,
+            table.timestamp
+        )
+    ]
+);

-export const accessAuditLog = pgTable("accessAuditLog", {
-    id: serial("id").primaryKey(),
-    timestamp: bigint("timestamp", { mode: "number" }).notNull(), // this is EPOCH time in seconds
-    orgId: varchar("orgId")
-        .notNull()
-        .references(() => orgs.orgId, { onDelete: "cascade" }),
-    actorType: varchar("actorType", { length: 50 }),
-    actor: varchar("actor", { length: 255 }),
-    actorId: varchar("actorId", { length: 255 }),
-    resourceId: integer("resourceId"),
-    ip: varchar("ip", { length: 45 }),
-    type: varchar("type", { length: 100 }).notNull(),
-    action: boolean("action").notNull(),
-    location: text("location"),
-    userAgent: text("userAgent"),
-    metadata: text("metadata")
-}, (table) => ([
-    index("idx_identityAuditLog_timestamp").on(table.timestamp),
-    index("idx_identityAuditLog_org_timestamp").on(table.orgId, table.timestamp)
-]));
+export const accessAuditLog = pgTable(
+    "accessAuditLog",
+    {
+        id: serial("id").primaryKey(),
+        timestamp: bigint("timestamp", { mode: "number" }).notNull(), // this is EPOCH time in seconds
+        orgId: varchar("orgId")
+            .notNull()
+            .references(() => orgs.orgId, { onDelete: "cascade" }),
+        actorType: varchar("actorType", { length: 50 }),
+        actor: varchar("actor", { length: 255 }),
+        actorId: varchar("actorId", { length: 255 }),
+        resourceId: integer("resourceId"),
+        ip: varchar("ip", { length: 45 }),
+        type: varchar("type", { length: 100 }).notNull(),
+        action: boolean("action").notNull(),
+        location: text("location"),
+        userAgent: text("userAgent"),
+        metadata: text("metadata")
+    },
+    (table) => [
+        index("idx_identityAuditLog_timestamp").on(table.timestamp),
+        index("idx_identityAuditLog_org_timestamp").on(
+            table.orgId,
+            table.timestamp
+        )
+    ]
+);

 export type Limit = InferSelectModel<typeof limits>;
 export type Account = InferSelectModel<typeof account>;
@@ -270,4 +284,4 @@ export type RemoteExitNodeSession = InferSelectModel<
 export type ExitNodeOrg = InferSelectModel<typeof exitNodeOrgs>;
 export type LoginPage = InferSelectModel<typeof loginPage>;
 export type ActionAuditLog = InferSelectModel<typeof actionAuditLog>;
-export type AccessAuditLog = InferSelectModel<typeof accessAuditLog>;
\ No newline at end of file
+export type AccessAuditLog = InferSelectModel<typeof accessAuditLog>;
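
The reformatted tables use drizzle's multi-argument pgTable form, where the third argument returns an array of extra definitions (here, indexes). A trimmed, self-contained sketch of that shape; the table and index names are illustrative:

import {
    pgTable,
    serial,
    bigint,
    varchar,
    text,
    index
} from "drizzle-orm/pg-core";

// Sketch of pgTable(name, columns, extras) reduced to a hypothetical
// two-index audit table.
export const auditLogSketch = pgTable(
    "auditLogSketch",
    {
        id: serial("id").primaryKey(),
        timestamp: bigint("timestamp", { mode: "number" }).notNull(),
        orgId: varchar("orgId").notNull(),
        metadata: text("metadata")
    },
    (table) => [
        index("idx_sketch_timestamp").on(table.timestamp),
        index("idx_sketch_org_timestamp").on(table.orgId, table.timestamp)
    ]
);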

View File

@@ -177,7 +177,7 @@ export const targetHealthCheck = pgTable("targetHealthCheck", {
     hcMethod: varchar("hcMethod").default("GET"),
     hcStatus: integer("hcStatus"), // http code
     hcHealth: text("hcHealth").default("unknown"), // "unknown", "healthy", "unhealthy"
-    hcTlsServerName: text("hcTlsServerName"),
+    hcTlsServerName: text("hcTlsServerName")
 });

 export const exitNodes = pgTable("exitNodes", {

View File

@@ -52,10 +52,7 @@ export async function getResourceByDomain(
             resourceHeaderAuth,
             eq(resourceHeaderAuth.resourceId, resources.resourceId)
         )
-        .innerJoin(
-            orgs,
-            eq(orgs.orgId, resources.orgId)
-        )
+        .innerJoin(orgs, eq(orgs.orgId, resources.orgId))
         .where(eq(resources.fullDomain, domain))
         .limit(1);

View File

@@ -8,7 +8,7 @@ const runMigrations = async () => {
     console.log("Running migrations...");
     try {
         migrate(db as any, {
-            migrationsFolder: migrationsFolder,
+            migrationsFolder: migrationsFolder
         });
         console.log("Migrations completed successfully.");
     } catch (error) {

View File

@@ -29,7 +29,9 @@ export const certificates = sqliteTable("certificates", {
 });

 export const dnsChallenge = sqliteTable("dnsChallenges", {
-    dnsChallengeId: integer("dnsChallengeId").primaryKey({ autoIncrement: true }),
+    dnsChallengeId: integer("dnsChallengeId").primaryKey({
+        autoIncrement: true
+    }),
     domain: text("domain").notNull(),
     token: text("token").notNull(),
     keyAuthorization: text("keyAuthorization").notNull(),
@@ -61,9 +63,7 @@ export const customers = sqliteTable("customers", {
 });

 export const subscriptions = sqliteTable("subscriptions", {
-    subscriptionId: text("subscriptionId")
-        .primaryKey()
-        .notNull(),
+    subscriptionId: text("subscriptionId").primaryKey().notNull(),
     customerId: text("customerId")
         .notNull()
         .references(() => customers.customerId, { onDelete: "cascade" }),
@@ -75,7 +75,9 @@ export const subscriptions = sqliteTable("subscriptions", {
 });

 export const subscriptionItems = sqliteTable("subscriptionItems", {
-    subscriptionItemId: integer("subscriptionItemId").primaryKey({ autoIncrement: true }),
+    subscriptionItemId: integer("subscriptionItemId").primaryKey({
+        autoIncrement: true
+    }),
     subscriptionId: text("subscriptionId")
         .notNull()
         .references(() => subscriptions.subscriptionId, {
@@ -129,7 +131,9 @@ export const limits = sqliteTable("limits", {
 });

 export const usageNotifications = sqliteTable("usageNotifications", {
-    notificationId: integer("notificationId").primaryKey({ autoIncrement: true }),
+    notificationId: integer("notificationId").primaryKey({
+        autoIncrement: true
+    }),
     orgId: text("orgId")
         .notNull()
         .references(() => orgs.orgId, { onDelete: "cascade" }),
@@ -210,42 +214,56 @@ export const sessionTransferToken = sqliteTable("sessionTransferToken", {
     expiresAt: integer("expiresAt").notNull()
 });

-export const actionAuditLog = sqliteTable("actionAuditLog", {
-    id: integer("id").primaryKey({ autoIncrement: true }),
-    timestamp: integer("timestamp").notNull(), // this is EPOCH time in seconds
-    orgId: text("orgId")
-        .notNull()
-        .references(() => orgs.orgId, { onDelete: "cascade" }),
-    actorType: text("actorType").notNull(),
-    actor: text("actor").notNull(),
-    actorId: text("actorId").notNull(),
-    action: text("action").notNull(),
-    metadata: text("metadata")
-}, (table) => ([
-    index("idx_actionAuditLog_timestamp").on(table.timestamp),
-    index("idx_actionAuditLog_org_timestamp").on(table.orgId, table.timestamp)
-]));
+export const actionAuditLog = sqliteTable(
+    "actionAuditLog",
+    {
+        id: integer("id").primaryKey({ autoIncrement: true }),
+        timestamp: integer("timestamp").notNull(), // this is EPOCH time in seconds
+        orgId: text("orgId")
+            .notNull()
+            .references(() => orgs.orgId, { onDelete: "cascade" }),
+        actorType: text("actorType").notNull(),
+        actor: text("actor").notNull(),
+        actorId: text("actorId").notNull(),
+        action: text("action").notNull(),
+        metadata: text("metadata")
+    },
+    (table) => [
+        index("idx_actionAuditLog_timestamp").on(table.timestamp),
+        index("idx_actionAuditLog_org_timestamp").on(
+            table.orgId,
+            table.timestamp
+        )
+    ]
+);

-export const accessAuditLog = sqliteTable("accessAuditLog", {
-    id: integer("id").primaryKey({ autoIncrement: true }),
-    timestamp: integer("timestamp").notNull(), // this is EPOCH time in seconds
-    orgId: text("orgId")
-        .notNull()
-        .references(() => orgs.orgId, { onDelete: "cascade" }),
-    actorType: text("actorType"),
-    actor: text("actor"),
-    actorId: text("actorId"),
-    resourceId: integer("resourceId"),
-    ip: text("ip"),
-    location: text("location"),
-    type: text("type").notNull(),
-    action: integer("action", { mode: "boolean" }).notNull(),
-    userAgent: text("userAgent"),
-    metadata: text("metadata")
-}, (table) => ([
-    index("idx_identityAuditLog_timestamp").on(table.timestamp),
-    index("idx_identityAuditLog_org_timestamp").on(table.orgId, table.timestamp)
-]));
+export const accessAuditLog = sqliteTable(
+    "accessAuditLog",
+    {
+        id: integer("id").primaryKey({ autoIncrement: true }),
+        timestamp: integer("timestamp").notNull(), // this is EPOCH time in seconds
+        orgId: text("orgId")
+            .notNull()
+            .references(() => orgs.orgId, { onDelete: "cascade" }),
+        actorType: text("actorType"),
+        actor: text("actor"),
+        actorId: text("actorId"),
+        resourceId: integer("resourceId"),
+        ip: text("ip"),
+        location: text("location"),
+        type: text("type").notNull(),
+        action: integer("action", { mode: "boolean" }).notNull(),
+        userAgent: text("userAgent"),
+        metadata: text("metadata")
+    },
+    (table) => [
+        index("idx_identityAuditLog_timestamp").on(table.timestamp),
+        index("idx_identityAuditLog_org_timestamp").on(
+            table.orgId,
+            table.timestamp
+        )
+    ]
+);

 export type Limit = InferSelectModel<typeof limits>;
 export type Account = InferSelectModel<typeof account>;
@@ -265,4 +283,4 @@ export type RemoteExitNodeSession = InferSelectModel<
 export type ExitNodeOrg = InferSelectModel<typeof exitNodeOrgs>;
 export type LoginPage = InferSelectModel<typeof loginPage>;
 export type ActionAuditLog = InferSelectModel<typeof actionAuditLog>;
-export type AccessAuditLog = InferSelectModel<typeof accessAuditLog>;
\ No newline at end of file
+export type AccessAuditLog = InferSelectModel<typeof accessAuditLog>;

View File

@@ -18,10 +18,13 @@ function createEmailClient() {
         host: emailConfig.smtp_host,
         port: emailConfig.smtp_port,
         secure: emailConfig.smtp_secure || false,
-        auth: (emailConfig.smtp_user && emailConfig.smtp_pass) ? {
-            user: emailConfig.smtp_user,
-            pass: emailConfig.smtp_pass
-        } : null
+        auth:
+            emailConfig.smtp_user && emailConfig.smtp_pass
+                ? {
+                      user: emailConfig.smtp_user,
+                      pass: emailConfig.smtp_pass
+                  }
+                : null
     } as SMTPTransport.Options;

     if (emailConfig.smtp_tls_reject_unauthorized !== undefined) {
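
The reformatted ternary keeps the same behavior: auth is only set when both credentials are present, so unauthenticated relays keep working. A minimal sketch of that pattern with nodemailer; the config shape mirrors the snippet above and the values are illustrative:

import nodemailer from "nodemailer";

// Hypothetical config mirroring emailConfig above.
const emailConfig = {
    smtp_host: "smtp.example.com",
    smtp_port: 587,
    smtp_secure: false,
    smtp_user: process.env.SMTP_USER,
    smtp_pass: process.env.SMTP_PASS
};

const transporter = nodemailer.createTransport({
    host: emailConfig.smtp_host,
    port: emailConfig.smtp_port,
    secure: emailConfig.smtp_secure || false,
    // Leave auth unset for open relays; the file above uses null instead.
    auth:
        emailConfig.smtp_user && emailConfig.smtp_pass
            ? { user: emailConfig.smtp_user, pass: emailConfig.smtp_pass }
            : undefined
});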

View File

@@ -19,7 +19,13 @@ interface Props {
     billingLink: string; // Link to billing page
 }

-export const NotifyUsageLimitApproaching = ({ email, limitName, currentUsage, usageLimit, billingLink }: Props) => {
+export const NotifyUsageLimitApproaching = ({
+    email,
+    limitName,
+    currentUsage,
+    usageLimit,
+    billingLink
+}: Props) => {
     const previewText = `Your usage for ${limitName} is approaching the limit.`;
     const usagePercentage = Math.round((currentUsage / usageLimit) * 100);
@@ -37,23 +43,32 @@ export const NotifyUsageLimitApproaching = ({
                     <EmailGreeting>Hi there,</EmailGreeting>

                     <EmailText>
-                        We wanted to let you know that your usage for <strong>{limitName}</strong> is approaching your plan limit.
+                        We wanted to let you know that your usage for{" "}
+                        <strong>{limitName}</strong> is approaching your
+                        plan limit.
                     </EmailText>

                     <EmailText>
-                        <strong>Current Usage:</strong> {currentUsage} of {usageLimit} ({usagePercentage}%)
+                        <strong>Current Usage:</strong> {currentUsage} of{" "}
+                        {usageLimit} ({usagePercentage}%)
                     </EmailText>

                     <EmailText>
-                        Once you reach your limit, some functionality may be restricted or your sites may disconnect until you upgrade your plan or your usage resets.
+                        Once you reach your limit, some functionality may be
+                        restricted or your sites may disconnect until you
+                        upgrade your plan or your usage resets.
                     </EmailText>

                     <EmailText>
-                        To avoid any interruption to your service, we recommend upgrading your plan or monitoring your usage closely. You can <a href={billingLink}>upgrade your plan here</a>.
+                        To avoid any interruption to your service, we
+                        recommend upgrading your plan or monitoring your
+                        usage closely. You can{" "}
+                        <a href={billingLink}>upgrade your plan here</a>.
                     </EmailText>

                     <EmailText>
-                        If you have any questions or need assistance, please don't hesitate to reach out to our support team.
+                        If you have any questions or need assistance, please
+                        don't hesitate to reach out to our support team.
                     </EmailText>

                     <EmailFooter>

View File

@@ -19,7 +19,13 @@ interface Props {
     billingLink: string; // Link to billing page
 }

-export const NotifyUsageLimitReached = ({ email, limitName, currentUsage, usageLimit, billingLink }: Props) => {
+export const NotifyUsageLimitReached = ({
+    email,
+    limitName,
+    currentUsage,
+    usageLimit,
+    billingLink
+}: Props) => {
     const previewText = `You've reached your ${limitName} usage limit - Action required`;
     const usagePercentage = Math.round((currentUsage / usageLimit) * 100);
@@ -32,30 +38,48 @@ export const NotifyUsageLimitReached = ({
                 <EmailContainer>
                     <EmailLetterHead />

-                    <EmailHeading>Usage Limit Reached - Action Required</EmailHeading>
+                    <EmailHeading>
+                        Usage Limit Reached - Action Required
+                    </EmailHeading>

                     <EmailGreeting>Hi there,</EmailGreeting>

                     <EmailText>
-                        You have reached your usage limit for <strong>{limitName}</strong>.
+                        You have reached your usage limit for{" "}
+                        <strong>{limitName}</strong>.
                     </EmailText>

                     <EmailText>
-                        <strong>Current Usage:</strong> {currentUsage} of {usageLimit} ({usagePercentage}%)
+                        <strong>Current Usage:</strong> {currentUsage} of{" "}
+                        {usageLimit} ({usagePercentage}%)
                     </EmailText>

                     <EmailText>
-                        <strong>Important:</strong> Your functionality may now be restricted and your sites may disconnect until you either upgrade your plan or your usage resets. To prevent any service interruption, immediate action is recommended.
+                        <strong>Important:</strong> Your functionality may
+                        now be restricted and your sites may disconnect
+                        until you either upgrade your plan or your usage
+                        resets. To prevent any service interruption,
+                        immediate action is recommended.
                     </EmailText>

                     <EmailText>
                         <strong>What you can do:</strong>
-                        <br /> <a href={billingLink} style={{ color: '#2563eb', fontWeight: 'bold' }}>Upgrade your plan immediately</a> to restore full functionality
-                        <br /> Monitor your usage to stay within limits in the future
+                        <br />{" "}
+                        <a
+                            href={billingLink}
+                            style={{ color: "#2563eb", fontWeight: "bold" }}
+                        >
+                            Upgrade your plan immediately
+                        </a>{" "}
+                        to restore full functionality
+                        <br /> Monitor your usage to stay within limits in
+                        the future
                     </EmailText>

                     <EmailText>
-                        If you have any questions or need immediate assistance, please contact our support team right away.
+                        If you have any questions or need immediate
+                        assistance, please contact our support team right
+                        away.
                     </EmailText>

                     <EmailFooter>

View File

@@ -5,7 +5,7 @@ import config from "@server/lib/config";
 import logger from "@server/logger";
 import {
     errorHandlerMiddleware,
-    notFoundMiddleware,
+    notFoundMiddleware
 } from "@server/middlewares";
 import { authenticated, unauthenticated } from "#dynamic/routers/integration";
 import { logIncomingMiddleware } from "./middlewares/logIncoming";

View File

@@ -25,16 +25,22 @@ export const FeatureMeterIdsSandbox: Record<FeatureId, string> = {
 };

 export function getFeatureMeterId(featureId: FeatureId): string {
-    if (process.env.ENVIRONMENT == "prod" && process.env.SANDBOX_MODE !== "true") {
+    if (
+        process.env.ENVIRONMENT == "prod" &&
+        process.env.SANDBOX_MODE !== "true"
+    ) {
         return FeatureMeterIds[featureId];
     } else {
         return FeatureMeterIdsSandbox[featureId];
     }
 }

-export function getFeatureIdByMetricId(metricId: string): FeatureId | undefined {
-    return (Object.entries(FeatureMeterIds) as [FeatureId, string][])
-        .find(([_, v]) => v === metricId)?.[0];
+export function getFeatureIdByMetricId(
+    metricId: string
+): FeatureId | undefined {
+    return (Object.entries(FeatureMeterIds) as [FeatureId, string][]).find(
+        ([_, v]) => v === metricId
+    )?.[0];
 }

 export type FeaturePriceSet = {
@@ -43,7 +49,8 @@ export type FeaturePriceSet = {
     [FeatureId.DOMAINS]?: string; // Optional since domains are not billed
 };

-export const standardFeaturePriceSet: FeaturePriceSet = { // Free tier matches the freeLimitSet
+export const standardFeaturePriceSet: FeaturePriceSet = {
+    // Free tier matches the freeLimitSet
     [FeatureId.SITE_UPTIME]: "price_1RrQc4D3Ee2Ir7WmaJGZ3MtF",
     [FeatureId.USERS]: "price_1RrQeJD3Ee2Ir7WmgveP3xea",
     [FeatureId.EGRESS_DATA_MB]: "price_1RrQXFD3Ee2Ir7WmvGDlgxQk",
@@ -51,7 +58,8 @@ export const standardFeaturePriceSet: FeaturePriceSet = {
     [FeatureId.REMOTE_EXIT_NODES]: "price_1S46weD3Ee2Ir7Wm94KEHI4h"
 };

-export const standardFeaturePriceSetSandbox: FeaturePriceSet = { // Free tier matches the freeLimitSet
+export const standardFeaturePriceSetSandbox: FeaturePriceSet = {
+    // Free tier matches the freeLimitSet
     [FeatureId.SITE_UPTIME]: "price_1RefFBDCpkOb237BPrKZ8IEU",
     [FeatureId.USERS]: "price_1ReNa4DCpkOb237Bc67G5muF",
     [FeatureId.EGRESS_DATA_MB]: "price_1Rfp9LDCpkOb237BwuN5Oiu0",
@@ -60,15 +68,20 @@ export const standardFeaturePriceSetSandbox: FeaturePriceSet = {
 };

 export function getStandardFeaturePriceSet(): FeaturePriceSet {
-    if (process.env.ENVIRONMENT == "prod" && process.env.SANDBOX_MODE !== "true") {
+    if (
+        process.env.ENVIRONMENT == "prod" &&
+        process.env.SANDBOX_MODE !== "true"
+    ) {
         return standardFeaturePriceSet;
     } else {
         return standardFeaturePriceSetSandbox;
     }
 }

-export function getLineItems(featurePriceSet: FeaturePriceSet): Stripe.Checkout.SessionCreateParams.LineItem[] {
+export function getLineItems(
+    featurePriceSet: FeaturePriceSet
+): Stripe.Checkout.SessionCreateParams.LineItem[] {
     return Object.entries(featurePriceSet).map(([featureId, priceId]) => ({
-        price: priceId,
+        price: priceId
     }));
 }

View File

@@ -2,4 +2,4 @@ export * from "./limitSet";
 export * from "./features";
 export * from "./limitsService";
 export * from "./getOrgTierData";
-export * from "./createCustomer";
\ No newline at end of file
+export * from "./createCustomer";

View File

@@ -12,7 +12,7 @@ export const sandboxLimitSet: LimitSet = {
     [FeatureId.USERS]: { value: 1, description: "Sandbox limit" },
     [FeatureId.EGRESS_DATA_MB]: { value: 1000, description: "Sandbox limit" }, // 1 GB
     [FeatureId.DOMAINS]: { value: 0, description: "Sandbox limit" },
-    [FeatureId.REMOTE_EXIT_NODES]: { value: 0, description: "Sandbox limit" },
+    [FeatureId.REMOTE_EXIT_NODES]: { value: 0, description: "Sandbox limit" }
 };

 export const freeLimitSet: LimitSet = {
@@ -29,7 +29,7 @@ export const freeLimitSet: LimitSet = {
 export const subscribedLimitSet: LimitSet = {
     [FeatureId.SITE_UPTIME]: {
         value: 2232000,
-        description: "Contact us to increase soft limit.",
+        description: "Contact us to increase soft limit."
     }, // 50 sites up for 31 days
     [FeatureId.USERS]: {
         value: 150,
@@ -38,7 +38,7 @@ export const subscribedLimitSet: LimitSet = {
     [FeatureId.EGRESS_DATA_MB]: {
         value: 12000000,
         description: "Contact us to increase soft limit."
     }, // 12000 GB
     [FeatureId.DOMAINS]: {
         value: 25,
         description: "Contact us to increase soft limit."

View File

@@ -1,22 +1,32 @@
 export enum TierId {
-    STANDARD = "standard",
+    STANDARD = "standard"
 }

 export type TierPriceSet = {
     [key in TierId]: string;
 };

-export const tierPriceSet: TierPriceSet = { // Free tier matches the freeLimitSet
-    [TierId.STANDARD]: "price_1RrQ9cD3Ee2Ir7Wmqdy3KBa0",
+export const tierPriceSet: TierPriceSet = {
+    // Free tier matches the freeLimitSet
+    [TierId.STANDARD]: "price_1RrQ9cD3Ee2Ir7Wmqdy3KBa0"
 };

-export const tierPriceSetSandbox: TierPriceSet = { // Free tier matches the freeLimitSet
+export const tierPriceSetSandbox: TierPriceSet = {
+    // Free tier matches the freeLimitSet
     // when matching tier the keys closer to 0 index are matched first so list the tiers in descending order of value
-    [TierId.STANDARD]: "price_1RrAYJDCpkOb237By2s1P32m",
+    [TierId.STANDARD]: "price_1RrAYJDCpkOb237By2s1P32m"
 };

-export function getTierPriceSet(environment?: string, sandbox_mode?: boolean): TierPriceSet {
-    if ((process.env.ENVIRONMENT == "prod" && process.env.SANDBOX_MODE !== "true") || (environment === "prod" && sandbox_mode !== true)) { // THIS GETS LOADED CLIENT SIDE AND SERVER SIDE
+export function getTierPriceSet(
+    environment?: string,
+    sandbox_mode?: boolean
+): TierPriceSet {
+    if (
+        (process.env.ENVIRONMENT == "prod" &&
+            process.env.SANDBOX_MODE !== "true") ||
+        (environment === "prod" && sandbox_mode !== true)
+    ) {
+        // THIS GETS LOADED CLIENT SIDE AND SERVER SIDE
         return tierPriceSet;
     } else {
         return tierPriceSetSandbox;
View File

@@ -19,7 +19,7 @@ import logger from "@server/logger";
 import { sendToClient } from "#dynamic/routers/ws";
 import { build } from "@server/build";
 import { s3Client } from "@server/lib/s3";
 import cache from "@server/lib/cache";

 interface StripeEvent {
     identifier?: string;

View File

@@ -34,7 +34,10 @@ export async function applyNewtDockerBlueprint(
         return;
     }

-    if (isEmptyObject(blueprint["proxy-resources"]) && isEmptyObject(blueprint["client-resources"])) {
+    if (
+        isEmptyObject(blueprint["proxy-resources"]) &&
+        isEmptyObject(blueprint["client-resources"])
+    ) {
         return;
     }

View File

@@ -84,12 +84,20 @@ export function processContainerLabels(containers: Container[]): {
         // Process proxy resources
         if (Object.keys(proxyResourceLabels).length > 0) {
-            processResourceLabels(proxyResourceLabels, container, result["proxy-resources"]);
+            processResourceLabels(
+                proxyResourceLabels,
+                container,
+                result["proxy-resources"]
+            );
         }

         // Process client resources
         if (Object.keys(clientResourceLabels).length > 0) {
-            processResourceLabels(clientResourceLabels, container, result["client-resources"]);
+            processResourceLabels(
+                clientResourceLabels,
+                container,
+                result["client-resources"]
+            );
         }
     });
@@ -161,8 +169,7 @@ function processResourceLabels(
             const finalTarget = { ...target };
             if (!finalTarget.hostname) {
-                finalTarget.hostname =
-                    container.name ||
-                    container.hostname;
+                finalTarget.hostname = container.name || container.hostname;
             }
             if (!finalTarget.port) {
                 const containerPort =

View File

@@ -312,7 +312,7 @@ export const ConfigSchema = z
             };
             delete (data as any)["public-resources"];
         }

         // Merge private-resources into client-resources
         if (data["private-resources"]) {
             data["client-resources"] = {
@@ -321,10 +321,13 @@ export const ConfigSchema = z
             };
             delete (data as any)["private-resources"];
         }

         return data as {
             "proxy-resources": Record<string, z.infer<typeof ResourceSchema>>;
-            "client-resources": Record<string, z.infer<typeof ClientResourceSchema>>;
+            "client-resources": Record<
+                string,
+                z.infer<typeof ClientResourceSchema>
+            >;
             sites: Record<string, z.infer<typeof SiteSchema>>;
         };
     })
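
The transform shown here folds a deprecated key into its replacement while parsing, so downstream code only ever sees the new shape. A minimal sketch of that technique with zod; the schema shape is a hypothetical reduction of the one above:

import { z } from "zod";

// Sketch: merge a deprecated "private-resources" key into
// "client-resources" during parsing, mirroring the transform above.
const ConfigSketch = z
    .object({
        "client-resources": z.record(z.string(), z.any()).default({}),
        "private-resources": z.record(z.string(), z.any()).optional()
    })
    .transform((data) => {
        if (data["private-resources"]) {
            data["client-resources"] = {
                ...data["client-resources"],
                ...data["private-resources"]
            };
            delete (data as any)["private-resources"];
        }
        return data;
    });

// Callers see only the merged key:
const parsed = ConfigSketch.parse({
    "private-resources": { legacy: { port: 8080 } }
});
console.log(parsed["client-resources"].legacy); // { port: 8080 }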

View File

@@ -2,4 +2,4 @@ import NodeCache from "node-cache";

 export const cache = new NodeCache({ stdTTL: 3600, checkperiod: 120 });

-export default cache;
\ No newline at end of file
+export default cache;

View File

@@ -166,7 +166,10 @@ export async function calculateUserClientsForOrgs(
         ];

         // Get next available subnet
-        const newSubnet = await getNextAvailableClientSubnet(orgId, transaction);
+        const newSubnet = await getNextAvailableClientSubnet(
+            orgId,
+            transaction
+        );

         if (!newSubnet) {
             logger.warn(
                 `Skipping org ${orgId} for OLM ${olm.olmId} (user ${userId}): no available subnet found`

View File

@@ -1,4 +1,6 @@
-export async function getValidCertificatesForDomains(domains: Set<string>): Promise<
+export async function getValidCertificatesForDomains(
+    domains: Set<string>
+): Promise<
     Array<{
         id: number;
         domain: string;
@@ -10,4 +12,4 @@
     }>
 > {
     return []; // stub
-}
\ No newline at end of file
+}

View File

@@ -7,7 +7,10 @@ function dateToTimestamp(dateStr: string): number {

 // Testable version of calculateCutoffTimestamp that accepts a "now" timestamp
 // This matches the logic in cleanupLogs.ts but allows injecting the current time
-function calculateCutoffTimestampWithNow(retentionDays: number, nowTimestamp: number): number {
+function calculateCutoffTimestampWithNow(
+    retentionDays: number,
+    nowTimestamp: number
+): number {
     if (retentionDays === 9001) {
         // Special case: data is erased at the end of the year following the year it was generated
         // This means we delete logs from 2 years ago or older (logs from year Y are deleted after Dec 31 of year Y+1)
@@ -28,7 +31,7 @@ function testCalculateCutoffTimestamp() {
     {
         const now = dateToTimestamp("2025-12-06T12:00:00Z");
         const result = calculateCutoffTimestampWithNow(30, now);
-        const expected = now - (30 * 24 * 60 * 60);
+        const expected = now - 30 * 24 * 60 * 60;
         assertEquals(result, expected, "30 days retention calculation failed");
     }
@@ -36,7 +39,7 @@ function testCalculateCutoffTimestamp() {
     {
         const now = dateToTimestamp("2025-06-15T00:00:00Z");
         const result = calculateCutoffTimestampWithNow(90, now);
-        const expected = now - (90 * 24 * 60 * 60);
+        const expected = now - 90 * 24 * 60 * 60;
         assertEquals(result, expected, "90 days retention calculation failed");
     }
@@ -48,7 +51,11 @@ function testCalculateCutoffTimestamp() {
         const now = dateToTimestamp("2025-12-06T12:00:00Z");
         const result = calculateCutoffTimestampWithNow(9001, now);
         const expected = dateToTimestamp("2024-01-01T00:00:00Z");
-        assertEquals(result, expected, "9001 retention (Dec 2025) - should cutoff at Jan 1, 2024");
+        assertEquals(
+            result,
+            expected,
+            "9001 retention (Dec 2025) - should cutoff at Jan 1, 2024"
+        );
     }

     // Test 4: Special case 9001 - January 2026
@@ -58,7 +65,11 @@ function testCalculateCutoffTimestamp() {
         const now = dateToTimestamp("2026-01-15T12:00:00Z");
         const result = calculateCutoffTimestampWithNow(9001, now);
         const expected = dateToTimestamp("2025-01-01T00:00:00Z");
-        assertEquals(result, expected, "9001 retention (Jan 2026) - should cutoff at Jan 1, 2025");
+        assertEquals(
+            result,
+            expected,
+            "9001 retention (Jan 2026) - should cutoff at Jan 1, 2025"
+        );
     }

     // Test 5: Special case 9001 - December 31, 2025 at 23:59:59 UTC
@@ -68,7 +79,11 @@ function testCalculateCutoffTimestamp() {
         const now = dateToTimestamp("2025-12-31T23:59:59Z");
         const result = calculateCutoffTimestampWithNow(9001, now);
         const expected = dateToTimestamp("2024-01-01T00:00:00Z");
-        assertEquals(result, expected, "9001 retention (Dec 31, 2025 23:59:59) - should cutoff at Jan 1, 2024");
+        assertEquals(
+            result,
+            expected,
+            "9001 retention (Dec 31, 2025 23:59:59) - should cutoff at Jan 1, 2024"
+        );
     }

     // Test 6: Special case 9001 - January 1, 2026 at 00:00:01 UTC
@@ -78,7 +93,11 @@ function testCalculateCutoffTimestamp() {
         const now = dateToTimestamp("2026-01-01T00:00:01Z");
         const result = calculateCutoffTimestampWithNow(9001, now);
         const expected = dateToTimestamp("2025-01-01T00:00:00Z");
-        assertEquals(result, expected, "9001 retention (Jan 1, 2026 00:00:01) - should cutoff at Jan 1, 2025");
+        assertEquals(
+            result,
+            expected,
+            "9001 retention (Jan 1, 2026 00:00:01) - should cutoff at Jan 1, 2025"
+        );
     }

     // Test 7: Special case 9001 - Mid year 2025
@@ -87,7 +106,11 @@ function testCalculateCutoffTimestamp() {
         const now = dateToTimestamp("2025-06-15T12:00:00Z");
         const result = calculateCutoffTimestampWithNow(9001, now);
         const expected = dateToTimestamp("2024-01-01T00:00:00Z");
-        assertEquals(result, expected, "9001 retention (mid 2025) - should cutoff at Jan 1, 2024");
+        assertEquals(
+            result,
+            expected,
+            "9001 retention (mid 2025) - should cutoff at Jan 1, 2024"
+        );
     }

     // Test 8: Special case 9001 - Early 2024
@@ -96,14 +119,18 @@ function testCalculateCutoffTimestamp() {
         const now = dateToTimestamp("2024-02-01T12:00:00Z");
         const result = calculateCutoffTimestampWithNow(9001, now);
         const expected = dateToTimestamp("2023-01-01T00:00:00Z");
-        assertEquals(result, expected, "9001 retention (early 2024) - should cutoff at Jan 1, 2023");
+        assertEquals(
+            result,
+            expected,
+            "9001 retention (early 2024) - should cutoff at Jan 1, 2023"
+        );
     }

     // Test 9: 1 day retention
     {
         const now = dateToTimestamp("2025-12-06T12:00:00Z");
         const result = calculateCutoffTimestampWithNow(1, now);
-        const expected = now - (1 * 24 * 60 * 60);
+        const expected = now - 1 * 24 * 60 * 60;
         assertEquals(result, expected, "1 day retention calculation failed");
     }
@@ -111,7 +138,7 @@ function testCalculateCutoffTimestamp() {
     {
         const now = dateToTimestamp("2025-12-06T12:00:00Z");
         const result = calculateCutoffTimestampWithNow(365, now);
-        const expected = now - (365 * 24 * 60 * 60);
+        const expected = now - 365 * 24 * 60 * 60;
         assertEquals(result, expected, "365 days retention calculation failed");
     }
@@ -123,11 +150,19 @@ function testCalculateCutoffTimestamp() {
         const cutoff = calculateCutoffTimestampWithNow(9001, now);
         const logFromDec2023 = dateToTimestamp("2023-12-31T23:59:59Z");
         const logFromJan2024 = dateToTimestamp("2024-01-01T00:00:00Z");

         // Log from Dec 2023 should be before cutoff (deleted)
-        assertEquals(logFromDec2023 < cutoff, true, "Log from Dec 2023 should be deleted");
+        assertEquals(
+            logFromDec2023 < cutoff,
+            true,
+            "Log from Dec 2023 should be deleted"
+        );
         // Log from Jan 2024 should be at or after cutoff (kept)
-        assertEquals(logFromJan2024 >= cutoff, true, "Log from Jan 2024 should be kept");
+        assertEquals(
+            logFromJan2024 >= cutoff,
+            true,
+            "Log from Jan 2024 should be kept"
+        );
     }

     // Test 12: Verify 9001 in 2026 - logs from 2024 should now be deleted
@@ -136,11 +171,19 @@ function testCalculateCutoffTimestamp() {
         const cutoff = calculateCutoffTimestampWithNow(9001, now);
         const logFromDec2024 = dateToTimestamp("2024-12-31T23:59:59Z");
         const logFromJan2025 = dateToTimestamp("2025-01-01T00:00:00Z");

         // Log from Dec 2024 should be before cutoff (deleted)
-        assertEquals(logFromDec2024 < cutoff, true, "Log from Dec 2024 should be deleted in 2026");
+        assertEquals(
+            logFromDec2024 < cutoff,
+            true,
+            "Log from Dec 2024 should be deleted in 2026"
+        );
         // Log from Jan 2025 should be at or after cutoff (kept)
-        assertEquals(logFromJan2025 >= cutoff, true, "Log from Jan 2025 should be kept in 2026");
+        assertEquals(
+            logFromJan2025 >= cutoff,
+            true,
+            "Log from Jan 2025 should be kept in 2026"
+        );
     }

     // Test 13: Edge case - exactly at year boundary for 9001
@@ -149,7 +192,11 @@ function testCalculateCutoffTimestamp() {
         const now = dateToTimestamp("2025-01-01T00:00:00Z");
         const result = calculateCutoffTimestampWithNow(9001, now);
         const expected = dateToTimestamp("2024-01-01T00:00:00Z");
-        assertEquals(result, expected, "9001 retention (Jan 1, 2025 00:00:00) - should cutoff at Jan 1, 2024");
+        assertEquals(
+            result,
+            expected,
+            "9001 retention (Jan 1, 2025 00:00:00) - should cutoff at Jan 1, 2024"
+        );
     }

     // Test 14: Verify data from 2024 is kept throughout 2025 when using 9001
@@ -157,18 +204,29 @@ function testCalculateCutoffTimestamp() {
     {
         // Running in June 2025
         const nowJune2025 = dateToTimestamp("2025-06-15T12:00:00Z");
-        const cutoffJune2025 = calculateCutoffTimestampWithNow(9001, nowJune2025);
+        const cutoffJune2025 = calculateCutoffTimestampWithNow(
+            9001,
+            nowJune2025
+        );
         const logFromJuly2024 = dateToTimestamp("2024-07-15T12:00:00Z");

         // Log from July 2024 should be KEPT in June 2025
-        assertEquals(logFromJuly2024 >= cutoffJune2025, true, "Log from July 2024 should be kept in June 2025");
+        assertEquals(
+            logFromJuly2024 >= cutoffJune2025,
+            true,
+            "Log from July 2024 should be kept in June 2025"
+        );

         // Running in January 2026
         const nowJan2026 = dateToTimestamp("2026-01-15T12:00:00Z");
         const cutoffJan2026 = calculateCutoffTimestampWithNow(9001, nowJan2026);

         // Log from July 2024 should be DELETED in January 2026
-        assertEquals(logFromJuly2024 < cutoffJan2026, true, "Log from July 2024 should be deleted in Jan 2026");
+        assertEquals(
+            logFromJuly2024 < cutoffJan2026,
+            true,
+            "Log from July 2024 should be deleted in Jan 2026"
+        );
     }

     // Test 15: Verify the exact requirement - data from 2024 must be purged on December 31, 2025
@@ -176,16 +234,27 @@ function testCalculateCutoffTimestamp() {
// On Jan 1, 2026 (now 2026), data from 2024 can be deleted // On Jan 1, 2026 (now 2026), data from 2024 can be deleted
{ {
const logFromMid2024 = dateToTimestamp("2024-06-15T12:00:00Z"); const logFromMid2024 = dateToTimestamp("2024-06-15T12:00:00Z");
// Dec 31, 2025 23:59:59 - still 2025, log should be kept // Dec 31, 2025 23:59:59 - still 2025, log should be kept
const nowDec31_2025 = dateToTimestamp("2025-12-31T23:59:59Z"); const nowDec31_2025 = dateToTimestamp("2025-12-31T23:59:59Z");
const cutoffDec31 = calculateCutoffTimestampWithNow(9001, nowDec31_2025); const cutoffDec31 = calculateCutoffTimestampWithNow(
assertEquals(logFromMid2024 >= cutoffDec31, true, "Log from mid-2024 should be kept on Dec 31, 2025"); 9001,
nowDec31_2025
);
assertEquals(
logFromMid2024 >= cutoffDec31,
true,
"Log from mid-2024 should be kept on Dec 31, 2025"
);
// Jan 1, 2026 00:00:00 - now 2026, log can be deleted // Jan 1, 2026 00:00:00 - now 2026, log can be deleted
const nowJan1_2026 = dateToTimestamp("2026-01-01T00:00:00Z"); const nowJan1_2026 = dateToTimestamp("2026-01-01T00:00:00Z");
const cutoffJan1 = calculateCutoffTimestampWithNow(9001, nowJan1_2026); const cutoffJan1 = calculateCutoffTimestampWithNow(9001, nowJan1_2026);
assertEquals(logFromMid2024 < cutoffJan1, true, "Log from mid-2024 should be deleted on Jan 1, 2026"); assertEquals(
logFromMid2024 < cutoffJan1,
true,
"Log from mid-2024 should be deleted on Jan 1, 2026"
);
} }
console.log("All calculateCutoffTimestamp tests passed!"); console.log("All calculateCutoffTimestamp tests passed!");

@@ -4,18 +4,20 @@ import { eq, and } from "drizzle-orm";
import { subdomainSchema } from "@server/lib/schemas"; import { subdomainSchema } from "@server/lib/schemas";
import { fromError } from "zod-validation-error"; import { fromError } from "zod-validation-error";
export type DomainValidationResult = { export type DomainValidationResult =
success: true; | {
fullDomain: string; success: true;
subdomain: string | null; fullDomain: string;
} | { subdomain: string | null;
success: false; }
error: string; | {
}; success: false;
error: string;
};
/** /**
* Validates a domain and constructs the full domain based on domain type and subdomain. * Validates a domain and constructs the full domain based on domain type and subdomain.
* *
* @param domainId - The ID of the domain to validate * @param domainId - The ID of the domain to validate
* @param orgId - The organization ID to check domain access * @param orgId - The organization ID to check domain access
* @param subdomain - Optional subdomain to append (for ns and wildcard domains) * @param subdomain - Optional subdomain to append (for ns and wildcard domains)
@@ -34,7 +36,10 @@ export async function validateAndConstructDomain(
.where(eq(domains.domainId, domainId)) .where(eq(domains.domainId, domainId))
.leftJoin( .leftJoin(
orgDomains, orgDomains,
and(eq(orgDomains.orgId, orgId), eq(orgDomains.domainId, domainId)) and(
eq(orgDomains.orgId, orgId),
eq(orgDomains.domainId, domainId)
)
); );
// Check if domain exists // Check if domain exists
@@ -106,7 +111,7 @@ export async function validateAndConstructDomain(
} catch (error) { } catch (error) {
return { return {
success: false, success: false,
error: `An error occurred while validating domain: ${error instanceof Error ? error.message : 'Unknown error'}` error: `An error occurred while validating domain: ${error instanceof Error ? error.message : "Unknown error"}`
}; };
} }
} }
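Because DomainValidationResult is a discriminated union on success, callers get type narrowing for free. A hypothetical call site (argument values are illustrative):

    const result = await validateAndConstructDomain("domain123", "org123", "app");
    if (result.success) {
        // narrowed to { success: true; fullDomain: string; subdomain: string | null }
        console.log(result.fullDomain);
    } else {
        // narrowed to { success: false; error: string }
        console.error(result.error);
    }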

@@ -1,39 +1,39 @@
import crypto from 'crypto'; import crypto from "crypto";
export function encryptData(data: string, key: Buffer): string { export function encryptData(data: string, key: Buffer): string {
const algorithm = 'aes-256-gcm'; const algorithm = "aes-256-gcm";
const iv = crypto.randomBytes(16); const iv = crypto.randomBytes(16);
const cipher = crypto.createCipheriv(algorithm, key, iv); const cipher = crypto.createCipheriv(algorithm, key, iv);
let encrypted = cipher.update(data, 'utf8', 'hex'); let encrypted = cipher.update(data, "utf8", "hex");
encrypted += cipher.final('hex'); encrypted += cipher.final("hex");
const authTag = cipher.getAuthTag(); const authTag = cipher.getAuthTag();
// Combine IV, auth tag, and encrypted data // Combine IV, auth tag, and encrypted data
return iv.toString('hex') + ':' + authTag.toString('hex') + ':' + encrypted; return iv.toString("hex") + ":" + authTag.toString("hex") + ":" + encrypted;
} }
// Helper function to decrypt data (you'll need this to read certificates) // Helper function to decrypt data (you'll need this to read certificates)
export function decryptData(encryptedData: string, key: Buffer): string { export function decryptData(encryptedData: string, key: Buffer): string {
const algorithm = 'aes-256-gcm'; const algorithm = "aes-256-gcm";
const parts = encryptedData.split(':'); const parts = encryptedData.split(":");
if (parts.length !== 3) { if (parts.length !== 3) {
throw new Error('Invalid encrypted data format'); throw new Error("Invalid encrypted data format");
} }
const iv = Buffer.from(parts[0], 'hex'); const iv = Buffer.from(parts[0], "hex");
const authTag = Buffer.from(parts[1], 'hex'); const authTag = Buffer.from(parts[1], "hex");
const encrypted = parts[2]; const encrypted = parts[2];
const decipher = crypto.createDecipheriv(algorithm, key, iv); const decipher = crypto.createDecipheriv(algorithm, key, iv);
decipher.setAuthTag(authTag); decipher.setAuthTag(authTag);
let decrypted = decipher.update(encrypted, 'hex', 'utf8'); let decrypted = decipher.update(encrypted, "hex", "utf8");
decrypted += decipher.final('utf8'); decrypted += decipher.final("utf8");
return decrypted; return decrypted;
} }
// openssl rand -hex 32 > config/encryption.key // openssl rand -hex 32 > config/encryption.key
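A hypothetical round trip with these helpers. AES-256-GCM needs a 32-byte key; a freshly generated key stands in here for one loaded from the hex file the openssl command above produces (e.g. Buffer.from(hex.trim(), "hex")):

    import crypto from "crypto";

    const key = crypto.randomBytes(32); // stand-in for the persisted key
    const ciphertext = encryptData("-----BEGIN CERTIFICATE-----...", key);
    // ciphertext is "<iv hex>:<auth tag hex>:<data hex>"
    const plaintext = decryptData(ciphertext, key); // original string back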

@@ -30,4 +30,4 @@ export async function getCurrentExitNodeId(): Promise<number> {
} }
} }
return currentExitNodeId; return currentExitNodeId;
} }

@@ -1,4 +1,4 @@
export * from "./exitNodes"; export * from "./exitNodes";
export * from "./exitNodeComms"; export * from "./exitNodeComms";
export * from "./subnet"; export * from "./subnet";
export * from "./getCurrentExitNodeId"; export * from "./getCurrentExitNodeId";

@@ -27,4 +27,4 @@ export async function getNextAvailableSubnet(): Promise<string> {
"/" + "/" +
subnet.split("/")[1]; subnet.split("/")[1];
return subnet; return subnet;
} }

@@ -30,4 +30,4 @@ export async function getCountryCodeForIp(
} }
return; return;
} }

@@ -33,7 +33,11 @@ export async function generateOidcRedirectUrl(
) )
.limit(1); .limit(1);
if (res?.loginPage && res.loginPage.domainId && res.loginPage.fullDomain) { if (
res?.loginPage &&
res.loginPage.domainId &&
res.loginPage.fullDomain
) {
baseUrl = `${method}://${res.loginPage.fullDomain}`; baseUrl = `${method}://${res.loginPage.fullDomain}`;
} }
} }

@@ -4,7 +4,7 @@ import { assertEquals } from "@test/assert";
// Test cases // Test cases
function testFindNextAvailableCidr() { function testFindNextAvailableCidr() {
console.log("Running findNextAvailableCidr tests..."); console.log("Running findNextAvailableCidr tests...");
// Test 0: Basic IPv4 allocation with a subnet in the wrong range // Test 0: Basic IPv4 allocation with a subnet in the wrong range
{ {
const existing = ["100.90.130.1/30", "100.90.128.4/30"]; const existing = ["100.90.130.1/30", "100.90.128.4/30"];
@@ -23,7 +23,11 @@ function testFindNextAvailableCidr() {
{ {
const existing = ["10.0.0.0/16", "10.2.0.0/16"]; const existing = ["10.0.0.0/16", "10.2.0.0/16"];
const result = findNextAvailableCidr(existing, 16, "10.0.0.0/8"); const result = findNextAvailableCidr(existing, 16, "10.0.0.0/8");
assertEquals(result, "10.1.0.0/16", "Finding gap between allocations failed"); assertEquals(
result,
"10.1.0.0/16",
"Finding gap between allocations failed"
);
} }
// Test 3: No available space // Test 3: No available space
@@ -33,7 +37,7 @@ function testFindNextAvailableCidr() {
assertEquals(result, null, "No available space test failed"); assertEquals(result, null, "No available space test failed");
} }
// Test 4: Empty existing // Test 4: Empty existing
{ {
const existing: string[] = []; const existing: string[] = [];
const result = findNextAvailableCidr(existing, 30, "10.0.0.0/8"); const result = findNextAvailableCidr(existing, 30, "10.0.0.0/8");
@@ -137,4 +141,4 @@ try {
} catch (error) { } catch (error) {
console.error("Test failed:", error); console.error("Test failed:", error);
process.exit(1); process.exit(1);
} }
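A usage sketch based on the tests above; the expected result assumes the function returns the first free block in order, as the gap case in Test 2 implies:

    const assigned = ["100.90.128.0/30", "100.90.128.4/30"];
    const next = findNextAvailableCidr(assigned, 30, "100.90.128.0/24");
    // next === "100.90.128.8/30": .0-.3 and .4-.7 are taken, so .8/30 is the first gap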

@@ -247,7 +247,10 @@ export async function getNextAvailableClientSubnet(
orgId: string, orgId: string,
transaction: Transaction | typeof db = db transaction: Transaction | typeof db = db
): Promise<string> { ): Promise<string> {
const [org] = await transaction.select().from(orgs).where(eq(orgs.orgId, orgId)); const [org] = await transaction
.select()
.from(orgs)
.where(eq(orgs.orgId, orgId));
if (!org) { if (!org) {
throw new Error(`Organization with ID ${orgId} not found`); throw new Error(`Organization with ID ${orgId} not found`);
@@ -360,7 +363,9 @@ export async function getNextAvailableOrgSubnet(): Promise<string> {
return subnet; return subnet;
} }
export function generateRemoteSubnets(allSiteResources: SiteResource[]): string[] { export function generateRemoteSubnets(
allSiteResources: SiteResource[]
): string[] {
const remoteSubnets = allSiteResources const remoteSubnets = allSiteResources
.filter((sr) => { .filter((sr) => {
if (sr.mode === "cidr") return true; if (sr.mode === "cidr") return true;

@@ -14,4 +14,4 @@ export async function logAccessAudit(data: {
requestIp?: string; requestIp?: string;
}) { }) {
return; return;
} }

@@ -14,7 +14,8 @@ export const configSchema = z
.object({ .object({
app: z app: z
.object({ .object({
dashboard_url: z.url() dashboard_url: z
.url()
.pipe(z.url()) .pipe(z.url())
.transform((url) => url.toLowerCase()) .transform((url) => url.toLowerCase())
.optional(), .optional(),
@@ -255,7 +256,10 @@ export const configSchema = z
.object({ .object({
block_size: z.number().positive().gt(0).optional().default(24), block_size: z.number().positive().gt(0).optional().default(24),
subnet_group: z.string().optional().default("100.90.128.0/24"), subnet_group: z.string().optional().default("100.90.128.0/24"),
utility_subnet_group: z.string().optional().default("100.96.128.0/24") //just hardcode this for now as well utility_subnet_group: z
.string()
.optional()
.default("100.96.128.0/24") //just hardcode this for now as well
}) })
.optional() .optional()
.default({ .default({

@@ -32,7 +32,7 @@ import logger from "@server/logger";
import { import {
generateAliasConfig, generateAliasConfig,
generateRemoteSubnets, generateRemoteSubnets,
generateSubnetProxyTargets, generateSubnetProxyTargets
} from "@server/lib/ip"; } from "@server/lib/ip";
import { import {
addPeerData, addPeerData,
@@ -109,21 +109,22 @@ export async function getClientSiteResourceAccess(
const directClientIds = allClientSiteResources.map((row) => row.clientId); const directClientIds = allClientSiteResources.map((row) => row.clientId);
// Get full client details for directly associated clients // Get full client details for directly associated clients
const directClients = directClientIds.length > 0 const directClients =
? await trx directClientIds.length > 0
.select({ ? await trx
clientId: clients.clientId, .select({
pubKey: clients.pubKey, clientId: clients.clientId,
subnet: clients.subnet pubKey: clients.pubKey,
}) subnet: clients.subnet
.from(clients) })
.where( .from(clients)
and( .where(
inArray(clients.clientId, directClientIds), and(
eq(clients.orgId, siteResource.orgId) // filter by org to prevent cross-org associations inArray(clients.clientId, directClientIds),
eq(clients.orgId, siteResource.orgId) // filter by org to prevent cross-org associations
)
) )
) : [];
: [];
// Merge user-based clients with directly associated clients // Merge user-based clients with directly associated clients
const allClientsMap = new Map( const allClientsMap = new Map(
@@ -731,9 +732,10 @@ async function handleSubnetProxyTargetUpdates(
); );
// Only remove remote subnet if no other resource uses the same destination // Only remove remote subnet if no other resource uses the same destination
const remoteSubnetsToRemove = destinationStillInUse.length > 0 const remoteSubnetsToRemove =
? [] destinationStillInUse.length > 0
: generateRemoteSubnets([siteResource]); ? []
: generateRemoteSubnets([siteResource]);
olmJobs.push( olmJobs.push(
removePeerData( removePeerData(
@@ -817,7 +819,10 @@ export async function rebuildClientAssociationsFromClient(
.from(roleSiteResources) .from(roleSiteResources)
.innerJoin( .innerJoin(
siteResources, siteResources,
eq(siteResources.siteResourceId, roleSiteResources.siteResourceId) eq(
siteResources.siteResourceId,
roleSiteResources.siteResourceId
)
) )
.where( .where(
and( and(
@@ -1277,9 +1282,10 @@ async function handleMessagesForClientResources(
); );
// Only remove remote subnet if no other resource uses the same destination // Only remove remote subnet if no other resource uses the same destination
const remoteSubnetsToRemove = destinationStillInUse.length > 0 const remoteSubnetsToRemove =
? [] destinationStillInUse.length > 0
: generateRemoteSubnets([resource]); ? []
: generateRemoteSubnets([resource]);
// Remove peer data from olm // Remove peer data from olm
olmJobs.push( olmJobs.push(

@@ -1,8 +1,8 @@
export enum AudienceIds { export enum AudienceIds {
SignUps = "", SignUps = "",
Subscribed = "", Subscribed = "",
Churned = "", Churned = "",
Newsletter = "" Newsletter = ""
} }
let resend; let resend;

@@ -3,14 +3,14 @@ import { Response } from "express";
export const response = <T>( export const response = <T>(
res: Response, res: Response,
{ data, success, error, message, status }: ResponseT<T>, { data, success, error, message, status }: ResponseT<T>
) => { ) => {
return res.status(status).send({ return res.status(status).send({
data, data,
success, success,
error, error,
message, message,
status, status
}); });
}; };
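A hypothetical call site for the helper; field values are illustrative, and ResponseT&lt;T&gt; is assumed to carry exactly the five fields the helper forwards:

    response(res, {
        data: { userId: "u1" }, // illustrative payload
        success: true,
        error: null,
        message: "Fetched user",
        status: 200
    });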

@@ -1,5 +1,5 @@
import { S3Client } from "@aws-sdk/client-s3"; import { S3Client } from "@aws-sdk/client-s3";
export const s3Client = new S3Client({ export const s3Client = new S3Client({
region: process.env.S3_REGION || "us-east-1", region: process.env.S3_REGION || "us-east-1"
}); });

@@ -6,7 +6,7 @@ let serverIp: string | null = null;
const services = [ const services = [
"https://checkip.amazonaws.com", "https://checkip.amazonaws.com",
"https://ifconfig.io/ip", "https://ifconfig.io/ip",
"https://api.ipify.org", "https://api.ipify.org"
]; ];
export async function fetchServerIp() { export async function fetchServerIp() {
@@ -17,7 +17,9 @@ export async function fetchServerIp() {
logger.debug("Detected public IP: " + serverIp); logger.debug("Detected public IP: " + serverIp);
return; return;
} catch (err: any) { } catch (err: any) {
console.warn(`Failed to fetch server IP from ${url}: ${err.message || err.code}`); console.warn(
`Failed to fetch server IP from ${url}: ${err.message || err.code}`
);
} }
} }

@@ -1,8 +1,7 @@
export default function stoi(val: any) { export default function stoi(val: any) {
if (typeof val === "string") { if (typeof val === "string") {
return parseInt(val); return parseInt(val);
} else {
return val;
} }
else { }
return val;
}
}

@@ -195,7 +195,9 @@ export class TraefikConfigManager {
state.set(domain, { state.set(domain, {
exists: certExists && keyExists, exists: certExists && keyExists,
lastModified: lastModified ? Math.floor(lastModified.getTime() / 1000) : null, lastModified: lastModified
? Math.floor(lastModified.getTime() / 1000)
: null,
expiresAt, expiresAt,
wildcard wildcard
}); });
@@ -464,7 +466,9 @@ export class TraefikConfigManager {
config.getRawConfig().traefik.site_types, config.getRawConfig().traefik.site_types,
build == "oss", // filter out the namespace domains in open source build == "oss", // filter out the namespace domains in open source
build != "oss", // generate the login pages on the cloud and hybrid, build != "oss", // generate the login pages on the cloud and hybrid,
build == "saas" ? false : config.getRawConfig().traefik.allow_raw_resources // dont allow raw resources on saas otherwise use config build == "saas"
? false
: config.getRawConfig().traefik.allow_raw_resources // dont allow raw resources on saas otherwise use config
); );
const domains = new Set<string>(); const domains = new Set<string>();
@@ -788,7 +792,10 @@ export class TraefikConfigManager {
// Store the certificate expiry time // Store the certificate expiry time
if (cert.expiresAt) { if (cert.expiresAt) {
const expiresAtPath = path.join(domainDir, ".expires_at"); const expiresAtPath = path.join(
domainDir,
".expires_at"
);
fs.writeFileSync( fs.writeFileSync(
expiresAtPath, expiresAtPath,
cert.expiresAt.toString(), cert.expiresAt.toString(),

@@ -1 +1 @@
export * from "./getTraefikConfig"; export * from "./getTraefikConfig";

@@ -2,234 +2,249 @@ import { assertEquals } from "@test/assert";
import { isDomainCoveredByWildcard } from "./TraefikConfigManager"; import { isDomainCoveredByWildcard } from "./TraefikConfigManager";
function runTests() { function runTests() {
console.log('Running wildcard domain coverage tests...'); console.log("Running wildcard domain coverage tests...");
// Test case 1: Basic wildcard certificate at example.com // Test case 1: Basic wildcard certificate at example.com
const basicWildcardCerts = new Map([ const basicWildcardCerts = new Map([
['example.com', { exists: true, wildcard: true }] ["example.com", { exists: true, wildcard: true }]
]); ]);
// Should match first-level subdomains // Should match first-level subdomains
assertEquals( assertEquals(
isDomainCoveredByWildcard('level1.example.com', basicWildcardCerts), isDomainCoveredByWildcard("level1.example.com", basicWildcardCerts),
true, true,
'Wildcard cert at example.com should match level1.example.com' "Wildcard cert at example.com should match level1.example.com"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('api.example.com', basicWildcardCerts), isDomainCoveredByWildcard("api.example.com", basicWildcardCerts),
true, true,
'Wildcard cert at example.com should match api.example.com' "Wildcard cert at example.com should match api.example.com"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('www.example.com', basicWildcardCerts), isDomainCoveredByWildcard("www.example.com", basicWildcardCerts),
true, true,
'Wildcard cert at example.com should match www.example.com' "Wildcard cert at example.com should match www.example.com"
); );
// Should match the root domain (exact match) // Should match the root domain (exact match)
assertEquals( assertEquals(
isDomainCoveredByWildcard('example.com', basicWildcardCerts), isDomainCoveredByWildcard("example.com", basicWildcardCerts),
true, true,
'Wildcard cert at example.com should match example.com itself' "Wildcard cert at example.com should match example.com itself"
); );
// Should NOT match second-level subdomains // Should NOT match second-level subdomains
assertEquals( assertEquals(
isDomainCoveredByWildcard('level2.level1.example.com', basicWildcardCerts), isDomainCoveredByWildcard(
"level2.level1.example.com",
basicWildcardCerts
),
false, false,
'Wildcard cert at example.com should NOT match level2.level1.example.com' "Wildcard cert at example.com should NOT match level2.level1.example.com"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('deep.nested.subdomain.example.com', basicWildcardCerts), isDomainCoveredByWildcard(
"deep.nested.subdomain.example.com",
basicWildcardCerts
),
false, false,
'Wildcard cert at example.com should NOT match deep.nested.subdomain.example.com' "Wildcard cert at example.com should NOT match deep.nested.subdomain.example.com"
); );
// Should NOT match different domains // Should NOT match different domains
assertEquals( assertEquals(
isDomainCoveredByWildcard('test.otherdomain.com', basicWildcardCerts), isDomainCoveredByWildcard("test.otherdomain.com", basicWildcardCerts),
false, false,
'Wildcard cert at example.com should NOT match test.otherdomain.com' "Wildcard cert at example.com should NOT match test.otherdomain.com"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('notexample.com', basicWildcardCerts), isDomainCoveredByWildcard("notexample.com", basicWildcardCerts),
false, false,
'Wildcard cert at example.com should NOT match notexample.com' "Wildcard cert at example.com should NOT match notexample.com"
); );
// Test case 2: Multiple wildcard certificates // Test case 2: Multiple wildcard certificates
const multipleWildcardCerts = new Map([ const multipleWildcardCerts = new Map([
['example.com', { exists: true, wildcard: true }], ["example.com", { exists: true, wildcard: true }],
['test.org', { exists: true, wildcard: true }], ["test.org", { exists: true, wildcard: true }],
['api.service.net', { exists: true, wildcard: true }] ["api.service.net", { exists: true, wildcard: true }]
]); ]);
assertEquals( assertEquals(
isDomainCoveredByWildcard('app.example.com', multipleWildcardCerts), isDomainCoveredByWildcard("app.example.com", multipleWildcardCerts),
true, true,
'Should match subdomain of first wildcard cert' "Should match subdomain of first wildcard cert"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('staging.test.org', multipleWildcardCerts), isDomainCoveredByWildcard("staging.test.org", multipleWildcardCerts),
true, true,
'Should match subdomain of second wildcard cert' "Should match subdomain of second wildcard cert"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('v1.api.service.net', multipleWildcardCerts), isDomainCoveredByWildcard("v1.api.service.net", multipleWildcardCerts),
true, true,
'Should match subdomain of third wildcard cert' "Should match subdomain of third wildcard cert"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('deep.nested.api.service.net', multipleWildcardCerts), isDomainCoveredByWildcard(
"deep.nested.api.service.net",
multipleWildcardCerts
),
false, false,
'Should NOT match multi-level subdomain of third wildcard cert' "Should NOT match multi-level subdomain of third wildcard cert"
); );
// Test exact domain matches for multiple certs // Test exact domain matches for multiple certs
assertEquals( assertEquals(
isDomainCoveredByWildcard('example.com', multipleWildcardCerts), isDomainCoveredByWildcard("example.com", multipleWildcardCerts),
true, true,
'Should match exact domain of first wildcard cert' "Should match exact domain of first wildcard cert"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('test.org', multipleWildcardCerts), isDomainCoveredByWildcard("test.org", multipleWildcardCerts),
true, true,
'Should match exact domain of second wildcard cert' "Should match exact domain of second wildcard cert"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('api.service.net', multipleWildcardCerts), isDomainCoveredByWildcard("api.service.net", multipleWildcardCerts),
true, true,
'Should match exact domain of third wildcard cert' "Should match exact domain of third wildcard cert"
); );
// Test case 3: Non-wildcard certificates (should not match anything) // Test case 3: Non-wildcard certificates (should not match anything)
const nonWildcardCerts = new Map([ const nonWildcardCerts = new Map([
['example.com', { exists: true, wildcard: false }], ["example.com", { exists: true, wildcard: false }],
['specific.domain.com', { exists: true, wildcard: false }] ["specific.domain.com", { exists: true, wildcard: false }]
]); ]);
assertEquals( assertEquals(
isDomainCoveredByWildcard('sub.example.com', nonWildcardCerts), isDomainCoveredByWildcard("sub.example.com", nonWildcardCerts),
false, false,
'Non-wildcard cert should not match subdomains' "Non-wildcard cert should not match subdomains"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('example.com', nonWildcardCerts), isDomainCoveredByWildcard("example.com", nonWildcardCerts),
false, false,
'Non-wildcard cert should not match even exact domain via this function' "Non-wildcard cert should not match even exact domain via this function"
); );
// Test case 4: Non-existent certificates (should not match) // Test case 4: Non-existent certificates (should not match)
const nonExistentCerts = new Map([ const nonExistentCerts = new Map([
['example.com', { exists: false, wildcard: true }], ["example.com", { exists: false, wildcard: true }],
['missing.com', { exists: false, wildcard: true }] ["missing.com", { exists: false, wildcard: true }]
]); ]);
assertEquals( assertEquals(
isDomainCoveredByWildcard('sub.example.com', nonExistentCerts), isDomainCoveredByWildcard("sub.example.com", nonExistentCerts),
false, false,
'Non-existent wildcard cert should not match' "Non-existent wildcard cert should not match"
); );
// Test case 5: Edge cases with special domain names // Test case 5: Edge cases with special domain names
const specialDomainCerts = new Map([ const specialDomainCerts = new Map([
['localhost', { exists: true, wildcard: true }], ["localhost", { exists: true, wildcard: true }],
['127-0-0-1.nip.io', { exists: true, wildcard: true }], ["127-0-0-1.nip.io", { exists: true, wildcard: true }],
['xn--e1afmkfd.xn--p1ai', { exists: true, wildcard: true }] // IDN domain ["xn--e1afmkfd.xn--p1ai", { exists: true, wildcard: true }] // IDN domain
]); ]);
assertEquals( assertEquals(
isDomainCoveredByWildcard('app.localhost', specialDomainCerts), isDomainCoveredByWildcard("app.localhost", specialDomainCerts),
true, true,
'Should match subdomain of localhost wildcard' "Should match subdomain of localhost wildcard"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('test.127-0-0-1.nip.io', specialDomainCerts), isDomainCoveredByWildcard("test.127-0-0-1.nip.io", specialDomainCerts),
true, true,
'Should match subdomain of nip.io wildcard' "Should match subdomain of nip.io wildcard"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('sub.xn--e1afmkfd.xn--p1ai', specialDomainCerts), isDomainCoveredByWildcard(
"sub.xn--e1afmkfd.xn--p1ai",
specialDomainCerts
),
true, true,
'Should match subdomain of IDN wildcard' "Should match subdomain of IDN wildcard"
); );
// Test case 6: Empty input and edge cases // Test case 6: Empty input and edge cases
const emptyCerts = new Map(); const emptyCerts = new Map();
assertEquals( assertEquals(
isDomainCoveredByWildcard('any.domain.com', emptyCerts), isDomainCoveredByWildcard("any.domain.com", emptyCerts),
false, false,
'Empty certificate map should not match any domain' "Empty certificate map should not match any domain"
); );
// Test case 7: Domains with single character components // Test case 7: Domains with single character components
const singleCharCerts = new Map([ const singleCharCerts = new Map([
['a.com', { exists: true, wildcard: true }], ["a.com", { exists: true, wildcard: true }],
['x.y.z', { exists: true, wildcard: true }] ["x.y.z", { exists: true, wildcard: true }]
]); ]);
assertEquals( assertEquals(
isDomainCoveredByWildcard('b.a.com', singleCharCerts), isDomainCoveredByWildcard("b.a.com", singleCharCerts),
true, true,
'Should match single character subdomain' "Should match single character subdomain"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('w.x.y.z', singleCharCerts), isDomainCoveredByWildcard("w.x.y.z", singleCharCerts),
true, true,
'Should match single character subdomain of multi-part domain' "Should match single character subdomain of multi-part domain"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('v.w.x.y.z', singleCharCerts), isDomainCoveredByWildcard("v.w.x.y.z", singleCharCerts),
false, false,
'Should NOT match multi-level subdomain of single char domain' "Should NOT match multi-level subdomain of single char domain"
); );
// Test case 8: Domains with numbers and hyphens // Test case 8: Domains with numbers and hyphens
const numericCerts = new Map([ const numericCerts = new Map([
['api-v2.service-1.com', { exists: true, wildcard: true }], ["api-v2.service-1.com", { exists: true, wildcard: true }],
['123.456.net', { exists: true, wildcard: true }] ["123.456.net", { exists: true, wildcard: true }]
]); ]);
assertEquals( assertEquals(
isDomainCoveredByWildcard('staging.api-v2.service-1.com', numericCerts), isDomainCoveredByWildcard("staging.api-v2.service-1.com", numericCerts),
true, true,
'Should match subdomain with hyphens and numbers' "Should match subdomain with hyphens and numbers"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('test.123.456.net', numericCerts), isDomainCoveredByWildcard("test.123.456.net", numericCerts),
true, true,
'Should match subdomain with numeric components' "Should match subdomain with numeric components"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('deep.staging.api-v2.service-1.com', numericCerts), isDomainCoveredByWildcard(
"deep.staging.api-v2.service-1.com",
numericCerts
),
false, false,
'Should NOT match multi-level subdomain with hyphens and numbers' "Should NOT match multi-level subdomain with hyphens and numbers"
); );
console.log('All wildcard domain coverage tests passed!'); console.log("All wildcard domain coverage tests passed!");
} }
// Run all tests // Run all tests
try { try {
runTests(); runTests();
} catch (error) { } catch (error) {
console.error('Test failed:', error); console.error("Test failed:", error);
process.exit(1); process.exit(1);
} }
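A sketch consistent with the tests above: a wildcard certificate at a base domain covers the base itself plus exactly one extra label, and only when the entry both exists and is a wildcard (assumed shape; the real TraefikConfigManager implementation may differ):

    type CertState = { exists: boolean; wildcard: boolean };

    function isDomainCoveredByWildcard(
        domain: string,
        certs: Map<string, CertState>
    ): boolean {
        for (const [base, cert] of certs) {
            if (!cert.exists || !cert.wildcard) continue;
            if (domain === base) return true; // exact match
            if (domain.endsWith("." + base)) {
                const sub = domain.slice(0, domain.length - base.length - 1);
                if (!sub.includes(".")) return true; // first-level subdomain only
            }
        }
        return false;
    }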

@@ -31,12 +31,17 @@ export function validatePathRewriteConfig(
} }
if (rewritePathType !== "stripPrefix") { if (rewritePathType !== "stripPrefix") {
if ((rewritePath && !rewritePathType) || (!rewritePath && rewritePathType)) { if (
return { isValid: false, error: "Both rewritePath and rewritePathType must be specified together" }; (rewritePath && !rewritePathType) ||
(!rewritePath && rewritePathType)
) {
return {
isValid: false,
error: "Both rewritePath and rewritePathType must be specified together"
};
} }
} }
if (!rewritePath || !rewritePathType) { if (!rewritePath || !rewritePathType) {
return { isValid: true }; return { isValid: true };
} }
@@ -68,14 +73,14 @@ export function validatePathRewriteConfig(
} }
} }
// Additional validation for stripPrefix // Additional validation for stripPrefix
if (rewritePathType === "stripPrefix") { if (rewritePathType === "stripPrefix") {
if (pathMatchType !== "prefix") { if (pathMatchType !== "prefix") {
logger.warn(`stripPrefix rewrite type is most effective with prefix path matching. Current match type: ${pathMatchType}`); logger.warn(
`stripPrefix rewrite type is most effective with prefix path matching. Current match type: ${pathMatchType}`
);
} }
} }
return { isValid: true }; return { isValid: true };
} }

@@ -1,71 +1,247 @@
import { isValidUrlGlobPattern } from "./validators"; import { isValidUrlGlobPattern } from "./validators";
import { assertEquals } from "@test/assert"; import { assertEquals } from "@test/assert";
function runTests() { function runTests() {
console.log('Running URL pattern validation tests...'); console.log("Running URL pattern validation tests...");
// Test valid patterns // Test valid patterns
assertEquals(isValidUrlGlobPattern('simple'), true, 'Simple path segment should be valid'); assertEquals(
assertEquals(isValidUrlGlobPattern('simple/path'), true, 'Simple path with slash should be valid'); isValidUrlGlobPattern("simple"),
assertEquals(isValidUrlGlobPattern('/leading/slash'), true, 'Path with leading slash should be valid'); true,
assertEquals(isValidUrlGlobPattern('path/'), true, 'Path with trailing slash should be valid'); "Simple path segment should be valid"
assertEquals(isValidUrlGlobPattern('path/*'), true, 'Path with wildcard segment should be valid'); );
assertEquals(isValidUrlGlobPattern('*'), true, 'Single wildcard should be valid'); assertEquals(
assertEquals(isValidUrlGlobPattern('*/subpath'), true, 'Wildcard with subpath should be valid'); isValidUrlGlobPattern("simple/path"),
assertEquals(isValidUrlGlobPattern('path/*/more'), true, 'Path with wildcard in the middle should be valid'); true,
"Simple path with slash should be valid"
);
assertEquals(
isValidUrlGlobPattern("/leading/slash"),
true,
"Path with leading slash should be valid"
);
assertEquals(
isValidUrlGlobPattern("path/"),
true,
"Path with trailing slash should be valid"
);
assertEquals(
isValidUrlGlobPattern("path/*"),
true,
"Path with wildcard segment should be valid"
);
assertEquals(
isValidUrlGlobPattern("*"),
true,
"Single wildcard should be valid"
);
assertEquals(
isValidUrlGlobPattern("*/subpath"),
true,
"Wildcard with subpath should be valid"
);
assertEquals(
isValidUrlGlobPattern("path/*/more"),
true,
"Path with wildcard in the middle should be valid"
);
// Test with special characters // Test with special characters
assertEquals(isValidUrlGlobPattern('path-with-dash'), true, 'Path with dash should be valid'); assertEquals(
assertEquals(isValidUrlGlobPattern('path_with_underscore'), true, 'Path with underscore should be valid'); isValidUrlGlobPattern("path-with-dash"),
assertEquals(isValidUrlGlobPattern('path.with.dots'), true, 'Path with dots should be valid'); true,
assertEquals(isValidUrlGlobPattern('path~with~tilde'), true, 'Path with tilde should be valid'); "Path with dash should be valid"
assertEquals(isValidUrlGlobPattern('path!with!exclamation'), true, 'Path with exclamation should be valid'); );
assertEquals(isValidUrlGlobPattern('path$with$dollar'), true, 'Path with dollar should be valid'); assertEquals(
assertEquals(isValidUrlGlobPattern('path&with&ampersand'), true, 'Path with ampersand should be valid'); isValidUrlGlobPattern("path_with_underscore"),
assertEquals(isValidUrlGlobPattern("path'with'quote"), true, "Path with quote should be valid"); true,
assertEquals(isValidUrlGlobPattern('path(with)parentheses'), true, 'Path with parentheses should be valid'); "Path with underscore should be valid"
assertEquals(isValidUrlGlobPattern('path+with+plus'), true, 'Path with plus should be valid'); );
assertEquals(isValidUrlGlobPattern('path,with,comma'), true, 'Path with comma should be valid'); assertEquals(
assertEquals(isValidUrlGlobPattern('path;with;semicolon'), true, 'Path with semicolon should be valid'); isValidUrlGlobPattern("path.with.dots"),
assertEquals(isValidUrlGlobPattern('path=with=equals'), true, 'Path with equals should be valid'); true,
assertEquals(isValidUrlGlobPattern('path:with:colon'), true, 'Path with colon should be valid'); "Path with dots should be valid"
assertEquals(isValidUrlGlobPattern('path@with@at'), true, 'Path with at should be valid'); );
assertEquals(
isValidUrlGlobPattern("path~with~tilde"),
true,
"Path with tilde should be valid"
);
assertEquals(
isValidUrlGlobPattern("path!with!exclamation"),
true,
"Path with exclamation should be valid"
);
assertEquals(
isValidUrlGlobPattern("path$with$dollar"),
true,
"Path with dollar should be valid"
);
assertEquals(
isValidUrlGlobPattern("path&with&ampersand"),
true,
"Path with ampersand should be valid"
);
assertEquals(
isValidUrlGlobPattern("path'with'quote"),
true,
"Path with quote should be valid"
);
assertEquals(
isValidUrlGlobPattern("path(with)parentheses"),
true,
"Path with parentheses should be valid"
);
assertEquals(
isValidUrlGlobPattern("path+with+plus"),
true,
"Path with plus should be valid"
);
assertEquals(
isValidUrlGlobPattern("path,with,comma"),
true,
"Path with comma should be valid"
);
assertEquals(
isValidUrlGlobPattern("path;with;semicolon"),
true,
"Path with semicolon should be valid"
);
assertEquals(
isValidUrlGlobPattern("path=with=equals"),
true,
"Path with equals should be valid"
);
assertEquals(
isValidUrlGlobPattern("path:with:colon"),
true,
"Path with colon should be valid"
);
assertEquals(
isValidUrlGlobPattern("path@with@at"),
true,
"Path with at should be valid"
);
// Test with percent encoding // Test with percent encoding
assertEquals(isValidUrlGlobPattern('path%20with%20spaces'), true, 'Path with percent-encoded spaces should be valid'); assertEquals(
assertEquals(isValidUrlGlobPattern('path%2Fwith%2Fencoded%2Fslashes'), true, 'Path with percent-encoded slashes should be valid'); isValidUrlGlobPattern("path%20with%20spaces"),
true,
"Path with percent-encoded spaces should be valid"
);
assertEquals(
isValidUrlGlobPattern("path%2Fwith%2Fencoded%2Fslashes"),
true,
"Path with percent-encoded slashes should be valid"
);
// Test with wildcards in segments (the fixed functionality) // Test with wildcards in segments (the fixed functionality)
assertEquals(isValidUrlGlobPattern('padbootstrap*'), true, 'Path with wildcard at the end of segment should be valid'); assertEquals(
assertEquals(isValidUrlGlobPattern('pad*bootstrap'), true, 'Path with wildcard in the middle of segment should be valid'); isValidUrlGlobPattern("padbootstrap*"),
assertEquals(isValidUrlGlobPattern('*bootstrap'), true, 'Path with wildcard at the start of segment should be valid'); true,
assertEquals(isValidUrlGlobPattern('multiple*wildcards*in*segment'), true, 'Path with multiple wildcards in segment should be valid'); "Path with wildcard at the end of segment should be valid"
assertEquals(isValidUrlGlobPattern('wild*/cards/in*/different/seg*ments'), true, 'Path with wildcards in different segments should be valid'); );
assertEquals(
isValidUrlGlobPattern("pad*bootstrap"),
true,
"Path with wildcard in the middle of segment should be valid"
);
assertEquals(
isValidUrlGlobPattern("*bootstrap"),
true,
"Path with wildcard at the start of segment should be valid"
);
assertEquals(
isValidUrlGlobPattern("multiple*wildcards*in*segment"),
true,
"Path with multiple wildcards in segment should be valid"
);
assertEquals(
isValidUrlGlobPattern("wild*/cards/in*/different/seg*ments"),
true,
"Path with wildcards in different segments should be valid"
);
// Test invalid patterns // Test invalid patterns
assertEquals(isValidUrlGlobPattern(''), false, 'Empty string should be invalid'); assertEquals(
assertEquals(isValidUrlGlobPattern('//double/slash'), false, 'Path with double slash should be invalid'); isValidUrlGlobPattern(""),
assertEquals(isValidUrlGlobPattern('path//end'), false, 'Path with double slash in the middle should be invalid'); false,
assertEquals(isValidUrlGlobPattern('invalid<char>'), false, 'Path with invalid characters should be invalid'); "Empty string should be invalid"
assertEquals(isValidUrlGlobPattern('invalid|char'), false, 'Path with invalid pipe character should be invalid'); );
assertEquals(isValidUrlGlobPattern('invalid"char'), false, 'Path with invalid quote character should be invalid'); assertEquals(
assertEquals(isValidUrlGlobPattern('invalid`char'), false, 'Path with invalid backtick character should be invalid'); isValidUrlGlobPattern("//double/slash"),
assertEquals(isValidUrlGlobPattern('invalid^char'), false, 'Path with invalid caret character should be invalid'); false,
assertEquals(isValidUrlGlobPattern('invalid\\char'), false, 'Path with invalid backslash character should be invalid'); "Path with double slash should be invalid"
assertEquals(isValidUrlGlobPattern('invalid[char]'), false, 'Path with invalid square brackets should be invalid'); );
assertEquals(isValidUrlGlobPattern('invalid{char}'), false, 'Path with invalid curly braces should be invalid'); assertEquals(
isValidUrlGlobPattern("path//end"),
false,
"Path with double slash in the middle should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid<char>"),
false,
"Path with invalid characters should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid|char"),
false,
"Path with invalid pipe character should be invalid"
);
assertEquals(
isValidUrlGlobPattern('invalid"char'),
false,
"Path with invalid quote character should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid`char"),
false,
"Path with invalid backtick character should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid^char"),
false,
"Path with invalid caret character should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid\\char"),
false,
"Path with invalid backslash character should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid[char]"),
false,
"Path with invalid square brackets should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid{char}"),
false,
"Path with invalid curly braces should be invalid"
);
// Test invalid percent encoding // Test invalid percent encoding
assertEquals(isValidUrlGlobPattern('invalid%2'), false, 'Path with incomplete percent encoding should be invalid'); assertEquals(
assertEquals(isValidUrlGlobPattern('invalid%GZ'), false, 'Path with invalid hex in percent encoding should be invalid'); isValidUrlGlobPattern("invalid%2"),
assertEquals(isValidUrlGlobPattern('invalid%'), false, 'Path with isolated percent sign should be invalid'); false,
"Path with incomplete percent encoding should be invalid"
console.log('All tests passed!'); );
assertEquals(
isValidUrlGlobPattern("invalid%GZ"),
false,
"Path with invalid hex in percent encoding should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid%"),
false,
"Path with isolated percent sign should be invalid"
);
console.log("All tests passed!");
} }
// Run all tests // Run all tests
try { try {
runTests(); runTests();
} catch (error) { } catch (error) {
console.error('Test failed:', error); console.error("Test failed:", error);
} }

@@ -2,7 +2,9 @@ import z from "zod";
import ipaddr from "ipaddr.js"; import ipaddr from "ipaddr.js";
export function isValidCIDR(cidr: string): boolean { export function isValidCIDR(cidr: string): boolean {
return z.cidrv4().safeParse(cidr).success || z.cidrv6().safeParse(cidr).success; return (
z.cidrv4().safeParse(cidr).success || z.cidrv6().safeParse(cidr).success
);
} }
export function isValidIP(ip: string): boolean { export function isValidIP(ip: string): boolean {
@@ -69,11 +71,11 @@ export function isUrlValid(url: string | undefined) {
if (!url) return true; // the link is optional in the schema so if it's empty it's valid if (!url) return true; // the link is optional in the schema so if it's empty it's valid
var pattern = new RegExp( var pattern = new RegExp(
"^(https?:\\/\\/)?" + // protocol "^(https?:\\/\\/)?" + // protocol
"((([a-z\\d]([a-z\\d-]*[a-z\\d])*)\\.)+[a-z]{2,}|" + // domain name "((([a-z\\d]([a-z\\d-]*[a-z\\d])*)\\.)+[a-z]{2,}|" + // domain name
"((\\d{1,3}\\.){3}\\d{1,3}))" + // OR ip (v4) address "((\\d{1,3}\\.){3}\\d{1,3}))" + // OR ip (v4) address
"(\\:\\d+)?(\\/[-a-z\\d%_.~+]*)*" + // port and path "(\\:\\d+)?(\\/[-a-z\\d%_.~+]*)*" + // port and path
"(\\?[;&a-z\\d%_.~+=-]*)?" + // query string "(\\?[;&a-z\\d%_.~+=-]*)?" + // query string
"(\\#[-a-z\\d_]*)?$", "(\\#[-a-z\\d_]*)?$",
"i" "i"
); );
return !!pattern.test(url); return !!pattern.test(url);
@@ -168,14 +170,14 @@ export function validateHeaders(headers: string): boolean {
} }
export function isSecondLevelDomain(domain: string): boolean { export function isSecondLevelDomain(domain: string): boolean {
if (!domain || typeof domain !== 'string') { if (!domain || typeof domain !== "string") {
return false; return false;
} }
const trimmedDomain = domain.trim().toLowerCase(); const trimmedDomain = domain.trim().toLowerCase();
// Split into parts // Split into parts
const parts = trimmedDomain.split('.'); const parts = trimmedDomain.split(".");
// Should have exactly 2 parts for a second-level domain (e.g., "example.com") // Should have exactly 2 parts for a second-level domain (e.g., "example.com")
if (parts.length !== 2) { if (parts.length !== 2) {

@@ -20,6 +20,6 @@ export const errorHandlerMiddleware: ErrorRequestHandler = (
error: true, error: true,
message: error.message || "Internal Server Error", message: error.message || "Internal Server Error",
status: statusCode, status: statusCode,
stack: process.env.ENVIRONMENT === "prod" ? null : error.stack, stack: process.env.ENVIRONMENT === "prod" ? null : error.stack
}); });
}; };

@@ -8,13 +8,13 @@ import HttpCode from "@server/types/HttpCode";
export async function getUserOrgs( export async function getUserOrgs(
req: Request, req: Request,
res: Response, res: Response,
next: NextFunction, next: NextFunction
) { ) {
const userId = req.user?.userId; // Assuming you have user information in the request const userId = req.user?.userId; // Assuming you have user information in the request
if (!userId) { if (!userId) {
return next( return next(
createHttpError(HttpCode.UNAUTHORIZED, "User not authenticated"), createHttpError(HttpCode.UNAUTHORIZED, "User not authenticated")
); );
} }
@@ -22,7 +22,7 @@ export async function getUserOrgs(
const userOrganizations = await db const userOrganizations = await db
.select({ .select({
orgId: userOrgs.orgId, orgId: userOrgs.orgId,
roleId: userOrgs.roleId, roleId: userOrgs.roleId
}) })
.from(userOrgs) .from(userOrgs)
.where(eq(userOrgs.userId, userId)); .where(eq(userOrgs.userId, userId));
@@ -38,8 +38,8 @@ export async function getUserOrgs(
next( next(
createHttpError( createHttpError(
HttpCode.INTERNAL_SERVER_ERROR, HttpCode.INTERNAL_SERVER_ERROR,
"Error retrieving user organizations", "Error retrieving user organizations"
), )
); );
} }
} }
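Hypothetical route wiring for this handler (the path is illustrative):

    import { Router } from "express";

    const router = Router();
    router.get("/user/orgs", getUserOrgs); // responds with the user's org memberships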

@@ -12,4 +12,4 @@ export * from "./verifyAccessTokenAccess";
export * from "./verifyApiKeyIsRoot"; export * from "./verifyApiKeyIsRoot";
export * from "./verifyApiKeyApiKeyAccess"; export * from "./verifyApiKeyApiKeyAccess";
export * from "./verifyApiKeyClientAccess"; export * from "./verifyApiKeyClientAccess";
export * from "./verifyApiKeySiteResourceAccess"; export * from "./verifyApiKeySiteResourceAccess";

@@ -97,7 +97,6 @@ export async function verifyApiKeyAccessTokenAccess(
); );
} }
return next(); return next();
} catch (e) { } catch (e) {
return next( return next(

@@ -11,7 +11,7 @@ export async function verifyApiKeyApiKeyAccess(
next: NextFunction next: NextFunction
) { ) {
try { try {
const {apiKey: callerApiKey } = req; const { apiKey: callerApiKey } = req;
const apiKeyId = const apiKeyId =
req.params.apiKeyId || req.body.apiKeyId || req.query.apiKeyId; req.params.apiKeyId || req.body.apiKeyId || req.query.apiKeyId;
@@ -44,7 +44,10 @@ export async function verifyApiKeyApiKeyAccess(
.select() .select()
.from(apiKeyOrg) .from(apiKeyOrg)
.where( .where(
and(eq(apiKeys.apiKeyId, callerApiKey.apiKeyId), eq(apiKeyOrg.orgId, orgId)) and(
eq(apiKeys.apiKeyId, callerApiKey.apiKeyId),
eq(apiKeyOrg.orgId, orgId)
)
) )
.limit(1); .limit(1);

@@ -11,9 +11,12 @@ export async function verifyApiKeySetResourceClients(
next: NextFunction next: NextFunction
) { ) {
const apiKey = req.apiKey; const apiKey = req.apiKey;
const singleClientId = req.params.clientId || req.body.clientId || req.query.clientId; const singleClientId =
req.params.clientId || req.body.clientId || req.query.clientId;
const { clientIds } = req.body; const { clientIds } = req.body;
const allClientIds = clientIds || (singleClientId ? [parseInt(singleClientId as string)] : []); const allClientIds =
clientIds ||
(singleClientId ? [parseInt(singleClientId as string)] : []);
if (!apiKey) { if (!apiKey) {
return next( return next(
@@ -70,4 +73,3 @@ export async function verifyApiKeySetResourceClients(
); );
} }
} }

@@ -11,7 +11,8 @@ export async function verifyApiKeySetResourceUsers(
next: NextFunction next: NextFunction
) { ) {
const apiKey = req.apiKey; const apiKey = req.apiKey;
const singleUserId = req.params.userId || req.body.userId || req.query.userId; const singleUserId =
req.params.userId || req.body.userId || req.query.userId;
const { userIds } = req.body; const { userIds } = req.body;
const allUserIds = userIds || (singleUserId ? [singleUserId] : []); const allUserIds = userIds || (singleUserId ? [singleUserId] : []);

@@ -38,17 +38,12 @@ export async function verifyApiKeySiteResourceAccess(
const [siteResource] = await db const [siteResource] = await db
.select() .select()
.from(siteResources) .from(siteResources)
.where(and( .where(and(eq(siteResources.siteResourceId, siteResourceId)))
eq(siteResources.siteResourceId, siteResourceId)
))
.limit(1); .limit(1);
if (!siteResource) { if (!siteResource) {
return next( return next(
createHttpError( createHttpError(HttpCode.NOT_FOUND, "Site resource not found")
HttpCode.NOT_FOUND,
"Site resource not found"
)
); );
} }

@@ -5,7 +5,7 @@ import HttpCode from "@server/types/HttpCode";
export function notFoundMiddleware( export function notFoundMiddleware(
req: Request, req: Request,
res: Response, res: Response,
next: NextFunction, next: NextFunction
) { ) {
if (req.path.startsWith("/api")) { if (req.path.startsWith("/api")) {
const message = `The requested url is not found - ${req.originalUrl}`; const message = `The requested url is not found - ${req.originalUrl}`;

@@ -1,30 +1,32 @@
import { Request, Response, NextFunction } from 'express'; import { Request, Response, NextFunction } from "express";
import logger from '@server/logger'; import logger from "@server/logger";
import createHttpError from 'http-errors'; import createHttpError from "http-errors";
import HttpCode from '@server/types/HttpCode'; import HttpCode from "@server/types/HttpCode";
export function requestTimeoutMiddleware(timeoutMs: number = 30000) { export function requestTimeoutMiddleware(timeoutMs: number = 30000) {
return (req: Request, res: Response, next: NextFunction) => { return (req: Request, res: Response, next: NextFunction) => {
// Set a timeout for the request // Set a timeout for the request
const timeout = setTimeout(() => { const timeout = setTimeout(() => {
if (!res.headersSent) { if (!res.headersSent) {
logger.error(`Request timeout: ${req.method} ${req.url} from ${req.ip}`); logger.error(
`Request timeout: ${req.method} ${req.url} from ${req.ip}`
);
return next( return next(
createHttpError( createHttpError(
HttpCode.REQUEST_TIMEOUT, HttpCode.REQUEST_TIMEOUT,
'Request timeout - operation took too long to complete' "Request timeout - operation took too long to complete"
) )
); );
} }
}, timeoutMs); }, timeoutMs);
// Clear timeout when response finishes // Clear timeout when response finishes
res.on('finish', () => { res.on("finish", () => {
clearTimeout(timeout); clearTimeout(timeout);
}); });
// Clear timeout when response closes // Clear timeout when response closes
res.on('close', () => { res.on("close", () => {
clearTimeout(timeout); clearTimeout(timeout);
}); });
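Hypothetical wiring, assuming a standard Express app; the factory defaults to 30000 ms and clears its timer on both finish and close, so it is safe to register globally:

    import express from "express";

    const app = express();
    app.use(requestTimeoutMiddleware(15000)); // 15 s budget per request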

@@ -76,7 +76,10 @@ export async function verifySiteAccess(
.select() .select()
.from(userOrgs) .from(userOrgs)
.where( .where(
and(eq(userOrgs.userId, userId), eq(userOrgs.orgId, site.orgId)) and(
eq(userOrgs.userId, userId),
eq(userOrgs.orgId, site.orgId)
)
) )
.limit(1); .limit(1);
req.userOrg = userOrgRole[0]; req.userOrg = userOrgRole[0];

@@ -9,7 +9,10 @@ const nextPort = config.getRawConfig().server.next_port;
export async function createNextServer() { export async function createNextServer() {
// const app = next({ dev }); // const app = next({ dev });
const app = next({ dev: process.env.ENVIRONMENT !== "prod", turbopack: true }); const app = next({
dev: process.env.ENVIRONMENT !== "prod",
turbopack: true
});
const handle = app.getRequestHandler(); const handle = app.getRequestHandler();
await app.prepare(); await app.prepare();

@@ -11,11 +11,14 @@
* This file is not licensed under the AGPLv3. * This file is not licensed under the AGPLv3.
*/ */
import { import { encodeHexLowerCase } from "@oslojs/encoding";
encodeHexLowerCase,
} from "@oslojs/encoding";
import { sha256 } from "@oslojs/crypto/sha2"; import { sha256 } from "@oslojs/crypto/sha2";
import { RemoteExitNode, remoteExitNodes, remoteExitNodeSessions, RemoteExitNodeSession } from "@server/db"; import {
RemoteExitNode,
remoteExitNodes,
remoteExitNodeSessions,
RemoteExitNodeSession
} from "@server/db";
import { db } from "@server/db"; import { db } from "@server/db";
import { eq } from "drizzle-orm"; import { eq } from "drizzle-orm";
@@ -23,30 +26,39 @@ export const EXPIRES = 1000 * 60 * 60 * 24 * 30;
export async function createRemoteExitNodeSession( export async function createRemoteExitNodeSession(
token: string, token: string,
remoteExitNodeId: string, remoteExitNodeId: string
): Promise<RemoteExitNodeSession> { ): Promise<RemoteExitNodeSession> {
const sessionId = encodeHexLowerCase( const sessionId = encodeHexLowerCase(
sha256(new TextEncoder().encode(token)), sha256(new TextEncoder().encode(token))
); );
const session: RemoteExitNodeSession = { const session: RemoteExitNodeSession = {
sessionId: sessionId, sessionId: sessionId,
remoteExitNodeId, remoteExitNodeId,
expiresAt: new Date(Date.now() + EXPIRES).getTime(), expiresAt: new Date(Date.now() + EXPIRES).getTime()
}; };
await db.insert(remoteExitNodeSessions).values(session); await db.insert(remoteExitNodeSessions).values(session);
return session; return session;
} }
export async function validateRemoteExitNodeSessionToken( export async function validateRemoteExitNodeSessionToken(
token: string, token: string
): Promise<SessionValidationResult> { ): Promise<SessionValidationResult> {
const sessionId = encodeHexLowerCase( const sessionId = encodeHexLowerCase(
sha256(new TextEncoder().encode(token)), sha256(new TextEncoder().encode(token))
); );
const result = await db const result = await db
.select({ remoteExitNode: remoteExitNodes, session: remoteExitNodeSessions }) .select({
remoteExitNode: remoteExitNodes,
session: remoteExitNodeSessions
})
.from(remoteExitNodeSessions) .from(remoteExitNodeSessions)
.innerJoin(remoteExitNodes, eq(remoteExitNodeSessions.remoteExitNodeId, remoteExitNodes.remoteExitNodeId)) .innerJoin(
remoteExitNodes,
eq(
remoteExitNodeSessions.remoteExitNodeId,
remoteExitNodes.remoteExitNodeId
)
)
.where(eq(remoteExitNodeSessions.sessionId, sessionId)); .where(eq(remoteExitNodeSessions.sessionId, sessionId));
if (result.length < 1) { if (result.length < 1) {
return { session: null, remoteExitNode: null }; return { session: null, remoteExitNode: null };
@@ -58,26 +70,32 @@ export async function validateRemoteExitNodeSessionToken(
.where(eq(remoteExitNodeSessions.sessionId, session.sessionId)); .where(eq(remoteExitNodeSessions.sessionId, session.sessionId));
return { session: null, remoteExitNode: null }; return { session: null, remoteExitNode: null };
} }
if (Date.now() >= session.expiresAt - (EXPIRES / 2)) { if (Date.now() >= session.expiresAt - EXPIRES / 2) {
session.expiresAt = new Date( session.expiresAt = new Date(Date.now() + EXPIRES).getTime();
Date.now() + EXPIRES,
).getTime();
await db await db
.update(remoteExitNodeSessions) .update(remoteExitNodeSessions)
.set({ .set({
expiresAt: session.expiresAt, expiresAt: session.expiresAt
}) })
.where(eq(remoteExitNodeSessions.sessionId, session.sessionId)); .where(eq(remoteExitNodeSessions.sessionId, session.sessionId));
} }
return { session, remoteExitNode }; return { session, remoteExitNode };
} }
export async function invalidateRemoteExitNodeSession(sessionId: string): Promise<void> { export async function invalidateRemoteExitNodeSession(
await db.delete(remoteExitNodeSessions).where(eq(remoteExitNodeSessions.sessionId, sessionId)); sessionId: string
): Promise<void> {
await db
.delete(remoteExitNodeSessions)
.where(eq(remoteExitNodeSessions.sessionId, sessionId));
} }
export async function invalidateAllRemoteExitNodeSessions(remoteExitNodeId: string): Promise<void> { export async function invalidateAllRemoteExitNodeSessions(
await db.delete(remoteExitNodeSessions).where(eq(remoteExitNodeSessions.remoteExitNodeId, remoteExitNodeId)); remoteExitNodeId: string
): Promise<void> {
await db
.delete(remoteExitNodeSessions)
.where(eq(remoteExitNodeSessions.remoteExitNodeId, remoteExitNodeId));
} }
export type SessionValidationResult = export type SessionValidationResult =
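
This session layer stores only a SHA-256 digest of the bearer token, so a leaked database row cannot be replayed as a token. A hedged sketch of the matching token-side flow, assuming a runtime with Web Crypto; the generator below is illustrative, not the one this codebase uses:

// Sketch: issue a random token, persist only its sha256 digest as the
// session id, and re-derive the id at validation time.
import {
    encodeHexLowerCase,
    encodeBase32LowerCaseNoPadding
} from "@oslojs/encoding";
import { sha256 } from "@oslojs/crypto/sha2";

function generateSessionToken(): string {
    const bytes = new Uint8Array(20);
    crypto.getRandomValues(bytes);
    return encodeBase32LowerCaseNoPadding(bytes);
}

function tokenToSessionId(token: string): string {
    return encodeHexLowerCase(sha256(new TextEncoder().encode(token)));
}

// Only tokenToSessionId(token) is ever stored; the raw token goes to the client.

The EXPIRES / 2 comparison above then gives a sliding window: a session used in the second half of its lifetime has its expiry pushed out by a full EXPIRES again.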

View File

@@ -25,4 +25,4 @@ export async function initCleanup() {
// Handle process termination // Handle process termination
process.on("SIGTERM", () => cleanup()); process.on("SIGTERM", () => cleanup());
process.on("SIGINT", () => cleanup()); process.on("SIGINT", () => cleanup());
} }
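
Since SIGTERM and SIGINT can both fire (or a signal can arrive twice), cleanup handlers like these are commonly made idempotent. A small sketch of that guard, with the released resources assumed:

// Sketch: idempotent cleanup so a second signal cannot double-free.
let cleanedUp = false;

async function cleanup() {
    if (cleanedUp) return;
    cleanedUp = true;
    // e.g. close DB pools, flush logs, stop intervals (assumed resources)
    process.exit(0);
}

process.on("SIGTERM", () => void cleanup());
process.on("SIGINT", () => void cleanup());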

View File

@@ -12,4 +12,4 @@
*/ */
export * from "./getOrgTierData"; export * from "./getOrgTierData";
export * from "./createCustomer"; export * from "./createCustomer";

View File

@@ -55,7 +55,6 @@ export async function getValidCertificatesForDomains(
domains: Set<string>, domains: Set<string>,
useCache: boolean = true useCache: boolean = true
): Promise<Array<CertificateResult>> { ): Promise<Array<CertificateResult>> {
loadEncryptData(); // Ensure encryption key is loaded loadEncryptData(); // Ensure encryption key is loaded
const finalResults: CertificateResult[] = []; const finalResults: CertificateResult[] = [];

View File

@@ -12,14 +12,7 @@
*/ */
import { build } from "@server/build"; import { build } from "@server/build";
import { import { db, Org, orgs, ResourceSession, sessions, users } from "@server/db";
db,
Org,
orgs,
ResourceSession,
sessions,
users
} from "@server/db";
import { getOrgTierData } from "#private/lib/billing"; import { getOrgTierData } from "#private/lib/billing";
import { TierId } from "@server/lib/billing/tiers"; import { TierId } from "@server/lib/billing/tiers";
import license from "#private/license/license"; import license from "#private/license/license";

View File

@@ -66,7 +66,9 @@ export async function sendToExitNode(
// logger.debug(`Configured local exit node name: ${config.getRawConfig().gerbil.exit_node_name}`); // logger.debug(`Configured local exit node name: ${config.getRawConfig().gerbil.exit_node_name}`);
if (exitNode.name == config.getRawConfig().gerbil.exit_node_name) { if (exitNode.name == config.getRawConfig().gerbil.exit_node_name) {
hostname = privateConfig.getRawPrivateConfig().gerbil.local_exit_node_reachable_at; hostname =
privateConfig.getRawPrivateConfig().gerbil
.local_exit_node_reachable_at;
} }
if (!hostname) { if (!hostname) {

View File

@@ -44,43 +44,53 @@ async function checkExitNodeOnlineStatus(
const delayBetweenAttempts = 100; // 100ms delay between starting each attempt const delayBetweenAttempts = 100; // 100ms delay between starting each attempt
// Create promises for all attempts with staggered delays // Create promises for all attempts with staggered delays
const attemptPromises = Array.from({ length: maxAttempts }, async (_, index) => { const attemptPromises = Array.from(
const attemptNumber = index + 1; { length: maxAttempts },
async (_, index) => {
// Add delay before each attempt (except the first) const attemptNumber = index + 1;
if (index > 0) {
await new Promise((resolve) => setTimeout(resolve, delayBetweenAttempts * index));
}
try { // Add delay before each attempt (except the first)
const response = await axios.get(`http://${endpoint}/ping`, { if (index > 0) {
timeout: timeoutMs, await new Promise((resolve) =>
validateStatus: (status) => status === 200 setTimeout(resolve, delayBetweenAttempts * index)
});
if (response.status === 200) {
logger.debug(
`Exit node ${endpoint} is online (attempt ${attemptNumber}/${maxAttempts})`
); );
return { success: true, attemptNumber };
} }
return { success: false, attemptNumber, error: 'Non-200 status' };
} catch (error) { try {
const errorMessage = error instanceof Error ? error.message : "Unknown error"; const response = await axios.get(`http://${endpoint}/ping`, {
logger.debug( timeout: timeoutMs,
`Exit node ${endpoint} ping failed (attempt ${attemptNumber}/${maxAttempts}): ${errorMessage}` validateStatus: (status) => status === 200
); });
return { success: false, attemptNumber, error: errorMessage };
if (response.status === 200) {
logger.debug(
`Exit node ${endpoint} is online (attempt ${attemptNumber}/${maxAttempts})`
);
return { success: true, attemptNumber };
}
return {
success: false,
attemptNumber,
error: "Non-200 status"
};
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : "Unknown error";
logger.debug(
`Exit node ${endpoint} ping failed (attempt ${attemptNumber}/${maxAttempts}): ${errorMessage}`
);
return { success: false, attemptNumber, error: errorMessage };
}
} }
}); );
try { try {
// Wait for the first successful response or all to fail // Wait for the first successful response or all to fail
const results = await Promise.allSettled(attemptPromises); const results = await Promise.allSettled(attemptPromises);
// Check if any attempt succeeded // Check if any attempt succeeded
for (const result of results) { for (const result of results) {
if (result.status === 'fulfilled' && result.value.success) { if (result.status === "fulfilled" && result.value.success) {
return true; return true;
} }
} }
@@ -137,7 +147,11 @@ export async function verifyExitNodeOrgAccess(
return { hasAccess: false, exitNode }; return { hasAccess: false, exitNode };
} }
export async function listExitNodes(orgId: string, filterOnline = false, noCloud = false) { export async function listExitNodes(
orgId: string,
filterOnline = false,
noCloud = false
) {
const allExitNodes = await db const allExitNodes = await db
.select({ .select({
exitNodeId: exitNodes.exitNodeId, exitNodeId: exitNodes.exitNodeId,
@@ -166,7 +180,10 @@ export async function listExitNodes(orgId: string, filterOnline = false, noCloud
eq(exitNodes.type, "gerbil"), eq(exitNodes.type, "gerbil"),
or( or(
// only choose nodes that are in the same region // only choose nodes that are in the same region
eq(exitNodes.region, config.getRawPrivateConfig().app.region), eq(
exitNodes.region,
config.getRawPrivateConfig().app.region
),
isNull(exitNodes.region) // or for enterprise where region is not set isNull(exitNodes.region) // or for enterprise where region is not set
) )
), ),
@@ -191,7 +208,7 @@ export async function listExitNodes(orgId: string, filterOnline = false, noCloud
// let online: boolean; // let online: boolean;
// if (filterOnline && node.type == "remoteExitNode") { // if (filterOnline && node.type == "remoteExitNode") {
// try { // try {
// const isActuallyOnline = await checkExitNodeOnlineStatus( // const isActuallyOnline = await checkExitNodeOnlineStatus(
// node.endpoint // node.endpoint
// ); // );
@@ -225,7 +242,8 @@ export async function listExitNodes(orgId: string, filterOnline = false, noCloud
node.type === "remoteExitNode" && (!filterOnline || node.online) node.type === "remoteExitNode" && (!filterOnline || node.online)
); );
const gerbilExitNodes = allExitNodes.filter( const gerbilExitNodes = allExitNodes.filter(
(node) => node.type === "gerbil" && (!filterOnline || node.online) && !noCloud (node) =>
node.type === "gerbil" && (!filterOnline || node.online) && !noCloud
); );
// THIS PROVIDES THE FALL // THIS PROVIDES THE FALL
@@ -334,7 +352,11 @@ export function selectBestExitNode(
return fallbackNode; return fallbackNode;
} }
export async function checkExitNodeOrg(exitNodeId: number, orgId: string, trx: Transaction | typeof db = db) { export async function checkExitNodeOrg(
exitNodeId: number,
orgId: string,
trx: Transaction | typeof db = db
) {
const [exitNodeOrg] = await trx const [exitNodeOrg] = await trx
.select() .select()
.from(exitNodeOrgs) .from(exitNodeOrgs)
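
The checkExitNodeOnlineStatus hunk in this file races several pings with staggered start times rather than running them strictly in sequence, then succeeds if any attempt lands. A standalone sketch of the same trick; the endpoint path and timing constants are illustrative:

// Sketch: N staggered attempts; resolves true if any attempt gets a 200.
import axios from "axios";

async function isEndpointOnline(
    endpoint: string,
    maxAttempts = 3,
    timeoutMs = 1500,
    delayBetweenAttempts = 100
): Promise<boolean> {
    const attempts = Array.from({ length: maxAttempts }, async (_, index) => {
        // Stagger starts so attempts overlap instead of queueing.
        if (index > 0) {
            await new Promise((r) =>
                setTimeout(r, delayBetweenAttempts * index)
            );
        }
        const response = await axios.get(`http://${endpoint}/ping`, {
            timeout: timeoutMs,
            validateStatus: (status) => status === 200
        });
        return response.status === 200;
    });
    const results = await Promise.allSettled(attempts);
    return results.some((r) => r.status === "fulfilled" && r.value);
}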

View File

@@ -12,4 +12,4 @@
*/ */
export * from "./exitNodeComms"; export * from "./exitNodeComms";
export * from "./exitNodes"; export * from "./exitNodes";

View File

@@ -177,7 +177,9 @@ export class LockManager {
const exists = value !== null; const exists = value !== null;
const ownedByMe = const ownedByMe =
exists && exists &&
value!.startsWith(`${config.getRawConfig().gerbil.exit_node_name}:`); value!.startsWith(
`${config.getRawConfig().gerbil.exit_node_name}:`
);
const owner = exists ? value!.split(":")[0] : undefined; const owner = exists ? value!.split(":")[0] : undefined;
return { return {

View File

@@ -14,15 +14,15 @@
// Simple test file for the rate limit service with Redis // Simple test file for the rate limit service with Redis
// Run with: npx ts-node rateLimitService.test.ts // Run with: npx ts-node rateLimitService.test.ts
import { RateLimitService } from './rateLimit'; import { RateLimitService } from "./rateLimit";
function generateClientId() { function generateClientId() {
return 'client-' + Math.random().toString(36).substring(2, 15); return "client-" + Math.random().toString(36).substring(2, 15);
} }
async function runTests() { async function runTests() {
console.log('Starting Rate Limit Service Tests...\n'); console.log("Starting Rate Limit Service Tests...\n");
const rateLimitService = new RateLimitService(); const rateLimitService = new RateLimitService();
let testsPassed = 0; let testsPassed = 0;
let testsTotal = 0; let testsTotal = 0;
@@ -47,36 +47,54 @@ async function runTests() {
} }
// Test 1: Basic rate limiting // Test 1: Basic rate limiting
await test('Should allow requests under the limit', async () => { await test("Should allow requests under the limit", async () => {
const clientId = generateClientId(); const clientId = generateClientId();
const maxRequests = 5; const maxRequests = 5;
for (let i = 0; i < maxRequests - 1; i++) { for (let i = 0; i < maxRequests - 1; i++) {
const result = await rateLimitService.checkRateLimit(clientId, undefined, maxRequests); const result = await rateLimitService.checkRateLimit(
clientId,
undefined,
maxRequests
);
assert(!result.isLimited, `Request ${i + 1} should be allowed`); assert(!result.isLimited, `Request ${i + 1} should be allowed`);
assert(result.totalHits === i + 1, `Expected ${i + 1} hits, got ${result.totalHits}`); assert(
result.totalHits === i + 1,
`Expected ${i + 1} hits, got ${result.totalHits}`
);
} }
}); });
// Test 2: Rate limit blocking // Test 2: Rate limit blocking
await test('Should block requests over the limit', async () => { await test("Should block requests over the limit", async () => {
const clientId = generateClientId(); const clientId = generateClientId();
const maxRequests = 30; const maxRequests = 30;
// Use up all allowed requests // Use up all allowed requests
for (let i = 0; i < maxRequests - 1; i++) { for (let i = 0; i < maxRequests - 1; i++) {
const result = await rateLimitService.checkRateLimit(clientId, undefined, maxRequests); const result = await rateLimitService.checkRateLimit(
clientId,
undefined,
maxRequests
);
assert(!result.isLimited, `Request ${i + 1} should be allowed`); assert(!result.isLimited, `Request ${i + 1} should be allowed`);
} }
// Next request should be blocked // Next request should be blocked
const blockedResult = await rateLimitService.checkRateLimit(clientId, undefined, maxRequests); const blockedResult = await rateLimitService.checkRateLimit(
assert(blockedResult.isLimited, 'Request should be blocked'); clientId,
assert(blockedResult.reason === 'global', 'Should be blocked for global reason'); undefined,
maxRequests
);
assert(blockedResult.isLimited, "Request should be blocked");
assert(
blockedResult.reason === "global",
"Should be blocked for global reason"
);
}); });
// Test 3: Message type limits // Test 3: Message type limits
await test('Should handle message type limits', async () => { await test("Should handle message type limits", async () => {
const clientId = generateClientId(); const clientId = generateClientId();
const globalMax = 10; const globalMax = 10;
const messageTypeMax = 2; const messageTypeMax = 2;
@@ -84,54 +102,64 @@ async function runTests() {
// Send messages of type 'ping' up to the limit // Send messages of type 'ping' up to the limit
for (let i = 0; i < messageTypeMax - 1; i++) { for (let i = 0; i < messageTypeMax - 1; i++) {
const result = await rateLimitService.checkRateLimit( const result = await rateLimitService.checkRateLimit(
clientId, clientId,
'ping', "ping",
globalMax, globalMax,
messageTypeMax messageTypeMax
); );
assert(!result.isLimited, `Ping message ${i + 1} should be allowed`); assert(
!result.isLimited,
`Ping message ${i + 1} should be allowed`
);
} }
// Next 'ping' should be blocked // Next 'ping' should be blocked
const blockedResult = await rateLimitService.checkRateLimit( const blockedResult = await rateLimitService.checkRateLimit(
clientId, clientId,
'ping', "ping",
globalMax, globalMax,
messageTypeMax messageTypeMax
); );
assert(blockedResult.isLimited, 'Ping message should be blocked'); assert(blockedResult.isLimited, "Ping message should be blocked");
assert(blockedResult.reason === 'message_type:ping', 'Should be blocked for message type'); assert(
blockedResult.reason === "message_type:ping",
"Should be blocked for message type"
);
// Other message types should still work // Other message types should still work
const otherResult = await rateLimitService.checkRateLimit( const otherResult = await rateLimitService.checkRateLimit(
clientId, clientId,
'pong', "pong",
globalMax, globalMax,
messageTypeMax messageTypeMax
); );
assert(!otherResult.isLimited, 'Pong message should be allowed'); assert(!otherResult.isLimited, "Pong message should be allowed");
}); });
// Test 4: Reset functionality // Test 4: Reset functionality
await test('Should reset client correctly', async () => { await test("Should reset client correctly", async () => {
const clientId = generateClientId(); const clientId = generateClientId();
const maxRequests = 3; const maxRequests = 3;
// Use up some requests // Use up some requests
await rateLimitService.checkRateLimit(clientId, undefined, maxRequests); await rateLimitService.checkRateLimit(clientId, undefined, maxRequests);
await rateLimitService.checkRateLimit(clientId, 'test', maxRequests); await rateLimitService.checkRateLimit(clientId, "test", maxRequests);
// Reset the client // Reset the client
await rateLimitService.resetKey(clientId); await rateLimitService.resetKey(clientId);
// Should be able to make fresh requests // Should be able to make fresh requests
const result = await rateLimitService.checkRateLimit(clientId, undefined, maxRequests); const result = await rateLimitService.checkRateLimit(
assert(!result.isLimited, 'Request after reset should be allowed'); clientId,
assert(result.totalHits === 1, 'Should have 1 hit after reset'); undefined,
maxRequests
);
assert(!result.isLimited, "Request after reset should be allowed");
assert(result.totalHits === 1, "Should have 1 hit after reset");
}); });
// Test 5: Different clients are independent // Test 5: Different clients are independent
await test('Should handle different clients independently', async () => { await test("Should handle different clients independently", async () => {
const client1 = generateClientId(); const client1 = generateClientId();
const client2 = generateClientId(); const client2 = generateClientId();
const maxRequests = 2; const maxRequests = 2;
@@ -139,43 +167,62 @@ async function runTests() {
// Client 1 uses up their limit // Client 1 uses up their limit
await rateLimitService.checkRateLimit(client1, undefined, maxRequests); await rateLimitService.checkRateLimit(client1, undefined, maxRequests);
await rateLimitService.checkRateLimit(client1, undefined, maxRequests); await rateLimitService.checkRateLimit(client1, undefined, maxRequests);
const client1Blocked = await rateLimitService.checkRateLimit(client1, undefined, maxRequests); const client1Blocked = await rateLimitService.checkRateLimit(
assert(client1Blocked.isLimited, 'Client 1 should be blocked'); client1,
undefined,
maxRequests
);
assert(client1Blocked.isLimited, "Client 1 should be blocked");
// Client 2 should still be able to make requests // Client 2 should still be able to make requests
const client2Result = await rateLimitService.checkRateLimit(client2, undefined, maxRequests); const client2Result = await rateLimitService.checkRateLimit(
assert(!client2Result.isLimited, 'Client 2 should not be blocked'); client2,
assert(client2Result.totalHits === 1, 'Client 2 should have 1 hit'); undefined,
maxRequests
);
assert(!client2Result.isLimited, "Client 2 should not be blocked");
assert(client2Result.totalHits === 1, "Client 2 should have 1 hit");
}); });
// Test 6: Decrement functionality // Test 6: Decrement functionality
await test('Should decrement correctly', async () => { await test("Should decrement correctly", async () => {
const clientId = generateClientId(); const clientId = generateClientId();
const maxRequests = 5; const maxRequests = 5;
// Make some requests // Make some requests
await rateLimitService.checkRateLimit(clientId, undefined, maxRequests); await rateLimitService.checkRateLimit(clientId, undefined, maxRequests);
await rateLimitService.checkRateLimit(clientId, undefined, maxRequests); await rateLimitService.checkRateLimit(clientId, undefined, maxRequests);
let result = await rateLimitService.checkRateLimit(clientId, undefined, maxRequests); let result = await rateLimitService.checkRateLimit(
assert(result.totalHits === 3, 'Should have 3 hits before decrement'); clientId,
undefined,
maxRequests
);
assert(result.totalHits === 3, "Should have 3 hits before decrement");
// Decrement // Decrement
await rateLimitService.decrementRateLimit(clientId); await rateLimitService.decrementRateLimit(clientId);
// Next request should reflect the decrement // Next request should reflect the decrement
result = await rateLimitService.checkRateLimit(clientId, undefined, maxRequests); result = await rateLimitService.checkRateLimit(
assert(result.totalHits === 3, 'Should have 3 hits after decrement + increment'); clientId,
undefined,
maxRequests
);
assert(
result.totalHits === 3,
"Should have 3 hits after decrement + increment"
);
}); });
// Wait a moment for any pending Redis operations // Wait a moment for any pending Redis operations
console.log('\nWaiting for Redis sync...'); console.log("\nWaiting for Redis sync...");
await new Promise(resolve => setTimeout(resolve, 1000)); await new Promise((resolve) => setTimeout(resolve, 1000));
// Force sync to test Redis integration // Force sync to test Redis integration
await test('Should sync to Redis', async () => { await test("Should sync to Redis", async () => {
await rateLimitService.forceSyncAllPendingData(); await rateLimitService.forceSyncAllPendingData();
// If this doesn't throw, Redis sync is working // If this doesn't throw, Redis sync is working
assert(true, 'Redis sync completed'); assert(true, "Redis sync completed");
}); });
// Cleanup // Cleanup
@@ -185,18 +232,18 @@ async function runTests() {
console.log(`\n--- Test Results ---`); console.log(`\n--- Test Results ---`);
console.log(`✅ Passed: ${testsPassed}/${testsTotal}`); console.log(`✅ Passed: ${testsPassed}/${testsTotal}`);
console.log(`❌ Failed: ${testsTotal - testsPassed}/${testsTotal}`); console.log(`❌ Failed: ${testsTotal - testsPassed}/${testsTotal}`);
if (testsPassed === testsTotal) { if (testsPassed === testsTotal) {
console.log('\n🎉 All tests passed!'); console.log("\n🎉 All tests passed!");
process.exit(0); process.exit(0);
} else { } else {
console.log('\n💥 Some tests failed!'); console.log("\n💥 Some tests failed!");
process.exit(1); process.exit(1);
} }
} }
// Run the tests // Run the tests
runTests().catch(error => { runTests().catch((error) => {
console.error('Test runner error:', error); console.error("Test runner error:", error);
process.exit(1); process.exit(1);
}); });
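
The test and assert helpers this file leans on are defined in a part of the diff not shown here. A plausible minimal shape, offered only as an assumption to make the tests above readable, not as the commit's actual definitions:

// Assumed shape of the helpers referenced above (not from this commit).
function assert(condition: boolean, message: string) {
    if (!condition) throw new Error(`Assertion failed: ${message}`);
}

async function test(name: string, fn: () => Promise<void>) {
    testsTotal++;
    try {
        await fn();
        testsPassed++;
        console.log(`✅ ${name}`);
    } catch (error) {
        console.log(`❌ ${name}:`, error);
    }
}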

View File

@@ -40,7 +40,8 @@ interface RateLimitResult {
export class RateLimitService { export class RateLimitService {
private localRateLimitTracker: Map<string, RateLimitTracker> = new Map(); private localRateLimitTracker: Map<string, RateLimitTracker> = new Map();
private localMessageTypeRateLimitTracker: Map<string, RateLimitTracker> = new Map(); private localMessageTypeRateLimitTracker: Map<string, RateLimitTracker> =
new Map();
private cleanupInterval: NodeJS.Timeout | null = null; private cleanupInterval: NodeJS.Timeout | null = null;
private forceSyncInterval: NodeJS.Timeout | null = null; private forceSyncInterval: NodeJS.Timeout | null = null;
@@ -68,12 +69,18 @@ export class RateLimitService {
return `ratelimit:${clientId}`; return `ratelimit:${clientId}`;
} }
private getMessageTypeRateLimitKey(clientId: string, messageType: string): string { private getMessageTypeRateLimitKey(
clientId: string,
messageType: string
): string {
return `ratelimit:${clientId}:${messageType}`; return `ratelimit:${clientId}:${messageType}`;
} }
// Helper function to clean up old timestamp fields from a Redis hash // Helper function to clean up old timestamp fields from a Redis hash
private async cleanupOldTimestamps(key: string, windowStart: number): Promise<void> { private async cleanupOldTimestamps(
key: string,
windowStart: number
): Promise<void> {
if (!redisManager.isRedisEnabled()) return; if (!redisManager.isRedisEnabled()) return;
try { try {
@@ -101,10 +108,15 @@ export class RateLimitService {
const batch = fieldsToDelete.slice(i, i + batchSize); const batch = fieldsToDelete.slice(i, i + batchSize);
await client.hdel(key, ...batch); await client.hdel(key, ...batch);
} }
logger.debug(`Cleaned up ${fieldsToDelete.length} old timestamp fields from ${key}`); logger.debug(
`Cleaned up ${fieldsToDelete.length} old timestamp fields from ${key}`
);
} }
} catch (error) { } catch (error) {
logger.error(`Failed to cleanup old timestamps for key ${key}:`, error); logger.error(
`Failed to cleanup old timestamps for key ${key}:`,
error
);
// Don't throw - cleanup failures shouldn't block rate limiting // Don't throw - cleanup failures shouldn't block rate limiting
} }
} }
@@ -114,7 +126,8 @@ export class RateLimitService {
clientId: string, clientId: string,
tracker: RateLimitTracker tracker: RateLimitTracker
): Promise<void> { ): Promise<void> {
if (!redisManager.isRedisEnabled() || tracker.pendingCount === 0) return; if (!redisManager.isRedisEnabled() || tracker.pendingCount === 0)
return;
try { try {
const currentTime = Math.floor(Date.now() / 1000); const currentTime = Math.floor(Date.now() / 1000);
@@ -132,7 +145,11 @@ export class RateLimitService {
const newValue = ( const newValue = (
parseInt(currentValue || "0") + tracker.pendingCount parseInt(currentValue || "0") + tracker.pendingCount
).toString(); ).toString();
await redisManager.hset(globalKey, currentTime.toString(), newValue); await redisManager.hset(
globalKey,
currentTime.toString(),
newValue
);
// Set TTL using the client directly - this prevents the key from persisting forever // Set TTL using the client directly - this prevents the key from persisting forever
if (redisManager.getClient()) { if (redisManager.getClient()) {
@@ -145,7 +162,9 @@ export class RateLimitService {
tracker.lastSyncedCount = tracker.count; tracker.lastSyncedCount = tracker.count;
tracker.pendingCount = 0; tracker.pendingCount = 0;
logger.debug(`Synced global rate limit to Redis for client ${clientId}`); logger.debug(
`Synced global rate limit to Redis for client ${clientId}`
);
} catch (error) { } catch (error) {
logger.error("Failed to sync global rate limit to Redis:", error); logger.error("Failed to sync global rate limit to Redis:", error);
} }
@@ -156,12 +175,16 @@ export class RateLimitService {
messageType: string, messageType: string,
tracker: RateLimitTracker tracker: RateLimitTracker
): Promise<void> { ): Promise<void> {
if (!redisManager.isRedisEnabled() || tracker.pendingCount === 0) return; if (!redisManager.isRedisEnabled() || tracker.pendingCount === 0)
return;
try { try {
const currentTime = Math.floor(Date.now() / 1000); const currentTime = Math.floor(Date.now() / 1000);
const windowStart = currentTime - RATE_LIMIT_WINDOW; const windowStart = currentTime - RATE_LIMIT_WINDOW;
const messageTypeKey = this.getMessageTypeRateLimitKey(clientId, messageType); const messageTypeKey = this.getMessageTypeRateLimitKey(
clientId,
messageType
);
// Clean up old timestamp fields before writing // Clean up old timestamp fields before writing
await this.cleanupOldTimestamps(messageTypeKey, windowStart); await this.cleanupOldTimestamps(messageTypeKey, windowStart);
@@ -195,12 +218,17 @@ export class RateLimitService {
`Synced message type rate limit to Redis for client ${clientId}, type ${messageType}` `Synced message type rate limit to Redis for client ${clientId}, type ${messageType}`
); );
} catch (error) { } catch (error) {
logger.error("Failed to sync message type rate limit to Redis:", error); logger.error(
"Failed to sync message type rate limit to Redis:",
error
);
} }
} }
// Initialize local tracker from Redis data // Initialize local tracker from Redis data
private async initializeLocalTracker(clientId: string): Promise<RateLimitTracker> { private async initializeLocalTracker(
clientId: string
): Promise<RateLimitTracker> {
const currentTime = Math.floor(Date.now() / 1000); const currentTime = Math.floor(Date.now() / 1000);
const windowStart = currentTime - RATE_LIMIT_WINDOW; const windowStart = currentTime - RATE_LIMIT_WINDOW;
@@ -215,14 +243,16 @@ export class RateLimitService {
try { try {
const globalKey = this.getRateLimitKey(clientId); const globalKey = this.getRateLimitKey(clientId);
// Clean up old timestamp fields before reading // Clean up old timestamp fields before reading
await this.cleanupOldTimestamps(globalKey, windowStart); await this.cleanupOldTimestamps(globalKey, windowStart);
const globalRateLimitData = await redisManager.hgetall(globalKey); const globalRateLimitData = await redisManager.hgetall(globalKey);
let count = 0; let count = 0;
for (const [timestamp, countStr] of Object.entries(globalRateLimitData)) { for (const [timestamp, countStr] of Object.entries(
globalRateLimitData
)) {
const time = parseInt(timestamp); const time = parseInt(timestamp);
if (time >= windowStart) { if (time >= windowStart) {
count += parseInt(countStr); count += parseInt(countStr);
@@ -236,7 +266,10 @@ export class RateLimitService {
lastSyncedCount: count lastSyncedCount: count
}; };
} catch (error) { } catch (error) {
logger.error("Failed to initialize global tracker from Redis:", error); logger.error(
"Failed to initialize global tracker from Redis:",
error
);
return { return {
count: 0, count: 0,
windowStart: currentTime, windowStart: currentTime,
@@ -263,15 +296,21 @@ export class RateLimitService {
} }
try { try {
const messageTypeKey = this.getMessageTypeRateLimitKey(clientId, messageType); const messageTypeKey = this.getMessageTypeRateLimitKey(
clientId,
messageType
);
// Clean up old timestamp fields before reading // Clean up old timestamp fields before reading
await this.cleanupOldTimestamps(messageTypeKey, windowStart); await this.cleanupOldTimestamps(messageTypeKey, windowStart);
const messageTypeRateLimitData = await redisManager.hgetall(messageTypeKey); const messageTypeRateLimitData =
await redisManager.hgetall(messageTypeKey);
let count = 0; let count = 0;
for (const [timestamp, countStr] of Object.entries(messageTypeRateLimitData)) { for (const [timestamp, countStr] of Object.entries(
messageTypeRateLimitData
)) {
const time = parseInt(timestamp); const time = parseInt(timestamp);
if (time >= windowStart) { if (time >= windowStart) {
count += parseInt(countStr); count += parseInt(countStr);
@@ -285,7 +324,10 @@ export class RateLimitService {
lastSyncedCount: count lastSyncedCount: count
}; };
} catch (error) { } catch (error) {
logger.error("Failed to initialize message type tracker from Redis:", error); logger.error(
"Failed to initialize message type tracker from Redis:",
error
);
return { return {
count: 0, count: 0,
windowStart: currentTime, windowStart: currentTime,
@@ -327,7 +369,10 @@ export class RateLimitService {
isLimited: true, isLimited: true,
reason: "global", reason: "global",
totalHits: globalTracker.count, totalHits: globalTracker.count,
resetTime: new Date((globalTracker.windowStart + Math.floor(windowMs / 1000)) * 1000) resetTime: new Date(
(globalTracker.windowStart + Math.floor(windowMs / 1000)) *
1000
)
}; };
} }
@@ -339,19 +384,32 @@ export class RateLimitService {
// Check message type specific rate limit if messageType is provided // Check message type specific rate limit if messageType is provided
if (messageType) { if (messageType) {
const messageTypeKey = `${clientId}:${messageType}`; const messageTypeKey = `${clientId}:${messageType}`;
let messageTypeTracker = this.localMessageTypeRateLimitTracker.get(messageTypeKey); let messageTypeTracker =
this.localMessageTypeRateLimitTracker.get(messageTypeKey);
if (!messageTypeTracker || messageTypeTracker.windowStart < windowStart) { if (
!messageTypeTracker ||
messageTypeTracker.windowStart < windowStart
) {
// New window or first request for this message type - initialize from Redis if available // New window or first request for this message type - initialize from Redis if available
messageTypeTracker = await this.initializeMessageTypeTracker(clientId, messageType); messageTypeTracker = await this.initializeMessageTypeTracker(
clientId,
messageType
);
messageTypeTracker.windowStart = currentTime; messageTypeTracker.windowStart = currentTime;
this.localMessageTypeRateLimitTracker.set(messageTypeKey, messageTypeTracker); this.localMessageTypeRateLimitTracker.set(
messageTypeKey,
messageTypeTracker
);
} }
// Increment message type counters // Increment message type counters
messageTypeTracker.count++; messageTypeTracker.count++;
messageTypeTracker.pendingCount++; messageTypeTracker.pendingCount++;
this.localMessageTypeRateLimitTracker.set(messageTypeKey, messageTypeTracker); this.localMessageTypeRateLimitTracker.set(
messageTypeKey,
messageTypeTracker
);
// Check if message type limit would be exceeded // Check if message type limit would be exceeded
if (messageTypeTracker.count >= messageTypeLimit) { if (messageTypeTracker.count >= messageTypeLimit) {
@@ -359,25 +417,38 @@ export class RateLimitService {
isLimited: true, isLimited: true,
reason: `message_type:${messageType}`, reason: `message_type:${messageType}`,
totalHits: messageTypeTracker.count, totalHits: messageTypeTracker.count,
resetTime: new Date((messageTypeTracker.windowStart + Math.floor(windowMs / 1000)) * 1000) resetTime: new Date(
(messageTypeTracker.windowStart +
Math.floor(windowMs / 1000)) *
1000
)
}; };
} }
// Sync to Redis if threshold reached // Sync to Redis if threshold reached
if (messageTypeTracker.pendingCount >= REDIS_SYNC_THRESHOLD) { if (messageTypeTracker.pendingCount >= REDIS_SYNC_THRESHOLD) {
this.syncMessageTypeRateLimitToRedis(clientId, messageType, messageTypeTracker); this.syncMessageTypeRateLimitToRedis(
clientId,
messageType,
messageTypeTracker
);
} }
} }
return { return {
isLimited: false, isLimited: false,
totalHits: globalTracker.count, totalHits: globalTracker.count,
resetTime: new Date((globalTracker.windowStart + Math.floor(windowMs / 1000)) * 1000) resetTime: new Date(
(globalTracker.windowStart + Math.floor(windowMs / 1000)) * 1000
)
}; };
} }
// Decrement function for skipSuccessfulRequests/skipFailedRequests functionality // Decrement function for skipSuccessfulRequests/skipFailedRequests functionality
async decrementRateLimit(clientId: string, messageType?: string): Promise<void> { async decrementRateLimit(
clientId: string,
messageType?: string
): Promise<void> {
// Decrement global counter // Decrement global counter
const globalTracker = this.localRateLimitTracker.get(clientId); const globalTracker = this.localRateLimitTracker.get(clientId);
if (globalTracker && globalTracker.count > 0) { if (globalTracker && globalTracker.count > 0) {
@@ -389,7 +460,8 @@ export class RateLimitService {
// Decrement message type counter if provided // Decrement message type counter if provided
if (messageType) { if (messageType) {
const messageTypeKey = `${clientId}:${messageType}`; const messageTypeKey = `${clientId}:${messageType}`;
const messageTypeTracker = this.localMessageTypeRateLimitTracker.get(messageTypeKey); const messageTypeTracker =
this.localMessageTypeRateLimitTracker.get(messageTypeKey);
if (messageTypeTracker && messageTypeTracker.count > 0) { if (messageTypeTracker && messageTypeTracker.count > 0) {
messageTypeTracker.count--; messageTypeTracker.count--;
messageTypeTracker.pendingCount--; messageTypeTracker.pendingCount--;
@@ -401,7 +473,7 @@ export class RateLimitService {
async resetKey(clientId: string): Promise<void> { async resetKey(clientId: string): Promise<void> {
// Remove from local tracking // Remove from local tracking
this.localRateLimitTracker.delete(clientId); this.localRateLimitTracker.delete(clientId);
// Remove all message type entries for this client // Remove all message type entries for this client
for (const [key] of this.localMessageTypeRateLimitTracker) { for (const [key] of this.localMessageTypeRateLimitTracker) {
if (key.startsWith(`${clientId}:`)) { if (key.startsWith(`${clientId}:`)) {
@@ -417,9 +489,13 @@ export class RateLimitService {
// Get all message type keys for this client and delete them // Get all message type keys for this client and delete them
const client = redisManager.getClient(); const client = redisManager.getClient();
if (client) { if (client) {
const messageTypeKeys = await client.keys(`ratelimit:${clientId}:*`); const messageTypeKeys = await client.keys(
`ratelimit:${clientId}:*`
);
if (messageTypeKeys.length > 0) { if (messageTypeKeys.length > 0) {
await Promise.all(messageTypeKeys.map(key => redisManager.del(key))); await Promise.all(
messageTypeKeys.map((key) => redisManager.del(key))
);
} }
} }
} }
@@ -431,7 +507,10 @@ export class RateLimitService {
const windowStart = currentTime - RATE_LIMIT_WINDOW; const windowStart = currentTime - RATE_LIMIT_WINDOW;
// Clean up global rate limit tracking and sync pending data // Clean up global rate limit tracking and sync pending data
for (const [clientId, tracker] of this.localRateLimitTracker.entries()) { for (const [
clientId,
tracker
] of this.localRateLimitTracker.entries()) {
if (tracker.windowStart < windowStart) { if (tracker.windowStart < windowStart) {
// Sync any pending data before cleanup // Sync any pending data before cleanup
if (tracker.pendingCount > 0) { if (tracker.pendingCount > 0) {
@@ -442,12 +521,19 @@ export class RateLimitService {
} }
// Clean up message type rate limit tracking and sync pending data // Clean up message type rate limit tracking and sync pending data
for (const [key, tracker] of this.localMessageTypeRateLimitTracker.entries()) { for (const [
key,
tracker
] of this.localMessageTypeRateLimitTracker.entries()) {
if (tracker.windowStart < windowStart) { if (tracker.windowStart < windowStart) {
// Sync any pending data before cleanup // Sync any pending data before cleanup
if (tracker.pendingCount > 0) { if (tracker.pendingCount > 0) {
const [clientId, messageType] = key.split(":", 2); const [clientId, messageType] = key.split(":", 2);
await this.syncMessageTypeRateLimitToRedis(clientId, messageType, tracker); await this.syncMessageTypeRateLimitToRedis(
clientId,
messageType,
tracker
);
} }
this.localMessageTypeRateLimitTracker.delete(key); this.localMessageTypeRateLimitTracker.delete(key);
} }
@@ -461,17 +547,27 @@ export class RateLimitService {
logger.debug("Force syncing all pending rate limit data to Redis..."); logger.debug("Force syncing all pending rate limit data to Redis...");
// Sync all pending global rate limits // Sync all pending global rate limits
for (const [clientId, tracker] of this.localRateLimitTracker.entries()) { for (const [
clientId,
tracker
] of this.localRateLimitTracker.entries()) {
if (tracker.pendingCount > 0) { if (tracker.pendingCount > 0) {
await this.syncRateLimitToRedis(clientId, tracker); await this.syncRateLimitToRedis(clientId, tracker);
} }
} }
// Sync all pending message type rate limits // Sync all pending message type rate limits
for (const [key, tracker] of this.localMessageTypeRateLimitTracker.entries()) { for (const [
key,
tracker
] of this.localMessageTypeRateLimitTracker.entries()) {
if (tracker.pendingCount > 0) { if (tracker.pendingCount > 0) {
const [clientId, messageType] = key.split(":", 2); const [clientId, messageType] = key.split(":", 2);
await this.syncMessageTypeRateLimitToRedis(clientId, messageType, tracker); await this.syncMessageTypeRateLimitToRedis(
clientId,
messageType,
tracker
);
} }
} }
@@ -504,4 +600,4 @@ export class RateLimitService {
} }
// Export singleton instance // Export singleton instance
export const rateLimitService = new RateLimitService(); export const rateLimitService = new RateLimitService();
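
A hedged usage sketch of the singleton above, e.g. from a WebSocket message handler; the limits and handler name are illustrative, but the call signature matches how the test file exercises checkRateLimit:

// Sketch: consult the rate limiter before processing a message.
import { rateLimitService } from "./rateLimit";

async function handleMessage(clientId: string, messageType: string) {
    const result = await rateLimitService.checkRateLimit(
        clientId,
        messageType,
        100, // global max per window (illustrative)
        20 // per-message-type max (illustrative)
    );
    if (result.isLimited) {
        console.warn(
            `rate limited (${result.reason}); resets at ${result.resetTime}`
        );
        return;
    }
    // ... process the message ...
}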

View File

@@ -17,7 +17,10 @@ import { MemoryStore, Store } from "express-rate-limit";
import RedisStore from "#private/lib/redisStore"; import RedisStore from "#private/lib/redisStore";
export function createStore(): Store { export function createStore(): Store {
if (build != "oss" && privateConfig.getRawPrivateConfig().flags.enable_redis) { if (
build != "oss" &&
privateConfig.getRawPrivateConfig().flags.enable_redis
) {
const rateLimitStore: Store = new RedisStore({ const rateLimitStore: Store = new RedisStore({
prefix: "api-rate-limit", // Optional: customize Redis key prefix prefix: "api-rate-limit", // Optional: customize Redis key prefix
skipFailedRequests: true, // Don't count failed requests skipFailedRequests: true, // Don't count failed requests
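
For reference, a store produced by createStore() plugs into express-rate-limit like any other Store implementation; presumably the OSS build path returns the MemoryStore imported above instead. A minimal wiring sketch, with the import path and limits assumed:

// Sketch: wiring the custom store into express-rate-limit.
import rateLimit from "express-rate-limit";
import express from "express";
import { createStore } from "./rateLimitStore"; // path assumed

const app = express();
app.use(
    rateLimit({
        windowMs: 60 * 1000, // 1 minute window (illustrative)
        limit: 100, // requests per window (illustrative)
        store: createStore()
    })
);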

View File

@@ -19,7 +19,7 @@ import { build } from "@server/build";
class RedisManager { class RedisManager {
public client: Redis | null = null; public client: Redis | null = null;
private writeClient: Redis | null = null; // Master for writes private writeClient: Redis | null = null; // Master for writes
private readClient: Redis | null = null; // Replica for reads private readClient: Redis | null = null; // Replica for reads
private subscriber: Redis | null = null; private subscriber: Redis | null = null;
private publisher: Redis | null = null; private publisher: Redis | null = null;
private isEnabled: boolean = false; private isEnabled: boolean = false;
@@ -46,7 +46,8 @@ class RedisManager {
this.isEnabled = false; this.isEnabled = false;
return; return;
} }
this.isEnabled = privateConfig.getRawPrivateConfig().flags.enable_redis || false; this.isEnabled =
privateConfig.getRawPrivateConfig().flags.enable_redis || false;
if (this.isEnabled) { if (this.isEnabled) {
this.initializeClients(); this.initializeClients();
} }
@@ -63,15 +64,19 @@ class RedisManager {
} }
private async triggerReconnectionCallbacks(): Promise<void> { private async triggerReconnectionCallbacks(): Promise<void> {
logger.info(`Triggering ${this.reconnectionCallbacks.size} reconnection callbacks`); logger.info(
`Triggering ${this.reconnectionCallbacks.size} reconnection callbacks`
const promises = Array.from(this.reconnectionCallbacks).map(async (callback) => { );
try {
await callback(); const promises = Array.from(this.reconnectionCallbacks).map(
} catch (error) { async (callback) => {
logger.error("Error in reconnection callback:", error); try {
await callback();
} catch (error) {
logger.error("Error in reconnection callback:", error);
}
} }
}); );
await Promise.allSettled(promises); await Promise.allSettled(promises);
} }
@@ -79,13 +84,17 @@ class RedisManager {
private async resubscribeToChannels(): Promise<void> { private async resubscribeToChannels(): Promise<void> {
if (!this.subscriber || this.subscribers.size === 0) return; if (!this.subscriber || this.subscribers.size === 0) return;
logger.info(`Re-subscribing to ${this.subscribers.size} channels after Redis reconnection`); logger.info(
`Re-subscribing to ${this.subscribers.size} channels after Redis reconnection`
);
try { try {
const channels = Array.from(this.subscribers.keys()); const channels = Array.from(this.subscribers.keys());
if (channels.length > 0) { if (channels.length > 0) {
await this.subscriber.subscribe(...channels); await this.subscriber.subscribe(...channels);
logger.info(`Successfully re-subscribed to channels: ${channels.join(', ')}`); logger.info(
`Successfully re-subscribed to channels: ${channels.join(", ")}`
);
} }
} catch (error) { } catch (error) {
logger.error("Failed to re-subscribe to channels:", error); logger.error("Failed to re-subscribe to channels:", error);
@@ -98,7 +107,7 @@ class RedisManager {
host: redisConfig.host!, host: redisConfig.host!,
port: redisConfig.port!, port: redisConfig.port!,
password: redisConfig.password, password: redisConfig.password,
db: redisConfig.db, db: redisConfig.db
// tls: { // tls: {
// rejectUnauthorized: // rejectUnauthorized:
// redisConfig.tls?.reject_unauthorized || false // redisConfig.tls?.reject_unauthorized || false
@@ -112,7 +121,7 @@ class RedisManager {
if (!redisConfig.replicas || redisConfig.replicas.length === 0) { if (!redisConfig.replicas || redisConfig.replicas.length === 0) {
return null; return null;
} }
// Use the first replica for simplicity // Use the first replica for simplicity
// In production, you might want to implement load balancing across replicas // In production, you might want to implement load balancing across replicas
const replica = redisConfig.replicas[0]; const replica = redisConfig.replicas[0];
@@ -120,7 +129,7 @@ class RedisManager {
host: replica.host!, host: replica.host!,
port: replica.port!, port: replica.port!,
password: replica.password, password: replica.password,
db: replica.db || redisConfig.db, db: replica.db || redisConfig.db
// tls: { // tls: {
// rejectUnauthorized: // rejectUnauthorized:
// replica.tls?.reject_unauthorized || false // replica.tls?.reject_unauthorized || false
@@ -133,7 +142,7 @@ class RedisManager {
private initializeClients(): void { private initializeClients(): void {
const masterConfig = this.getRedisConfig(); const masterConfig = this.getRedisConfig();
const replicaConfig = this.getReplicaRedisConfig(); const replicaConfig = this.getReplicaRedisConfig();
this.hasReplicas = replicaConfig !== null; this.hasReplicas = replicaConfig !== null;
try { try {
@@ -144,7 +153,7 @@ class RedisManager {
maxRetriesPerRequest: 3, maxRetriesPerRequest: 3,
keepAlive: 30000, keepAlive: 30000,
connectTimeout: this.connectionTimeout, connectTimeout: this.connectionTimeout,
commandTimeout: this.commandTimeout, commandTimeout: this.commandTimeout
}); });
// Initialize replica connection for reads (if available) // Initialize replica connection for reads (if available)
@@ -155,7 +164,7 @@ class RedisManager {
maxRetriesPerRequest: 3, maxRetriesPerRequest: 3,
keepAlive: 30000, keepAlive: 30000,
connectTimeout: this.connectionTimeout, connectTimeout: this.connectionTimeout,
commandTimeout: this.commandTimeout, commandTimeout: this.commandTimeout
}); });
} else { } else {
// Fallback to master for reads if no replicas // Fallback to master for reads if no replicas
@@ -172,7 +181,7 @@ class RedisManager {
maxRetriesPerRequest: 3, maxRetriesPerRequest: 3,
keepAlive: 30000, keepAlive: 30000,
connectTimeout: this.connectionTimeout, connectTimeout: this.connectionTimeout,
commandTimeout: this.commandTimeout, commandTimeout: this.commandTimeout
}); });
// Subscriber uses replica if available (reads) // Subscriber uses replica if available (reads)
@@ -182,7 +191,7 @@ class RedisManager {
maxRetriesPerRequest: 3, maxRetriesPerRequest: 3,
keepAlive: 30000, keepAlive: 30000,
connectTimeout: this.connectionTimeout, connectTimeout: this.connectionTimeout,
commandTimeout: this.commandTimeout, commandTimeout: this.commandTimeout
}); });
// Add reconnection handlers for write client // Add reconnection handlers for write client
@@ -202,11 +211,14 @@ class RedisManager {
logger.info("Redis write client ready"); logger.info("Redis write client ready");
this.isWriteHealthy = true; this.isWriteHealthy = true;
this.updateOverallHealth(); this.updateOverallHealth();
// Trigger reconnection callbacks when Redis comes back online // Trigger reconnection callbacks when Redis comes back online
if (this.isHealthy) { if (this.isHealthy) {
this.triggerReconnectionCallbacks().catch(error => { this.triggerReconnectionCallbacks().catch((error) => {
logger.error("Error triggering reconnection callbacks:", error); logger.error(
"Error triggering reconnection callbacks:",
error
);
}); });
} }
}); });
@@ -233,11 +245,14 @@ class RedisManager {
logger.info("Redis read client ready"); logger.info("Redis read client ready");
this.isReadHealthy = true; this.isReadHealthy = true;
this.updateOverallHealth(); this.updateOverallHealth();
// Trigger reconnection callbacks when Redis comes back online // Trigger reconnection callbacks when Redis comes back online
if (this.isHealthy) { if (this.isHealthy) {
this.triggerReconnectionCallbacks().catch(error => { this.triggerReconnectionCallbacks().catch((error) => {
logger.error("Error triggering reconnection callbacks:", error); logger.error(
"Error triggering reconnection callbacks:",
error
);
}); });
} }
}); });
@@ -298,8 +313,8 @@ class RedisManager {
} }
); );
const setupMessage = this.hasReplicas const setupMessage = this.hasReplicas
? "Redis clients initialized successfully with replica support" ? "Redis clients initialized successfully with replica support"
: "Redis clients initialized successfully (single instance)"; : "Redis clients initialized successfully (single instance)";
logger.info(setupMessage); logger.info(setupMessage);
@@ -313,7 +328,8 @@ class RedisManager {
private updateOverallHealth(): void { private updateOverallHealth(): void {
// Overall health is true if write is healthy and (read is healthy OR we don't have replicas) // Overall health is true if write is healthy and (read is healthy OR we don't have replicas)
this.isHealthy = this.isWriteHealthy && (this.isReadHealthy || !this.hasReplicas); this.isHealthy =
this.isWriteHealthy && (this.isReadHealthy || !this.hasReplicas);
} }
private async executeWithRetry<T>( private async executeWithRetry<T>(
@@ -322,49 +338,61 @@ class RedisManager {
fallbackOperation?: () => Promise<T> fallbackOperation?: () => Promise<T>
): Promise<T> { ): Promise<T> {
let lastError: Error | null = null; let lastError: Error | null = null;
for (let attempt = 0; attempt <= this.maxRetries; attempt++) { for (let attempt = 0; attempt <= this.maxRetries; attempt++) {
try { try {
return await operation(); return await operation();
} catch (error) { } catch (error) {
lastError = error as Error; lastError = error as Error;
// If this is the last attempt, try fallback if available // If this is the last attempt, try fallback if available
if (attempt === this.maxRetries && fallbackOperation) { if (attempt === this.maxRetries && fallbackOperation) {
try { try {
logger.warn(`${operationName} primary operation failed, trying fallback`); logger.warn(
`${operationName} primary operation failed, trying fallback`
);
return await fallbackOperation(); return await fallbackOperation();
} catch (fallbackError) { } catch (fallbackError) {
logger.error(`${operationName} fallback also failed:`, fallbackError); logger.error(
`${operationName} fallback also failed:`,
fallbackError
);
throw lastError; throw lastError;
} }
} }
// Don't retry on the last attempt // Don't retry on the last attempt
if (attempt === this.maxRetries) { if (attempt === this.maxRetries) {
break; break;
} }
// Calculate delay with exponential backoff // Calculate delay with exponential backoff
const delay = Math.min( const delay = Math.min(
this.baseRetryDelay * Math.pow(this.backoffMultiplier, attempt), this.baseRetryDelay *
Math.pow(this.backoffMultiplier, attempt),
this.maxRetryDelay this.maxRetryDelay
); );
logger.warn(`${operationName} failed (attempt ${attempt + 1}/${this.maxRetries + 1}), retrying in ${delay}ms:`, error); logger.warn(
`${operationName} failed (attempt ${attempt + 1}/${this.maxRetries + 1}), retrying in ${delay}ms:`,
error
);
// Wait before retrying // Wait before retrying
await new Promise(resolve => setTimeout(resolve, delay)); await new Promise((resolve) => setTimeout(resolve, delay));
} }
} }
logger.error(`${operationName} failed after ${this.maxRetries + 1} attempts:`, lastError); logger.error(
`${operationName} failed after ${this.maxRetries + 1} attempts:`,
lastError
);
throw lastError; throw lastError;
} }
private startHealthMonitoring(): void { private startHealthMonitoring(): void {
if (!this.isEnabled) return; if (!this.isEnabled) return;
// Check health every 30 seconds // Check health every 30 seconds
setInterval(async () => { setInterval(async () => {
try { try {
@@ -381,7 +409,7 @@ class RedisManager {
private async checkRedisHealth(): Promise<boolean> { private async checkRedisHealth(): Promise<boolean> {
const now = Date.now(); const now = Date.now();
// Only check health every 30 seconds // Only check health every 30 seconds
if (now - this.lastHealthCheck < this.healthCheckInterval) { if (now - this.lastHealthCheck < this.healthCheckInterval) {
return this.isHealthy; return this.isHealthy;
@@ -400,24 +428,45 @@ class RedisManager {
// Check write client (master) health // Check write client (master) health
await Promise.race([ await Promise.race([
this.writeClient.ping(), this.writeClient.ping(),
new Promise((_, reject) => new Promise((_, reject) =>
setTimeout(() => reject(new Error('Write client health check timeout')), 2000) setTimeout(
() =>
reject(
new Error("Write client health check timeout")
),
2000
)
) )
]); ]);
this.isWriteHealthy = true; this.isWriteHealthy = true;
// Check read client health if it's different from write client // Check read client health if it's different from write client
if (this.hasReplicas && this.readClient && this.readClient !== this.writeClient) { if (
this.hasReplicas &&
this.readClient &&
this.readClient !== this.writeClient
) {
try { try {
await Promise.race([ await Promise.race([
this.readClient.ping(), this.readClient.ping(),
new Promise((_, reject) => new Promise((_, reject) =>
setTimeout(() => reject(new Error('Read client health check timeout')), 2000) setTimeout(
() =>
reject(
new Error(
"Read client health check timeout"
)
),
2000
)
) )
]); ]);
this.isReadHealthy = true; this.isReadHealthy = true;
} catch (error) { } catch (error) {
logger.error("Redis read client health check failed:", error); logger.error(
"Redis read client health check failed:",
error
);
this.isReadHealthy = false; this.isReadHealthy = false;
} }
} else { } else {
@@ -475,16 +524,13 @@ class RedisManager {
if (!this.isRedisEnabled() || !this.writeClient) return false; if (!this.isRedisEnabled() || !this.writeClient) return false;
try { try {
await this.executeWithRetry( await this.executeWithRetry(async () => {
async () => { if (ttl) {
if (ttl) { await this.writeClient!.setex(key, ttl, value);
await this.writeClient!.setex(key, ttl, value); } else {
} else { await this.writeClient!.set(key, value);
await this.writeClient!.set(key, value); }
} }, "Redis SET");
},
"Redis SET"
);
return true; return true;
        } catch (error) {
            logger.error("Redis SET error:", error);
@@ -496,9 +542,10 @@ class RedisManager {
        if (!this.isRedisEnabled() || !this.readClient) return null;
        try {
-            const fallbackOperation = (this.hasReplicas && this.writeClient && this.isWriteHealthy)
-                ? () => this.writeClient!.get(key)
-                : undefined;
+            const fallbackOperation =
+                this.hasReplicas && this.writeClient && this.isWriteHealthy
+                    ? () => this.writeClient!.get(key)
+                    : undefined;
            return await this.executeWithRetry(
                () => this.readClient!.get(key),
@@ -560,9 +607,10 @@ class RedisManager {
        if (!this.isRedisEnabled() || !this.readClient) return [];
        try {
-            const fallbackOperation = (this.hasReplicas && this.writeClient && this.isWriteHealthy)
-                ? () => this.writeClient!.smembers(key)
-                : undefined;
+            const fallbackOperation =
+                this.hasReplicas && this.writeClient && this.isWriteHealthy
+                    ? () => this.writeClient!.smembers(key)
+                    : undefined;
            return await this.executeWithRetry(
                () => this.readClient!.smembers(key),
@@ -598,9 +646,10 @@ class RedisManager {
        if (!this.isRedisEnabled() || !this.readClient) return null;
        try {
-            const fallbackOperation = (this.hasReplicas && this.writeClient && this.isWriteHealthy)
-                ? () => this.writeClient!.hget(key, field)
-                : undefined;
+            const fallbackOperation =
+                this.hasReplicas && this.writeClient && this.isWriteHealthy
+                    ? () => this.writeClient!.hget(key, field)
+                    : undefined;
            return await this.executeWithRetry(
                () => this.readClient!.hget(key, field),
@@ -632,9 +681,10 @@ class RedisManager {
        if (!this.isRedisEnabled() || !this.readClient) return {};
        try {
-            const fallbackOperation = (this.hasReplicas && this.writeClient && this.isWriteHealthy)
-                ? () => this.writeClient!.hgetall(key)
-                : undefined;
+            const fallbackOperation =
+                this.hasReplicas && this.writeClient && this.isWriteHealthy
+                    ? () => this.writeClient!.hgetall(key)
+                    : undefined;
            return await this.executeWithRetry(
                () => this.readClient!.hgetall(key),
@@ -658,18 +708,18 @@ class RedisManager {
        }
        try {
-            await this.executeWithRetry(
-                async () => {
-                    // Add timeout to prevent hanging
-                    return Promise.race([
-                        this.publisher!.publish(channel, message),
-                        new Promise((_, reject) =>
-                            setTimeout(() => reject(new Error('Redis publish timeout')), 3000)
-                        )
-                    ]);
-                },
-                "Redis PUBLISH"
-            );
+            await this.executeWithRetry(async () => {
+                // Add timeout to prevent hanging
+                return Promise.race([
+                    this.publisher!.publish(channel, message),
+                    new Promise((_, reject) =>
+                        setTimeout(
+                            () => reject(new Error("Redis publish timeout")),
+                            3000
+                        )
+                    )
+                ]);
+            }, "Redis PUBLISH");
            return true;
        } catch (error) {
            logger.error("Redis PUBLISH error:", error);
@@ -689,17 +739,20 @@ class RedisManager {
        if (!this.subscribers.has(channel)) {
            this.subscribers.set(channel, new Set());
            // Only subscribe to the channel if it's the first subscriber
-            await this.executeWithRetry(
-                async () => {
-                    return Promise.race([
-                        this.subscriber!.subscribe(channel),
-                        new Promise((_, reject) =>
-                            setTimeout(() => reject(new Error('Redis subscribe timeout')), 5000)
-                        )
-                    ]);
-                },
-                "Redis SUBSCRIBE"
-            );
+            await this.executeWithRetry(async () => {
+                return Promise.race([
+                    this.subscriber!.subscribe(channel),
+                    new Promise((_, reject) =>
+                        setTimeout(
+                            () =>
+                                reject(
+                                    new Error("Redis subscribe timeout")
+                                ),
+                            5000
+                        )
+                    )
+                ]);
+            }, "Redis SUBSCRIBE");
        }
        this.subscribers.get(channel)!.add(callback);
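Aside: the hunks above only re-wrap two recurring patterns, without changing behavior: a replica read that falls back to the primary, and a pub/sub call raced against a timer. A minimal TypeScript sketch of both, assuming simplified semantics for executeWithRetry (the class's real method may retry differently):

// Sketch only: this executeWithRetry tries the primary operation and uses
// the optional fallback when the first attempt throws (assumed semantics).
type Op<T> = () => Promise<T>;

async function executeWithRetry<T>(
    operation: Op<T>,
    label: string,
    fallbackOperation?: Op<T>
): Promise<T> {
    try {
        return await operation();
    } catch (error) {
        if (fallbackOperation) {
            console.warn(`${label} failed on replica, retrying on primary`);
            return await fallbackOperation();
        }
        throw error;
    }
}

// Racing a call against a timer, as in the PUBLISH/SUBSCRIBE hunks, so a
// dead connection cannot hang the caller indefinitely.
function withTimeout<T>(op: Promise<T>, ms: number, what: string): Promise<T> {
    return Promise.race([
        op,
        new Promise<T>((_, reject) =>
            setTimeout(() => reject(new Error(`${what} timeout`)), ms)
        )
    ]);
}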

View File

@@ -11,9 +11,9 @@
 * This file is not licensed under the AGPLv3.
 */
-import { Store, Options, IncrementResponse } from 'express-rate-limit';
-import { rateLimitService } from './rateLimit';
-import logger from '@server/logger';
+import { Store, Options, IncrementResponse } from "express-rate-limit";
+import { rateLimitService } from "./rateLimit";
+import logger from "@server/logger";
/**
 * A Redis-backed rate limiting store for express-rate-limit that optimizes
@@ -57,12 +57,14 @@ export default class RedisStore implements Store {
     *
     * @param options - Configuration options for the store.
     */
-    constructor(options: {
-        prefix?: string;
-        skipFailedRequests?: boolean;
-        skipSuccessfulRequests?: boolean;
-    } = {}) {
-        this.prefix = options.prefix || 'express-rate-limit';
+    constructor(
+        options: {
+            prefix?: string;
+            skipFailedRequests?: boolean;
+            skipSuccessfulRequests?: boolean;
+        } = {}
+    ) {
+        this.prefix = options.prefix || "express-rate-limit";
        this.skipFailedRequests = options.skipFailedRequests || false;
        this.skipSuccessfulRequests = options.skipSuccessfulRequests || false;
    }
@@ -101,7 +103,8 @@ export default class RedisStore implements Store {
            return {
                totalHits: result.totalHits || 1,
-                resetTime: result.resetTime || new Date(Date.now() + this.windowMs)
+                resetTime:
+                    result.resetTime || new Date(Date.now() + this.windowMs)
            };
        } catch (error) {
            logger.error(`RedisStore increment error for key ${key}:`, error);
@@ -158,7 +161,9 @@ export default class RedisStore implements Store {
     */
    async resetAll(): Promise<void> {
        try {
-            logger.warn('RedisStore resetAll called - this operation can be expensive');
+            logger.warn(
+                "RedisStore resetAll called - this operation can be expensive"
+            );
            // Force sync all pending data first
            await rateLimitService.forceSyncAllPendingData();
@@ -167,9 +172,9 @@ export default class RedisStore implements Store {
            // scanning all Redis keys with our prefix, which could be expensive.
            // In production, it's better to let entries expire naturally.
-            logger.info('RedisStore resetAll completed (pending data synced)');
+            logger.info("RedisStore resetAll completed (pending data synced)");
        } catch (error) {
-            logger.error('RedisStore resetAll error:', error);
+            logger.error("RedisStore resetAll error:", error);
            // Don't throw - this is an optional method
        }
    }
@@ -181,7 +186,9 @@ export default class RedisStore implements Store {
     * @param key - The identifier for a client.
     * @returns Current hit count and reset time, or null if no data exists.
     */
-    async getHits(key: string): Promise<{ totalHits: number; resetTime: Date } | null> {
+    async getHits(
+        key: string
+    ): Promise<{ totalHits: number; resetTime: Date } | null> {
        try {
            const clientId = `${this.prefix}:${key}`;
@@ -200,7 +207,8 @@ export default class RedisStore implements Store {
            return {
                totalHits: Math.max(0, (result.totalHits || 0) - 1), // Adjust for the decrement
-                resetTime: result.resetTime || new Date(Date.now() + this.windowMs)
+                resetTime:
+                    result.resetTime || new Date(Date.now() + this.windowMs)
            };
        } catch (error) {
            logger.error(`RedisStore getHits error for key ${key}:`, error);
@@ -215,9 +223,9 @@ export default class RedisStore implements Store {
    async shutdown(): Promise<void> {
        try {
            // The rateLimitService handles its own cleanup
-            logger.info('RedisStore shutdown completed');
+            logger.info("RedisStore shutdown completed");
        } catch (error) {
-            logger.error('RedisStore shutdown error:', error);
+            logger.error("RedisStore shutdown error:", error);
        }
    }
}
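Aside: a custom store like this plugs into express-rate-limit through its store option; a minimal wiring sketch (the import path and prefix are assumed, not taken from this repo):

// Minimal wiring sketch; "./redisStore" is an assumed path to the store above.
import rateLimit from "express-rate-limit";
import RedisStore from "./redisStore";

const limiter = rateLimit({
    windowMs: 15 * 60 * 1000, // 15-minute window
    limit: 100, // hits allowed per client per window
    store: new RedisStore({ prefix: "api-rate-limit" })
});

// app.use(limiter);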

View File

@@ -16,10 +16,10 @@ import privateConfig from "#private/lib/config";
import logger from "@server/logger";
export enum AudienceIds {
-  SignUps = "6c4e77b2-0851-4bd6-bac8-f51f91360f1a",
-  Subscribed = "870b43fd-387f-44de-8fc1-707335f30b20",
-  Churned = "f3ae92bd-2fdb-4d77-8746-2118afd62549",
-  Newsletter = "5500c431-191c-42f0-a5d4-8b6d445b4ea0"
+    SignUps = "6c4e77b2-0851-4bd6-bac8-f51f91360f1a",
+    Subscribed = "870b43fd-387f-44de-8fc1-707335f30b20",
+    Churned = "f3ae92bd-2fdb-4d77-8746-2118afd62549",
+    Newsletter = "5500c431-191c-42f0-a5d4-8b6d445b4ea0"
}
const resend = new Resend(
@@ -33,7 +33,9 @@ export async function moveEmailToAudience(
    audienceId: AudienceIds
) {
    if (process.env.ENVIRONMENT !== "prod") {
-        logger.debug(`Skipping moving email ${email} to audience ${audienceId} in non-prod environment`);
+        logger.debug(
+            `Skipping moving email ${email} to audience ${audienceId} in non-prod environment`
+        );
        return;
    }
    const { error, data } = await retryWithBackoff(async () => {
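Aside: retryWithBackoff is called here but defined elsewhere in the codebase; a minimal exponential-backoff sketch of what such a helper conventionally does (attempt count and delays are assumed, not the project's values):

// Illustrative only: retries fn with doubling delays, rethrows the last error.
async function retryWithBackoff<T>(
    fn: () => Promise<T>,
    attempts = 3,
    baseDelayMs = 250
): Promise<T> {
    let lastError: unknown;
    for (let attempt = 0; attempt < attempts; attempt++) {
        try {
            return await fn();
        } catch (error) {
            lastError = error;
            // Wait 250ms, 500ms, 1000ms, ... doubling after each failure
            await new Promise((resolve) =>
                setTimeout(resolve, baseDelayMs * 2 ** attempt)
            );
        }
    }
    throw lastError;
}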

View File

@@ -11,4 +11,4 @@
 * This file is not licensed under the AGPLv3.
 */
-export * from "./getTraefikConfig";
\ No newline at end of file
+export * from "./getTraefikConfig";

View File

@@ -19,10 +19,7 @@ import * as crypto from "crypto";
 * @param publicKey - The public key used for verification (PEM format)
 * @returns The decoded payload if validation succeeds, throws an error otherwise
 */
-function validateJWT<Payload>(
-    token: string,
-    publicKey: string
-): Payload {
+function validateJWT<Payload>(token: string, publicKey: string): Payload {
    // Split the JWT into its three parts
    const parts = token.split(".");
    if (parts.length !== 3) {
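Aside: the surrounding file verifies JWTs with node:crypto; an illustrative RS256 check in that style (a sketch, not necessarily this file's exact implementation):

import * as crypto from "crypto";

function verifyRs256Signature(token: string, publicKey: string): boolean {
    const [header, payload, signature] = token.split(".");
    // The signature covers "<header>.<payload>" exactly as transmitted,
    // and JWT signatures are base64url-encoded.
    return crypto
        .createVerify("RSA-SHA256")
        .update(`${header}.${payload}`)
        .verify(publicKey, signature, "base64url");
}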

View File

@@ -41,7 +41,11 @@ async function getActionDays(orgId: string): Promise<number> {
    }
    // store the result in cache
-    cache.set(`org_${orgId}_actionDays`, org.settingsLogRetentionDaysAction, 300);
+    cache.set(
+        `org_${orgId}_actionDays`,
+        org.settingsLogRetentionDaysAction,
+        300
+    );
    return org.settingsLogRetentionDaysAction;
}
@@ -141,4 +145,3 @@ export function logActionAudit(action: ActionsEnum) {
        }
    };
}
-
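Aside: the first hunk is a cache-aside lookup with a 300-second TTL; a self-contained sketch of the pattern (the in-memory cache and DB loader are stand-ins, not the project's helpers):

// Cache-aside: return a fresh cached value, otherwise load and cache it.
const cache = new Map<string, { value: number; expiresAt: number }>();

async function getActionDays(
    orgId: string,
    loadFromDb: (orgId: string) => Promise<number>
): Promise<number> {
    const key = `org_${orgId}_actionDays`;
    const hit = cache.get(key);
    if (hit && hit.expiresAt > Date.now()) return hit.value; // fresh hit
    const days = await loadFromDb(orgId); // miss: fall through to the database
    cache.set(key, { value: days, expiresAt: Date.now() + 300_000 }); // 300s TTL
    return days;
}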

View File

@@ -28,7 +28,8 @@ export async function verifyCertificateAccess(
    try {
        // Assume user/org access is already verified
        const orgId = req.params.orgId;
-        const certId = req.params.certId || req.body?.certId || req.query?.certId;
+        const certId =
+            req.params.certId || req.body?.certId || req.query?.certId;
        let domainId =
            req.params.domainId || req.body?.domainId || req.query?.domainId;
@@ -39,10 +40,12 @@ export async function verifyCertificateAccess(
        }
        if (!domainId) {
            if (!certId) {
                return next(
-                    createHttpError(HttpCode.BAD_REQUEST, "Must provide either certId or domainId")
+                    createHttpError(
+                        HttpCode.BAD_REQUEST,
+                        "Must provide either certId or domainId"
+                    )
                );
            }
@@ -75,7 +78,10 @@ export async function verifyCertificateAccess(
        if (!domainId) {
            return next(
-                createHttpError(HttpCode.BAD_REQUEST, "Must provide either certId or domainId")
+                createHttpError(
+                    HttpCode.BAD_REQUEST,
+                    "Must provide either certId or domainId"
+                )
            );
        }

View File

@@ -24,8 +24,7 @@ export async function verifyIdpAccess(
) {
    try {
        const userId = req.user!.userId;
-        const idpId =
-            req.params.idpId || req.body.idpId || req.query.idpId;
+        const idpId = req.params.idpId || req.body.idpId || req.query.idpId;
        const orgId = req.params.orgId;
        if (!userId) {
@@ -50,9 +49,7 @@ export async function verifyIdpAccess(
            .select()
            .from(idp)
            .innerJoin(idpOrg, eq(idp.idpId, idpOrg.idpId))
-            .where(
-                and(eq(idp.idpId, idpId), eq(idpOrg.orgId, orgId))
-            )
+            .where(and(eq(idp.idpId, idpId), eq(idpOrg.orgId, orgId)))
            .limit(1);
        if (!idpRes || !idpRes.idp || !idpRes.idpOrg) {

View File

@@ -26,7 +26,8 @@ export const verifySessionRemoteExitNodeMiddleware = async (
        // get the token from the auth header
        const token = req.headers["authorization"]?.split(" ")[1] || "";
-        const { session, remoteExitNode } = await validateRemoteExitNodeSessionToken(token);
+        const { session, remoteExitNode } =
+            await validateRemoteExitNodeSessionToken(token);
        if (!session || !remoteExitNode) {
            if (config.getRawConfig().app.log_failed_attempts) {

View File

@@ -19,7 +19,11 @@ import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
import { fromError } from "zod-validation-error";
import logger from "@server/logger";
-import { queryAccessAuditLogsParams, queryAccessAuditLogsQuery, queryAccess } from "./queryAccessAuditLog";
+import {
+    queryAccessAuditLogsParams,
+    queryAccessAuditLogsQuery,
+    queryAccess
+} from "./queryAccessAuditLog";
import { generateCSV } from "@server/routers/auditLogs/generateCSV";
registry.registerPath({
@@ -67,10 +71,13 @@ export async function exportAccessAuditLogs(
        const log = await baseQuery.limit(data.limit).offset(data.offset);
        const csvData = generateCSV(log);
-        res.setHeader('Content-Type', 'text/csv');
-        res.setHeader('Content-Disposition', `attachment; filename="access-audit-logs-${data.orgId}-${Date.now()}.csv"`);
+        res.setHeader("Content-Type", "text/csv");
+        res.setHeader(
+            "Content-Disposition",
+            `attachment; filename="access-audit-logs-${data.orgId}-${Date.now()}.csv"`
+        );
        return res.send(csvData);
    } catch (error) {
        logger.error(error);
@@ -78,4 +85,4 @@ export async function exportAccessAuditLogs(
            createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
        );
    }
-}
\ No newline at end of file
+}
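Aside: both export handlers send CSV back with a Content-Disposition header so browsers download the response rather than render it; a minimal Express sketch (route path and CSV payload invented for illustration):

import express from "express";

const app = express();

app.get("/export.csv", (_req, res) => {
    const csvData = "id,action\n1,create\n2,delete\n";
    res.setHeader("Content-Type", "text/csv");
    // "attachment" plus a filename tells the browser to download the file
    res.setHeader(
        "Content-Disposition",
        `attachment; filename="audit-logs-${Date.now()}.csv"`
    );
    res.send(csvData);
});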

View File

@@ -19,7 +19,11 @@ import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
import { fromError } from "zod-validation-error";
import logger from "@server/logger";
-import { queryActionAuditLogsParams, queryActionAuditLogsQuery, queryAction } from "./queryActionAuditLog";
+import {
+    queryActionAuditLogsParams,
+    queryActionAuditLogsQuery,
+    queryAction
+} from "./queryActionAuditLog";
import { generateCSV } from "@server/routers/auditLogs/generateCSV";
registry.registerPath({
@@ -60,17 +64,20 @@ export async function exportActionAuditLogs(
            );
        }
        const data = { ...parsedQuery.data, ...parsedParams.data };
        const baseQuery = queryAction(data);
        const log = await baseQuery.limit(data.limit).offset(data.offset);
        const csvData = generateCSV(log);
-        res.setHeader('Content-Type', 'text/csv');
-        res.setHeader('Content-Disposition', `attachment; filename="action-audit-logs-${data.orgId}-${Date.now()}.csv"`);
+        res.setHeader("Content-Type", "text/csv");
+        res.setHeader(
+            "Content-Disposition",
+            `attachment; filename="action-audit-logs-${data.orgId}-${Date.now()}.csv"`
+        );
        return res.send(csvData);
    } catch (error) {
        logger.error(error);
@@ -78,4 +85,4 @@ export async function exportActionAuditLogs(
            createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
        );
    }
-}
\ No newline at end of file
+}

View File

@@ -14,4 +14,4 @@
export * from "./queryActionAuditLog";
export * from "./exportActionAuditLog";
export * from "./queryAccessAuditLog";
-export * from "./exportAccessAuditLog";
\ No newline at end of file
+export * from "./exportAccessAuditLog";

View File

@@ -44,7 +44,8 @@ export const queryAccessAuditLogsQuery = z.object({
        .openapi({
            type: "string",
            format: "date-time",
-            description: "End time as ISO date string (defaults to current time)"
+            description:
+                "End time as ISO date string (defaults to current time)"
        }),
    action: z
        .union([z.boolean(), z.string()])
@@ -181,9 +182,15 @@ async function queryUniqueFilterAttributes(
        .where(baseConditions);
    return {
-        actors: uniqueActors.map(row => row.actor).filter((actor): actor is string => actor !== null),
-        resources: uniqueResources.filter((row): row is { id: number; name: string | null } => row.id !== null),
-        locations: uniqueLocations.map(row => row.locations).filter((location): location is string => location !== null)
+        actors: uniqueActors
+            .map((row) => row.actor)
+            .filter((actor): actor is string => actor !== null),
+        resources: uniqueResources.filter(
+            (row): row is { id: number; name: string | null } => row.id !== null
+        ),
+        locations: uniqueLocations
+            .map((row) => row.locations)
+            .filter((location): location is string => location !== null)
    };
}
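Aside: these hunks lean on TypeScript type-predicate filters; a standalone sketch showing how filter((x): x is string => ...) both removes nulls at runtime and narrows the element type at compile time:

const rows: { actor: string | null }[] = [
    { actor: "alice" },
    { actor: null },
    { actor: "bob" }
];

// Without the `actor is string` predicate the result would still be typed
// (string | null)[] even though the nulls are gone at runtime.
const actors: string[] = rows
    .map((row) => row.actor)
    .filter((actor): actor is string => actor !== null);

console.log(actors); // ["alice", "bob"]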

View File

@@ -44,7 +44,8 @@ export const queryActionAuditLogsQuery = z.object({
        .openapi({
            type: "string",
            format: "date-time",
-            description: "End time as ISO date string (defaults to current time)"
+            description:
+                "End time as ISO date string (defaults to current time)"
        }),
    action: z.string().optional(),
    actorType: z.string().optional(),
@@ -68,8 +69,9 @@ export const queryActionAuditLogsParams = z.object({
    orgId: z.string()
});
-export const queryActionAuditLogsCombined =
-    queryActionAuditLogsQuery.merge(queryActionAuditLogsParams);
+export const queryActionAuditLogsCombined = queryActionAuditLogsQuery.merge(
+    queryActionAuditLogsParams
+);
type Q = z.infer<typeof queryActionAuditLogsCombined>;
function getWhere(data: Q) {
@@ -78,7 +80,9 @@ function getWhere(data: Q) {
        lt(actionAuditLog.timestamp, data.timeEnd),
        eq(actionAuditLog.orgId, data.orgId),
        data.actor ? eq(actionAuditLog.actor, data.actor) : undefined,
-        data.actorType ? eq(actionAuditLog.actorType, data.actorType) : undefined,
+        data.actorType
+            ? eq(actionAuditLog.actorType, data.actorType)
+            : undefined,
        data.actorId ? eq(actionAuditLog.actorId, data.actorId) : undefined,
        data.action ? eq(actionAuditLog.action, data.action) : undefined
    );
@@ -135,8 +139,12 @@ async function queryUniqueFilterAttributes(
        .where(baseConditions);
    return {
-        actors: uniqueActors.map(row => row.actor).filter((actor): actor is string => actor !== null),
-        actions: uniqueActions.map(row => row.action).filter((action): action is string => action !== null),
+        actors: uniqueActors
+            .map((row) => row.actor)
+            .filter((actor): actor is string => actor !== null),
+        actions: uniqueActions
+            .map((row) => row.action)
+            .filter((action): action is string => action !== null)
    };
}
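Aside: the middle hunk reformats a zod schema merge; a small sketch of what .merge() does (field names invented for illustration):

import { z } from "zod";

const querySchema = z.object({ limit: z.coerce.number().default(50) });
const paramsSchema = z.object({ orgId: z.string() });

// merge() yields one object schema carrying the keys of both inputs,
// so query-string and path-param fields validate in a single parse.
const combined = querySchema.merge(paramsSchema);
type Combined = z.infer<typeof combined>; // { limit: number; orgId: string }

console.log(combined.parse({ limit: "25", orgId: "acme" })); // { limit: 25, orgId: "acme" }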

View File

@@ -13,4 +13,4 @@
export * from "./transferSession";
export * from "./getSessionTransferToken";
-export * from "./quickStart";
\ No newline at end of file
+export * from "./quickStart";

View File

@@ -395,7 +395,8 @@ export async function quickStart(
            .values({
                targetId: newTarget[0].targetId,
                hcEnabled: false
-            }).returning();
+            })
+            .returning();
        // add the new target to the targetIps array
        targetIps.push(`${ip}/32`);
@@ -406,7 +407,12 @@ export async function quickStart(
            .where(eq(newts.siteId, siteId!))
            .limit(1);
-        await addTargets(newt.newtId, newTarget, newHealthcheck, resource.protocol);
+        await addTargets(
+            newt.newtId,
+            newTarget,
+            newHealthcheck,
+            resource.protocol
+        );
        // Set resource pincode if provided
        if (pincode) {
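Aside: the first hunk re-wraps a drizzle-orm insert that uses .returning() to read generated values without a second SELECT; a minimal sketch (table name and columns invented, not this repo's schema):

import { sqliteTable, integer, text } from "drizzle-orm/sqlite-core";

const targets = sqliteTable("targets", {
    targetId: integer("target_id").primaryKey({ autoIncrement: true }),
    ip: text("ip").notNull()
});

// With a configured drizzle `db` instance:
// const rows = await db.insert(targets).values({ ip: "10.0.0.5" }).returning();
// rows[0].targetId holds the autogenerated id immediately, no extra query.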

Some files were not shown because too many files have changed in this diff.