Merge remote-tracking branch 'upstream/dev' into fix-dev

merge dev
Lokowitz
2025-12-08 08:21:19 +00:00
20 changed files with 1050 additions and 155 deletions


@@ -1320,9 +1320,9 @@
"productUpdateTitle": "Product Updates",
"productUpdateEmpty": "No updates",
"dismissAll": "Dismiss all",
"pangolinUpdateAvailable": "New version available",
"pangolinUpdateAvailable": "Update Available",
"pangolinUpdateAvailableInfo": "Version {version} is ready to install",
"pangolinUpdateAvailableReleaseNotes": "View release notes",
"pangolinUpdateAvailableReleaseNotes": "View Release Notes",
"newtUpdateAvailable": "Update Available",
"newtUpdateAvailableInfo": "A new version of Newt is available. Please update to the latest version for the best experience.",
"domainPickerEnterDomain": "Domain",


@@ -133,15 +133,12 @@ export async function applyBlueprint({
`Updating client resource ${result.newSiteResource.siteResourceId} on site ${site.sites.siteId}`
);
if (result.oldSiteResource) {
// this was an update
await handleMessagingForUpdatedSiteResource(
result.oldSiteResource,
result.newSiteResource,
{ siteId: site.sites.siteId, orgId: site.sites.orgId },
trx
);
}
await handleMessagingForUpdatedSiteResource(
result.oldSiteResource,
result.newSiteResource,
{ siteId: site.sites.siteId, orgId: site.sites.orgId },
trx
);
// await addClientTargets(
// site.newt.newtId,
@@ -188,4 +185,4 @@ export async function applyBlueprint({
}
return blueprint;
}
}


@@ -328,111 +328,103 @@ export const ConfigSchema = z
sites: Record<string, z.infer<typeof SiteSchema>>;
};
})
.refine(
.superRefine((config, ctx) => {
// Enforce the full-domain uniqueness across resources in the same stack
(config) => {
// Extract duplicates for error message
const fullDomainMap = new Map<string, string[]>();
const fullDomainMap = new Map<string, string[]>();
Object.entries(config["proxy-resources"]).forEach(
([resourceKey, resource]) => {
const fullDomain = resource["full-domain"];
if (fullDomain) {
// Only process if full-domain is defined
if (!fullDomainMap.has(fullDomain)) {
fullDomainMap.set(fullDomain, []);
}
fullDomainMap.get(fullDomain)!.push(resourceKey);
Object.entries(config["proxy-resources"]).forEach(
([resourceKey, resource]) => {
const fullDomain = resource["full-domain"];
if (fullDomain) {
// Only process if full-domain is defined
if (!fullDomainMap.has(fullDomain)) {
fullDomainMap.set(fullDomain, []);
}
fullDomainMap.get(fullDomain)!.push(resourceKey);
}
);
const duplicates = Array.from(fullDomainMap.entries())
.filter(([_, resourceKeys]) => resourceKeys.length > 1)
.map(
([fullDomain, resourceKeys]) =>
`'${fullDomain}' used by resources: ${resourceKeys.join(", ")}`
)
.join("; ");
if (duplicates.length !== 0) {
return {
path: ["resources"],
error: `Duplicate 'full-domain' values found: ${duplicates}`
};
}
);
const fullDomainDuplicates = Array.from(fullDomainMap.entries())
.filter(([_, resourceKeys]) => resourceKeys.length > 1)
.map(
([fullDomain, resourceKeys]) =>
`'${fullDomain}' used by resources: ${resourceKeys.join(", ")}`
)
.join("; ");
if (fullDomainDuplicates.length !== 0) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
path: ["proxy-resources"],
message: `Duplicate 'full-domain' values found: ${fullDomainDuplicates}`
});
}
)
.refine(
// Enforce proxy-port uniqueness within proxy-resources per protocol
(config) => {
// Extract duplicates for error message
const protocolPortMap = new Map<string, string[]>();
const protocolPortMap = new Map<string, string[]>();
Object.entries(config["proxy-resources"]).forEach(
([resourceKey, resource]) => {
const proxyPort = resource["proxy-port"];
const protocol = resource.protocol;
if (proxyPort !== undefined && protocol !== undefined) {
const key = `${protocol}:${proxyPort}`;
if (!protocolPortMap.has(key)) {
protocolPortMap.set(key, []);
}
protocolPortMap.get(key)!.push(resourceKey);
Object.entries(config["proxy-resources"]).forEach(
([resourceKey, resource]) => {
const proxyPort = resource["proxy-port"];
const protocol = resource.protocol;
if (proxyPort !== undefined && protocol !== undefined) {
const key = `${protocol}:${proxyPort}`;
if (!protocolPortMap.has(key)) {
protocolPortMap.set(key, []);
}
protocolPortMap.get(key)!.push(resourceKey);
}
);
const duplicates = Array.from(protocolPortMap.entries())
.filter(([_, resourceKeys]) => resourceKeys.length > 1)
.map(([protocolPort, resourceKeys]) => {
const [protocol, port] = protocolPort.split(":");
return `${protocol.toUpperCase()} port ${port} used by proxy-resources: ${resourceKeys.join(", ")}`;
})
.join("; ");
if (duplicates.length !== 0) {
return {
path: ["proxy-resources"],
error: `Duplicate 'proxy-port' values found in proxy-resources: ${duplicates}`
};
}
);
const portDuplicates = Array.from(protocolPortMap.entries())
.filter(([_, resourceKeys]) => resourceKeys.length > 1)
.map(([protocolPort, resourceKeys]) => {
const [protocol, port] = protocolPort.split(":");
return `${protocol.toUpperCase()} port ${port} used by proxy-resources: ${resourceKeys.join(", ")}`;
})
.join("; ");
if (portDuplicates.length !== 0) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
path: ["proxy-resources"],
message: `Duplicate 'proxy-port' values found in proxy-resources: ${portDuplicates}`
});
}
)
.refine(
// Enforce alias uniqueness within client-resources
(config) => {
// Extract duplicates for error message
const aliasMap = new Map<string, string[]>();
const aliasMap = new Map<string, string[]>();
Object.entries(config["client-resources"]).forEach(
([resourceKey, resource]) => {
const alias = resource.alias;
if (alias !== undefined) {
if (!aliasMap.has(alias)) {
aliasMap.set(alias, []);
}
aliasMap.get(alias)!.push(resourceKey);
Object.entries(config["client-resources"]).forEach(
([resourceKey, resource]) => {
const alias = resource.alias;
if (alias !== undefined) {
if (!aliasMap.has(alias)) {
aliasMap.set(alias, []);
}
aliasMap.get(alias)!.push(resourceKey);
}
);
const duplicates = Array.from(aliasMap.entries())
.filter(([_, resourceKeys]) => resourceKeys.length > 1)
.map(
([alias, resourceKeys]) =>
`alias '${alias}' used by client-resources: ${resourceKeys.join(", ")}`
)
.join("; ");
if (duplicates.length !== 0) {
return {
path: ["client-resources"],
error: `Duplicate 'alias' values found in client-resources: ${duplicates}`
};
}
);
const aliasDuplicates = Array.from(aliasMap.entries())
.filter(([_, resourceKeys]) => resourceKeys.length > 1)
.map(
([alias, resourceKeys]) =>
`alias '${alias}' used by client-resources: ${resourceKeys.join(", ")}`
)
.join("; ");
if (aliasDuplicates.length !== 0) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
path: ["client-resources"],
message: `Duplicate 'alias' values found in client-resources: ${aliasDuplicates}`
});
}
);
});
// Type inference from the schema
export type Site = z.infer<typeof SiteSchema>;


@@ -2,7 +2,7 @@ import path from "path";
import { fileURLToPath } from "url";
// This is a placeholder value replaced by the build process
export const APP_VERSION = "1.13.0";
export const APP_VERSION = "1.13.0-rc.0";
export const __FILENAME = fileURLToPath(import.meta.url);
export const __DIRNAME = path.dirname(__FILENAME);


@@ -142,8 +142,24 @@ export class TraefikConfigManager {
const wildcardExists = await this.fileExists(wildcardPath);
let lastModified: Date | null = null;
const expiresAt: Date | null = null;
let expiresAt: number | null = null;
let wildcard = false;
const expiresAtPath = path.join(domainDir, ".expires_at");
const expiresAtExists = await this.fileExists(expiresAtPath);
if (expiresAtExists) {
try {
const expiresAtStr = fs
.readFileSync(expiresAtPath, "utf8")
.trim();
expiresAt = parseInt(expiresAtStr, 10);
if (isNaN(expiresAt)) {
expiresAt = null;
}
} catch {
expiresAt = null;
}
}
if (lastUpdateExists) {
try {
@@ -179,7 +195,7 @@ export class TraefikConfigManager {
state.set(domain, {
exists: certExists && keyExists,
lastModified,
lastModified: lastModified ? Math.floor(lastModified.getTime() / 1000) : null,
expiresAt,
wildcard
});
@@ -259,9 +275,9 @@ export class TraefikConfigManager {
// Check if certificate is expiring soon (within 30 days)
if (localState.expiresAt) {
const daysUntilExpiry =
(localState.expiresAt - Math.floor(Date.now() / 1000)) /
(1000 * 60 * 60 * 24);
const nowInSeconds = Math.floor(Date.now() / 1000);
const secondsUntilExpiry = localState.expiresAt - nowInSeconds;
const daysUntilExpiry = secondsUntilExpiry / (60 * 60 * 24);
if (daysUntilExpiry < 30) {
logger.info(
`Fetching certificates due to upcoming expiry for ${domain} (${Math.round(daysUntilExpiry)} days remaining)`
@@ -770,6 +786,16 @@ export class TraefikConfigManager {
"utf8"
);
// Store the certificate expiry time
if (cert.expiresAt) {
const expiresAtPath = path.join(domainDir, ".expires_at");
fs.writeFileSync(
expiresAtPath,
cert.expiresAt.toString(),
"utf8"
);
}
logger.info(
`Certificate updated for domain: ${cert.domain}${cert.wildcard ? " (wildcard)" : ""}`
);


@@ -5,7 +5,6 @@ import { and, eq } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
import { checkOrgAccessPolicy } from "#dynamic/lib/checkOrgAccessPolicy";
import logger from "@server/logger";
export async function verifyOrgAccess(
req: Request,
@@ -27,8 +26,6 @@ export async function verifyOrgAccess(
);
}
logger.debug(`Verifying access for user ${userId} to organization ${orgId}`);
try {
if (!req.userOrg) {
const userOrgRes = await db
@@ -71,9 +68,6 @@ export async function verifyOrgAccess(
req.userOrgRoleId = req.userOrg.roleId;
req.userOrgId = orgId;
logger.debug(
`User ${userId} has access to organization ${orgId} with role ${req.userOrg.roleId}`
);
return next();
} catch (e) {
return next(


@@ -76,7 +76,7 @@ export async function verifySiteAccess(
.select()
.from(userOrgs)
.where(
and(eq(userOrgs.userId, userId), eq(userOrgs.orgId, orgId))
and(eq(userOrgs.userId, userId), eq(userOrgs.orgId, site.orgId))
)
.limit(1);
req.userOrg = userOrgRole[0];


@@ -220,6 +220,7 @@ export async function createUserClient(
niceId,
exitNodeId: randomExitNode.exitNodeId,
orgId,
niceId,
name,
subnet: updatedSubnet,
type,


@@ -7,6 +7,8 @@ import logger from "@server/logger";
import { validateSessionToken } from "@server/auth/sessions/app";
import { checkOrgAccessPolicy } from "#dynamic/lib/checkOrgAccessPolicy";
import { sendTerminateClient } from "../client/terminate";
import { encodeHexLowerCase } from "@oslojs/encoding";
import { sha256 } from "@oslojs/crypto/sha2";
// Track if the offline checker interval is running
let offlineCheckerInterval: NodeJS.Timeout | null = null;
@@ -133,10 +135,14 @@ export const handleOlmPingMessage: MessageHandler = async (context) => {
return;
}
const sessionId = encodeHexLowerCase(
sha256(new TextEncoder().encode(userToken))
);
const policyCheck = await checkOrgAccessPolicy({
orgId: client.orgId,
userId: olm.userId,
sessionId: userToken // this is the user token passed in the message
sessionId // this is the user token passed in the message
});
if (!policyCheck.allowed) {


@@ -1,17 +1,8 @@
import {
Client,
clientSiteResourcesAssociationsCache,
db,
ExitNode,
Org,
orgs,
roleClients,
roles,
siteResources,
Transaction,
userClients,
userOrgs,
users
siteResources
} from "@server/db";
import { MessageHandler } from "@server/routers/ws";
import {
@@ -25,16 +16,13 @@ import {
import { and, eq, inArray, isNull } from "drizzle-orm";
import { addPeer, deletePeer } from "../newt/peers";
import logger from "@server/logger";
import { listExitNodes } from "#dynamic/lib/exitNodes";
import {
generateAliasConfig,
getNextAvailableClientSubnet
} from "@server/lib/ip";
import { generateAliasConfig } from "@server/lib/ip";
import { generateRemoteSubnets } from "@server/lib/ip";
import { rebuildClientAssociationsFromClient } from "@server/lib/rebuildClientAssociations";
import { checkOrgAccessPolicy } from "#dynamic/lib/checkOrgAccessPolicy";
import { validateSessionToken } from "@server/auth/sessions/app";
import config from "@server/lib/config";
import { encodeHexLowerCase } from "@oslojs/encoding";
import { sha256 } from "@oslojs/crypto/sha2";
export const handleOlmRegisterMessage: MessageHandler = async (context) => {
logger.info("Handling register olm message!");
@@ -48,7 +36,8 @@ export const handleOlmRegisterMessage: MessageHandler = async (context) => {
return;
}
const { publicKey, relay, olmVersion, olmAgent, orgId, userToken } = message.data;
const { publicKey, relay, olmVersion, olmAgent, orgId, userToken } =
message.data;
if (!olm.clientId) {
logger.warn("Olm client ID not found");
@@ -94,10 +83,14 @@ export const handleOlmRegisterMessage: MessageHandler = async (context) => {
return;
}
const sessionId = encodeHexLowerCase(
sha256(new TextEncoder().encode(userToken))
);
const policyCheck = await checkOrgAccessPolicy({
orgId: orgId,
userId: olm.userId,
sessionId: userToken // this is the user token passed in the message
sessionId // this is the user token passed in the message
});
if (!policyCheck.allowed) {
@@ -117,7 +110,10 @@ export const handleOlmRegisterMessage: MessageHandler = async (context) => {
return;
}
if ((olmVersion && olm.version !== olmVersion) || (olmAgent && olm.agent !== olmAgent)) {
if (
(olmVersion && olm.version !== olmVersion) ||
(olmAgent && olm.agent !== olmAgent)
) {
await db
.update(olms)
.set({
@@ -175,7 +171,10 @@ export const handleOlmRegisterMessage: MessageHandler = async (context) => {
}
// Process each site
for (const { sites: site, clientSitesAssociationsCache: association } of sitesData) {
for (const {
sites: site,
clientSitesAssociationsCache: association
} of sitesData) {
if (!site.exitNodeId) {
logger.warn(
`Site ${site.siteId} does not have exit node, skipping`
@@ -275,6 +274,7 @@ export const handleOlmRegisterMessage: MessageHandler = async (context) => {
// Add site configuration to the array
siteConfigurations.push({
siteId: site.siteId,
name: site.name,
// relayEndpoint: relayEndpoint, // this can be undefined now if not relayed // lets not do this for now because it would conflict with the hole punch testing
endpoint: site.endpoint,
publicKey: site.publicKey,


@@ -169,6 +169,7 @@ export const handleOlmServerPeerAddMessage: MessageHandler = async (
type: "olm/wg/peer/add",
data: {
siteId: site.siteId,
name: site.name,
endpoint: site.endpoint,
publicKey: site.publicKey,
serverIP: site.address,


@@ -8,6 +8,7 @@ export async function addPeer(
clientId: number,
peer: {
siteId: number;
name: string;
publicKey: string;
endpoint: string;
relayEndpoint: string;
@@ -34,6 +35,7 @@ export async function addPeer(
type: "olm/wg/peer/add",
data: {
siteId: peer.siteId,
name: peer.name,
publicKey: peer.publicKey,
endpoint: peer.endpoint,
relayEndpoint: peer.relayEndpoint,


@@ -328,23 +328,27 @@ export async function updateSiteResource(
}
export async function handleMessagingForUpdatedSiteResource(
existingSiteResource: SiteResource,
existingSiteResource: SiteResource | undefined,
updatedSiteResource: SiteResource,
site: { siteId: number; orgId: string },
trx: Transaction
) {
const { mergedAllClients } =
await rebuildClientAssociationsFromSiteResource(
existingSiteResource, // we want to rebuild based on the existing resource then we will apply the change to the destination below
existingSiteResource || updatedSiteResource, // we want to rebuild based on the existing resource then we will apply the change to the destination below
trx
);
// after everything is rebuilt above we still need to update the targets and remote subnets if the destination changed
const destinationChanged =
existingSiteResource &&
existingSiteResource.destination !== updatedSiteResource.destination;
const aliasChanged =
existingSiteResource &&
existingSiteResource.alias !== updatedSiteResource.alias;
// if the existingSiteResource is undefined (new resource) we don't need to do anything here, the rebuild above handled it all
if (destinationChanged || aliasChanged) {
const [newt] = await trx
.select()


@@ -14,6 +14,7 @@ import m6 from "./scriptsPg/1.10.2";
import m7 from "./scriptsPg/1.11.0";
import m8 from "./scriptsPg/1.11.1";
import m9 from "./scriptsPg/1.12.0";
import m10 from "./scriptsPg/1.13.0";
// THIS CANNOT IMPORT ANYTHING FROM THE SERVER
// EXCEPT FOR THE DATABASE AND THE SCHEMA
@@ -28,7 +29,8 @@ const migrations = [
{ version: "1.10.2", run: m6 },
{ version: "1.11.0", run: m7 },
{ version: "1.11.1", run: m8 },
{ version: "1.12.0", run: m9 }
{ version: "1.12.0", run: m9 },
{ version: "1.13.0", run: m10 },
// Add new migrations here as they are created
] as {
version: string;


@@ -32,6 +32,7 @@ import m27 from "./scriptsSqlite/1.10.2";
import m28 from "./scriptsSqlite/1.11.0";
import m29 from "./scriptsSqlite/1.11.1";
import m30 from "./scriptsSqlite/1.12.0";
import m31 from "./scriptsSqlite/1.13.0";
// THIS CANNOT IMPORT ANYTHING FROM THE SERVER
// EXCEPT FOR THE DATABASE AND THE SCHEMA
@@ -62,7 +63,8 @@ const migrations = [
{ version: "1.10.2", run: m27 },
{ version: "1.11.0", run: m28 },
{ version: "1.11.1", run: m29 },
{ version: "1.12.0", run: m30 }
{ version: "1.12.0", run: m30 },
{ version: "1.13.0", run: m31 }
// Add new migrations here as they are created
] as const;


@@ -0,0 +1,380 @@
import { db } from "@server/db/pg/driver";
import { sql } from "drizzle-orm";
import { __DIRNAME } from "@server/lib/consts";
import { readFileSync } from "fs";
import { join } from "path";
const version = "1.13.0";
const dev = process.env.ENVIRONMENT !== "prod";
let file;
if (!dev) {
file = join(__DIRNAME, "names.json");
} else {
file = join("server/db/names.json");
}
export const names = JSON.parse(readFileSync(file, "utf-8"));
export function generateName(): string {
const name = (
names.descriptors[
Math.floor(Math.random() * names.descriptors.length)
] +
"-" +
names.animals[Math.floor(Math.random() * names.animals.length)]
)
.toLowerCase()
.replace(/\s/g, "-");
// clean out any non-alphanumeric characters except for dashes
return name.replace(/[^a-z0-9-]/g, "");
}
export default async function migration() {
console.log(`Running setup script ${version}...`);
try {
await db.execute(sql`BEGIN`);
await db.execute(sql`
CREATE TABLE "clientSiteResources" (
"clientId" integer NOT NULL,
"siteResourceId" integer NOT NULL
);
`);
await db.execute(sql`
CREATE TABLE "clientSiteResourcesAssociationsCache" (
"clientId" integer NOT NULL,
"siteResourceId" integer NOT NULL
);
`);
await db.execute(sql`
CREATE TABLE "deviceWebAuthCodes" (
"codeId" serial PRIMARY KEY NOT NULL,
"code" text NOT NULL,
"ip" text,
"city" text,
"deviceName" text,
"applicationName" text NOT NULL,
"expiresAt" bigint NOT NULL,
"createdAt" bigint NOT NULL,
"verified" boolean DEFAULT false NOT NULL,
"userId" varchar,
CONSTRAINT "deviceWebAuthCodes_code_unique" UNIQUE("code")
);
`);
await db.execute(sql`
CREATE TABLE "roleSiteResources" (
"roleId" integer NOT NULL,
"siteResourceId" integer NOT NULL
);
`);
await db.execute(sql`
CREATE TABLE "userSiteResources" (
"userId" varchar NOT NULL,
"siteResourceId" integer NOT NULL
);
`);
await db.execute(
sql`ALTER TABLE "clientSites" RENAME TO "clientSitesAssociationsCache";`
);
await db.execute(
sql`ALTER TABLE "clients" RENAME COLUMN "id" TO "clientId";`
);
await db.execute(
sql`ALTER TABLE "siteResources" RENAME COLUMN "destinationIp" TO "destination";`
);
await db.execute(
sql`ALTER TABLE "clientSitesAssociationsCache" DROP CONSTRAINT "clientSites_clientId_clients_id_fk";`
);
await db.execute(
sql`ALTER TABLE "clientSitesAssociationsCache" DROP CONSTRAINT "clientSites_siteId_sites_siteId_fk";`
);
await db.execute(
sql`ALTER TABLE "olms" DROP CONSTRAINT "olms_clientId_clients_id_fk";`
);
await db.execute(
sql`ALTER TABLE "roleClients" DROP CONSTRAINT "roleClients_clientId_clients_id_fk";`
);
await db.execute(
sql`ALTER TABLE "userClients" DROP CONSTRAINT "userClients_clientId_clients_id_fk";`
);
await db.execute(
sql`ALTER TABLE "siteResources" ALTER COLUMN "protocol" DROP NOT NULL;`
);
await db.execute(
sql`ALTER TABLE "siteResources" ALTER COLUMN "proxyPort" DROP NOT NULL;`
);
await db.execute(
sql`ALTER TABLE "siteResources" ALTER COLUMN "destinationPort" DROP NOT NULL;`
);
await db.execute(
sql`ALTER TABLE "clientSitesAssociationsCache" ADD COLUMN "publicKey" varchar;`
);
await db.execute(sql`ALTER TABLE "clients" ADD COLUMN "userId" text;`);
await db.execute(
sql`ALTER TABLE "clients" ADD COLUMN "niceId" varchar NOT NULL DEFAULT 'PLACEHOLDER';`
);
await db.execute(sql`ALTER TABLE "clients" ADD COLUMN "olmId" text;`);
await db.execute(sql`ALTER TABLE "olms" ADD COLUMN "agent" text;`);
await db.execute(sql`ALTER TABLE "olms" ADD COLUMN "name" varchar;`);
await db.execute(sql`ALTER TABLE "olms" ADD COLUMN "userId" text;`);
await db.execute(
sql`ALTER TABLE "orgs" ADD COLUMN "utilitySubnet" varchar;`
);
await db.execute(
sql`ALTER TABLE "session" ADD COLUMN "deviceAuthUsed" boolean DEFAULT false NOT NULL;`
);
await db.execute(
sql`ALTER TABLE "siteResources" ADD COLUMN "mode" varchar NOT NULL DEFAULT 'host';`
);
await db.execute(
sql`ALTER TABLE "siteResources" ADD COLUMN "alias" varchar;`
);
await db.execute(
sql`ALTER TABLE "siteResources" ADD COLUMN "aliasAddress" varchar;`
);
await db.execute(
sql`ALTER TABLE "targetHealthCheck" ADD COLUMN "hcTlsServerName" text;`
);
await db.execute(
sql`ALTER TABLE "clientSiteResources" ADD CONSTRAINT "clientSiteResources_clientId_clients_clientId_fk" FOREIGN KEY ("clientId") REFERENCES "public"."clients"("clientId") ON DELETE cascade ON UPDATE no action;`
);
await db.execute(
sql`ALTER TABLE "clientSiteResources" ADD CONSTRAINT "clientSiteResources_siteResourceId_siteResources_siteResourceId_fk" FOREIGN KEY ("siteResourceId") REFERENCES "public"."siteResources"("siteResourceId") ON DELETE cascade ON UPDATE no action;`
);
await db.execute(
sql`ALTER TABLE "deviceWebAuthCodes" ADD CONSTRAINT "deviceWebAuthCodes_userId_user_id_fk" FOREIGN KEY ("userId") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;`
);
await db.execute(
sql`ALTER TABLE "roleSiteResources" ADD CONSTRAINT "roleSiteResources_roleId_roles_roleId_fk" FOREIGN KEY ("roleId") REFERENCES "public"."roles"("roleId") ON DELETE cascade ON UPDATE no action;`
);
await db.execute(
sql`ALTER TABLE "roleSiteResources" ADD CONSTRAINT "roleSiteResources_siteResourceId_siteResources_siteResourceId_fk" FOREIGN KEY ("siteResourceId") REFERENCES "public"."siteResources"("siteResourceId") ON DELETE cascade ON UPDATE no action;`
);
await db.execute(
sql`ALTER TABLE "userSiteResources" ADD CONSTRAINT "userSiteResources_userId_user_id_fk" FOREIGN KEY ("userId") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;`
);
await db.execute(
sql`ALTER TABLE "userSiteResources" ADD CONSTRAINT "userSiteResources_siteResourceId_siteResources_siteResourceId_fk" FOREIGN KEY ("siteResourceId") REFERENCES "public"."siteResources"("siteResourceId") ON DELETE cascade ON UPDATE no action;`
);
await db.execute(
sql`ALTER TABLE "clients" ADD CONSTRAINT "clients_userId_user_id_fk" FOREIGN KEY ("userId") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;`
);
await db.execute(
sql`ALTER TABLE "olms" ADD CONSTRAINT "olms_clientId_clients_clientId_fk" FOREIGN KEY ("clientId") REFERENCES "public"."clients"("clientId") ON DELETE set null ON UPDATE no action;`
);
await db.execute(
sql`ALTER TABLE "olms" ADD CONSTRAINT "olms_userId_user_id_fk" FOREIGN KEY ("userId") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;`
);
await db.execute(
sql`ALTER TABLE "roleClients" ADD CONSTRAINT "roleClients_clientId_clients_clientId_fk" FOREIGN KEY ("clientId") REFERENCES "public"."clients"("clientId") ON DELETE cascade ON UPDATE no action;`
);
await db.execute(
sql`ALTER TABLE "userClients" ADD CONSTRAINT "userClients_clientId_clients_clientId_fk" FOREIGN KEY ("clientId") REFERENCES "public"."clients"("clientId") ON DELETE cascade ON UPDATE no action;`
);
// set 100.96.128.0/24 as the utility subnet on all of the orgs
await db.execute(
sql`UPDATE "orgs" SET "utilitySubnet" = '100.96.128.0/24'`
);
// Query all of the sites to get their remoteSubnets
const sitesRemoteSubnetsData =
await db.execute(sql`SELECT "siteId", "remoteSubnets" FROM "sites" WHERE "remoteSubnets" IS NOT NULL
`);
const sitesRemoteSubnets = sitesRemoteSubnetsData.rows as {
siteId: number;
remoteSubnets: string | null;
}[];
await db.execute(sql`ALTER TABLE "sites" DROP COLUMN "remoteSubnets";`);
// get all of the siteResources and set the aliasAddress to 100.96.128.x starting at .8
const siteResourcesData = await db.execute(
sql`SELECT "siteResourceId" FROM "siteResources" ORDER BY "siteResourceId" ASC`
);
const siteResources = siteResourcesData.rows as {
siteResourceId: number;
}[];
let aliasIpOctet = 8;
for (const siteResource of siteResources) {
const aliasAddress = `100.96.128.${aliasIpOctet}`;
await db.execute(sql`
UPDATE "siteResources" SET "aliasAddress" = ${aliasAddress} WHERE "siteResourceId" = ${siteResource.siteResourceId}
`);
aliasIpOctet++;
}
// For each site with remote subnets we need to create a site resource of type cidr for each remote subnet
for (const site of sitesRemoteSubnets) {
if (site.remoteSubnets) {
// Get the orgId for this site
const siteDataQuery = await db.execute(sql`
SELECT "orgId" FROM "sites" WHERE "siteId" = ${site.siteId}
`);
const siteData = siteDataQuery.rows[0] as { orgId: string } | undefined;
if (!siteData) continue;
const subnets = site.remoteSubnets.split(",");
for (const subnet of subnets) {
const niceId = generateName();
await db.execute(sql`
INSERT INTO "siteResources" ("siteId", "orgId", "niceId", "destination", "mode", "name")
VALUES (${site.siteId}, ${siteData.orgId}, ${niceId}, ${subnet.trim()}, 'cidr', 'Remote Subnet');
`);
}
}
}
// Associate clients with site resources based on their previous site access
// Get all client-site associations from the renamed clientSitesAssociationsCache table
const clientSiteAssociationsQuery = await db.execute(sql`
SELECT "clientId", "siteId" FROM "clientSitesAssociationsCache"
`);
const clientSiteAssociations = clientSiteAssociationsQuery.rows as {
clientId: number;
siteId: number;
}[];
// For each client-site association, find all site resources for that site
for (const association of clientSiteAssociations) {
const siteResourcesQuery = await db.execute(sql`
SELECT "siteResourceId" FROM "siteResources"
WHERE "siteId" = ${association.siteId}
`);
const siteResources = siteResourcesQuery.rows as {
siteResourceId: number;
}[];
// Associate the client with all site resources from this site
for (const siteResource of siteResources) {
await db.execute(sql`
INSERT INTO "clientSiteResources" ("clientId", "siteResourceId")
VALUES (${association.clientId}, ${siteResource.siteResourceId})
`);
// also associate in the clientSiteResourcesAssociationsCache table
await db.execute(sql`
INSERT INTO "clientSiteResourcesAssociationsCache" ("clientId", "siteResourceId")
VALUES (${association.clientId}, ${siteResource.siteResourceId})
`);
}
}
// Associate existing site resources with their org's admin role
const siteResourcesWithOrgQuery = await db.execute(sql`
SELECT "siteResourceId", "orgId" FROM "siteResources"
`);
const siteResourcesWithOrg = siteResourcesWithOrgQuery.rows as {
siteResourceId: number;
orgId: string;
}[];
for (const siteResource of siteResourcesWithOrg) {
const adminRoleQuery = await db.execute(sql`
SELECT "roleId" FROM "roles" WHERE "orgId" = ${siteResource.orgId} AND "isAdmin" = true LIMIT 1
`);
const adminRole = adminRoleQuery.rows[0] as
| { roleId: number }
| undefined;
if (adminRole) {
const existingQuery = await db.execute(sql`
SELECT 1 FROM "roleSiteResources"
WHERE "roleId" = ${adminRole.roleId} AND "siteResourceId" = ${siteResource.siteResourceId}
LIMIT 1
`);
if (existingQuery.rows.length === 0) {
await db.execute(sql`
INSERT INTO "roleSiteResources" ("roleId", "siteResourceId")
VALUES (${adminRole.roleId}, ${siteResource.siteResourceId})
`);
}
}
}
// Populate niceId for clients
const clientsQuery = await db.execute(
sql`SELECT "clientId" FROM "clients"`
);
const clients = clientsQuery.rows as {
clientId: number;
}[];
const usedNiceIds: string[] = [];
for (const client of clients) {
// Generate a unique name and ensure it's unique
let niceId = "";
let loops = 0;
while (true) {
if (loops > 100) {
throw new Error("Could not generate a unique name");
}
niceId = generateName();
if (!usedNiceIds.includes(niceId)) {
usedNiceIds.push(niceId);
break;
}
loops++;
}
await db.execute(sql`
UPDATE "clients" SET "niceId" = ${niceId} WHERE "clientId" = ${client.clientId}
`);
}
await db.execute(sql`COMMIT`);
console.log("Migrated database");
} catch (e) {
await db.execute(sql`ROLLBACK`);
console.log("Unable to migrate database");
console.log(e);
throw e;
}
console.log(`${version} migration complete`);
}


@@ -0,0 +1,436 @@
import { __DIRNAME, APP_PATH } from "@server/lib/consts";
import Database from "better-sqlite3";
import { readFileSync } from "fs";
import path, { join } from "path";
const version = "1.13.0";
const dev = process.env.ENVIRONMENT !== "prod";
let file;
if (!dev) {
file = join(__DIRNAME, "names.json");
} else {
file = join("server/db/names.json");
}
export const names = JSON.parse(readFileSync(file, "utf-8"));
export function generateName(): string {
const name = (
names.descriptors[
Math.floor(Math.random() * names.descriptors.length)
] +
"-" +
names.animals[Math.floor(Math.random() * names.animals.length)]
)
.toLowerCase()
.replace(/\s/g, "-");
// clean out any non-alphanumeric characters except for dashes
return name.replace(/[^a-z0-9-]/g, "");
}
export default async function migration() {
console.log(`Running setup script ${version}...`);
const location = path.join(APP_PATH, "db", "db.sqlite");
const db = new Database(location);
try {
db.pragma("foreign_keys = OFF");
db.transaction(() => {
db.prepare(
`ALTER TABLE 'clientSites' RENAME TO 'clientSitesAssociationsCache';`
).run();
db.prepare(
`ALTER TABLE 'clients' RENAME COLUMN 'id' TO 'clientId';`
).run();
db.prepare(
`
CREATE TABLE 'clientSiteResources' (
'clientId' integer NOT NULL,
'siteResourceId' integer NOT NULL,
FOREIGN KEY ('clientId') REFERENCES 'clients'('clientId') ON UPDATE no action ON DELETE cascade,
FOREIGN KEY ('siteResourceId') REFERENCES 'siteResources'('siteResourceId') ON UPDATE no action ON DELETE cascade
);
`
).run();
db.prepare(
`
CREATE TABLE 'clientSiteResourcesAssociationsCache' (
'clientId' integer NOT NULL,
'siteResourceId' integer NOT NULL
);
`
).run();
db.prepare(
`
CREATE TABLE 'deviceWebAuthCodes' (
'codeId' integer PRIMARY KEY AUTOINCREMENT NOT NULL,
'code' text NOT NULL,
'ip' text,
'city' text,
'deviceName' text,
'applicationName' text NOT NULL,
'expiresAt' integer NOT NULL,
'createdAt' integer NOT NULL,
'verified' integer DEFAULT false NOT NULL,
'userId' text,
FOREIGN KEY ('userId') REFERENCES 'user'('id') ON UPDATE no action ON DELETE cascade
);
`
).run();
db.prepare(
`CREATE UNIQUE INDEX 'deviceWebAuthCodes_code_unique' ON 'deviceWebAuthCodes' ('code');`
).run();
db.prepare(
`
CREATE TABLE 'roleSiteResources' (
'roleId' integer NOT NULL,
'siteResourceId' integer NOT NULL,
FOREIGN KEY ('roleId') REFERENCES 'roles'('roleId') ON UPDATE no action ON DELETE cascade,
FOREIGN KEY ('siteResourceId') REFERENCES 'siteResources'('siteResourceId') ON UPDATE no action ON DELETE cascade
);
`
).run();
db.prepare(
`
CREATE TABLE 'userSiteResources' (
'userId' text NOT NULL,
'siteResourceId' integer NOT NULL,
FOREIGN KEY ('userId') REFERENCES 'user'('id') ON UPDATE no action ON DELETE cascade,
FOREIGN KEY ('siteResourceId') REFERENCES 'siteResources'('siteResourceId') ON UPDATE no action ON DELETE cascade
);
`
).run();
db.prepare(
`
CREATE TABLE '__new_clientSitesAssociationsCache' (
'clientId' integer NOT NULL,
'siteId' integer NOT NULL,
'isRelayed' integer DEFAULT false NOT NULL,
'endpoint' text,
'publicKey' text
);
`
).run();
db.prepare(
`INSERT INTO '__new_clientSitesAssociationsCache'("clientId", "siteId", "isRelayed", "endpoint", "publicKey") SELECT "clientId", "siteId", "isRelayed", "endpoint", NULL FROM 'clientSitesAssociationsCache';`
).run();
db.prepare(`DROP TABLE 'clientSitesAssociationsCache';`).run();
db.prepare(
`ALTER TABLE '__new_clientSitesAssociationsCache' RENAME TO 'clientSitesAssociationsCache';`
).run();
db.prepare(
`ALTER TABLE 'clients' ADD 'userId' text REFERENCES 'user'('id');`
).run();
db.prepare(
`ALTER TABLE 'clients' ADD COLUMN 'niceId' TEXT NOT NULL DEFAULT 'PLACEHOLDER';`
).run();
db.prepare(`ALTER TABLE 'clients' ADD 'olmId' text;`).run();
db.prepare(
`
CREATE TABLE '__new_siteResources' (
'siteResourceId' integer PRIMARY KEY AUTOINCREMENT NOT NULL,
'siteId' integer NOT NULL,
'orgId' text NOT NULL,
'niceId' text NOT NULL,
'name' text NOT NULL,
'mode' text NOT NULL,
'protocol' text,
'proxyPort' integer,
'destinationPort' integer,
'destination' text NOT NULL,
'enabled' integer DEFAULT true NOT NULL,
'alias' text,
'aliasAddress' text,
FOREIGN KEY ('siteId') REFERENCES 'sites'('siteId') ON UPDATE no action ON DELETE cascade,
FOREIGN KEY ('orgId') REFERENCES 'orgs'('orgId') ON UPDATE no action ON DELETE cascade
);
`
).run();
db.prepare(
`INSERT INTO '__new_siteResources'("siteResourceId", "siteId", "orgId", "niceId", "name", "mode", "protocol", "proxyPort", "destinationPort", "destination", "enabled", "alias", "aliasAddress") SELECT "siteResourceId", "siteId", "orgId", "niceId", "name", 'host', "protocol", "proxyPort", "destinationPort", "destinationIp", "enabled", NULL, NULL FROM 'siteResources';`
).run();
db.prepare(`DROP TABLE 'siteResources';`).run();
db.prepare(
`ALTER TABLE '__new_siteResources' RENAME TO 'siteResources';`
).run();
db.prepare(
`
CREATE TABLE '__new_olms' (
'id' text PRIMARY KEY NOT NULL,
'secretHash' text NOT NULL,
'dateCreated' text NOT NULL,
'version' text,
'agent' text,
'name' text,
'clientId' integer,
'userId' text,
FOREIGN KEY ('clientId') REFERENCES 'clients'('clientId') ON UPDATE no action ON DELETE set null,
FOREIGN KEY ('userId') REFERENCES 'user'('id') ON UPDATE no action ON DELETE cascade
);
`
).run();
db.prepare(
`INSERT INTO '__new_olms'("id", "secretHash", "dateCreated", "version", "agent", "name", "clientId", "userId") SELECT "id", "secretHash", "dateCreated", "version", NULL, NULL, "clientId", NULL FROM 'olms';`
).run();
db.prepare(`DROP TABLE 'olms';`).run();
db.prepare(`ALTER TABLE '__new_olms' RENAME TO 'olms';`).run();
db.prepare(
`
CREATE TABLE '__new_roleClients' (
'roleId' integer NOT NULL,
'clientId' integer NOT NULL,
FOREIGN KEY ('roleId') REFERENCES 'roles'('roleId') ON UPDATE no action ON DELETE cascade,
FOREIGN KEY ('clientId') REFERENCES 'clients'('clientId') ON UPDATE no action ON DELETE cascade
);
`
).run();
db.prepare(
`INSERT INTO '__new_roleClients'("roleId", "clientId") SELECT "roleId", "clientId" FROM 'roleClients';`
).run();
db.prepare(`DROP TABLE 'roleClients';`).run();
db.prepare(
`ALTER TABLE '__new_roleClients' RENAME TO 'roleClients';`
).run();
db.prepare(
`
CREATE TABLE '__new_userClients' (
'userId' text NOT NULL,
'clientId' integer NOT NULL,
FOREIGN KEY ('userId') REFERENCES 'user'('id') ON UPDATE no action ON DELETE cascade,
FOREIGN KEY ('clientId') REFERENCES 'clients'('clientId') ON UPDATE no action ON DELETE cascade
);
`
).run();
db.prepare(
`INSERT INTO '__new_userClients'("userId", "clientId") SELECT "userId", "clientId" FROM 'userClients';`
).run();
db.prepare(`DROP TABLE 'userClients';`).run();
db.prepare(
`ALTER TABLE '__new_userClients' RENAME TO 'userClients';`
).run();
db.prepare(`ALTER TABLE 'orgs' ADD 'utilitySubnet' text;`).run();
db.prepare(
`ALTER TABLE 'session' ADD 'deviceAuthUsed' integer DEFAULT false NOT NULL;`
).run();
db.prepare(
`ALTER TABLE 'targetHealthCheck' ADD 'hcTlsServerName' text;`
).run();
// set 100.96.128.0/24 as the utility subnet on all of the orgs
db.prepare(
`UPDATE 'orgs' SET 'utilitySubnet' = '100.96.128.0/24'`
).run();
// Query all of the sites to get their remoteSubnets before dropping the column
const sitesRemoteSubnets = db
.prepare(
`SELECT siteId, remoteSubnets FROM 'sites' WHERE remoteSubnets IS NOT NULL`
)
.all() as {
siteId: number;
remoteSubnets: string | null;
}[];
db.prepare(
`ALTER TABLE 'sites' DROP COLUMN 'remoteSubnets';`
).run();
// get all of the siteResources and set the aliasAddress to 100.96.128.x starting at .8
const siteResourcesForAlias = db
.prepare(
`SELECT siteResourceId FROM 'siteResources' ORDER BY siteResourceId ASC`
)
.all() as {
siteResourceId: number;
}[];
const updateAliasAddress = db.prepare(
`UPDATE 'siteResources' SET aliasAddress = ? WHERE siteResourceId = ?`
);
let aliasIpOctet = 8;
for (const siteResource of siteResourcesForAlias) {
const aliasAddress = `100.96.128.${aliasIpOctet}`;
updateAliasAddress.run(aliasAddress, siteResource.siteResourceId);
aliasIpOctet++;
}
// For each site with remote subnets we need to create a site resource of type cidr for each remote subnet
const insertCidrResource = db.prepare(
`INSERT INTO 'siteResources' ('siteId', 'destination', 'mode', 'name', 'orgId', 'niceId')
SELECT ?, ?, 'cidr', 'Remote Subnet', orgId, ? FROM 'sites' WHERE siteId = ?`
);
for (const site of sitesRemoteSubnets) {
if (site.remoteSubnets) {
const subnets = site.remoteSubnets.split(",");
for (const subnet of subnets) {
// Generate a unique niceId for each new site resource
let niceId = generateName();
insertCidrResource.run(site.siteId, subnet.trim(), niceId, site.siteId);
}
}
}
// Associate clients with site resources based on their previous site access
// Get all client-site associations from the renamed clientSitesAssociationsCache table
const clientSiteAssociations = db
.prepare(
`SELECT clientId, siteId FROM 'clientSitesAssociationsCache'`
)
.all() as {
clientId: number;
siteId: number;
}[];
const getSiteResources = db.prepare(
`SELECT siteResourceId FROM 'siteResources' WHERE siteId = ?`
);
const insertClientSiteResource = db.prepare(
`INSERT INTO 'clientSiteResources' ('clientId', 'siteResourceId') VALUES (?, ?)`
);
// create a clientSiteResourcesAssociationsCache entry for each existing association as well
const insertClientSiteResourceCache = db.prepare(
`INSERT INTO 'clientSiteResourcesAssociationsCache' ('clientId', 'siteResourceId') VALUES (?, ?)`
);
// For each client-site association, find all site resources for that site
for (const association of clientSiteAssociations) {
const siteResources = getSiteResources.all(
association.siteId
) as {
siteResourceId: number;
}[];
// Associate the client with all site resources from this site
for (const siteResource of siteResources) {
insertClientSiteResource.run(
association.clientId,
siteResource.siteResourceId
);
insertClientSiteResourceCache.run(
association.clientId,
siteResource.siteResourceId
);
}
}
// Associate existing site resources with their org's admin role
const siteResourcesWithOrg = db
.prepare(`SELECT siteResourceId, orgId FROM 'siteResources'`)
.all() as {
siteResourceId: number;
orgId: string;
}[];
const getAdminRole = db.prepare(
`SELECT roleId FROM 'roles' WHERE orgId = ? AND isAdmin = 1 LIMIT 1`
);
const checkExistingAssociation = db.prepare(
`SELECT 1 FROM 'roleSiteResources' WHERE roleId = ? AND siteResourceId = ? LIMIT 1`
);
const insertRoleSiteResource = db.prepare(
`INSERT INTO 'roleSiteResources' ('roleId', 'siteResourceId') VALUES (?, ?)`
);
for (const siteResource of siteResourcesWithOrg) {
const adminRole = getAdminRole.get(siteResource.orgId) as
| { roleId: number }
| undefined;
if (adminRole) {
const existing = checkExistingAssociation.get(
adminRole.roleId,
siteResource.siteResourceId
);
if (!existing) {
insertRoleSiteResource.run(
adminRole.roleId,
siteResource.siteResourceId
);
}
}
}
// Populate niceId for clients
const clients = db
.prepare(`SELECT clientId FROM 'clients'`)
.all() as {
clientId: number;
}[];
const usedNiceIds: string[] = [];
for (const clientId of clients) {
// Generate a unique name and ensure it's unique
let niceId = "";
let loops = 0;
while (true) {
if (loops > 100) {
throw new Error("Could not generate a unique name");
}
niceId = generateName();
if (!usedNiceIds.includes(niceId)) {
usedNiceIds.push(niceId);
break;
}
loops++;
}
db.prepare(
`UPDATE clients SET niceId = ? WHERE clientId = ?`
).run(niceId, clientId.clientId);
}
})();
db.pragma("foreign_keys = ON");
console.log(`Migrated database`);
} catch (e) {
console.log("Failed to migrate db:", e);
throw e;
}
console.log(`${version} migration complete`);
}


@@ -231,12 +231,22 @@ export default function ExitNodesTable({
},
cell: ({ row }) => {
const originalRow = row.original;
return originalRow.version || "-";
return (
<div className="flex items-center space-x-1">
{originalRow.version ? (
<Badge variant="secondary">
{"v" + originalRow.version}
</Badge>
) : (
"-"
)}
</div>
);
}
},
{
id: "actions",
header: () => (<span className="p-3">{t("actions")}</span>),
header: () => <span className="p-3">{t("actions")}</span>,
cell: ({ row }) => {
const nodeRow = row.original;
const remoteExitNodeId = nodeRow.id;
@@ -295,9 +305,7 @@ export default function ExitNodesTable({
}}
dialog={
<div>
<p>
{t("remoteExitNodeQuestionRemove")}
</p>
<p>{t("remoteExitNodeQuestionRemove")}</p>
<p>{t("remoteExitNodeMessageRemove")}</p>
</div>


@@ -51,6 +51,7 @@ export default function CredentialsPage() {
null
);
const [showCredentialsAlert, setShowCredentialsAlert] = useState(false);
const [shouldDisconnect, setShouldDisconnect] = useState(true);
const { licenseStatus, isUnlocked } = useLicenseStatusContext();
const subscription = useSubscriptionStatusContext();
@@ -71,7 +72,8 @@ export default function CredentialsPage() {
const rekeyRes = await api.post(
`/re-key/${client?.clientId}/regenerate-client-secret`,
{
secret: data.olmSecret
secret: data.olmSecret,
disconnect: shouldDisconnect
}
);
@@ -180,12 +182,27 @@ export default function CredentialsPage() {
</SettingsSectionBody>
{build !== "oss" && (
<SettingsSectionFooter>
<Button
onClick={() => setModalOpen(true)}
disabled={isSecurityFeatureDisabled()}
>
{t("regenerateCredentialsButton")}
</Button>
<div className="flex gap-2">
<Button
variant="outline"
onClick={() => {
setShouldDisconnect(false);
setModalOpen(true);
}}
disabled={isSecurityFeatureDisabled()}
>
{t("regenerateCredentialsButton")}
</Button>
<Button
onClick={() => {
setShouldDisconnect(true);
setModalOpen(true);
}}
disabled={isSecurityFeatureDisabled()}
>
{t("clientRegenerateAndDisconnect")}
</Button>
</div>
</SettingsSectionFooter>
)}
</SettingsSection>
@@ -204,11 +221,38 @@ export default function CredentialsPage() {
}}
dialog={
<div className="space-y-2">
<p>{t("regenerateCredentialsConfirmation")}</p>
<p>{t("regenerateCredentialsWarning")}</p>
{shouldDisconnect ? (
<>
<p>
{t(
"clientRegenerateAndDisconnectConfirmation"
)}
</p>
<p>
{t(
"clientRegenerateAndDisconnectWarning"
)}
</p>
</>
) : (
<>
<p>
{t(
"clientRegenerateCredentialsConfirmation"
)}
</p>
<p>
{t("clientRegenerateCredentialsWarning")}
</p>
</>
)}
</div>
}
buttonText={t("regenerateCredentialsButton")}
buttonText={
shouldDisconnect
? t("clientRegenerateAndDisconnect")
: t("regenerateCredentialsButton")
}
onConfirm={handleConfirmRegenerate}
string={getConfirmationString()}
title={t("regenerateCredentials")}


@@ -99,7 +99,7 @@ export default function ProductUpdates({
: "opacity-0"
)}
>
{filteredUpdates.length > 0 && (
{filteredUpdates.length > 1 && (
<>
<BellIcon className="flex-none size-3" />
<span>
@@ -356,7 +356,7 @@ function NewVersionAvailable({
<XIcon className="size-4 flex-none" />
</button>
</div>
<div className="flex flex-col gap-2">
<div className="flex flex-col gap-0.5">
<small className="text-muted-foreground">
{t("pangolinUpdateAvailableInfo", {
version: version.pangolin.latestVersion
@@ -365,7 +365,7 @@ function NewVersionAvailable({
<a
href={version.pangolin.releaseNotes}
target="_blank"
className="inline-flex items-center gap-0.5 text-xs font-medium"
className="inline-flex items-center gap-1 text-xs font-medium"
>
<span>
{t("pangolinUpdateAvailableReleaseNotes")}