Merge pull request #2469 from fosrl/dev

1.15.4
This commit is contained in:
Owen Schwartz
2026-02-12 16:10:44 -08:00
committed by GitHub
34 changed files with 268 additions and 234 deletions

View File

@@ -1,4 +1,4 @@
-name: Public Pipeline
+name: Public CICD Pipeline
# CI/CD workflow for building, publishing, mirroring, signing container images and building release binaries.
# Actions are pinned to specific SHAs to reduce supply-chain risk. This workflow triggers on tag push events.

View File

@@ -34,10 +34,10 @@ jobs:
        run: npm run set:oss
      - name: Generate database migrations
-       run: npm run db:sqlite:generate
+       run: npm run db:generate
      - name: Apply database migrations
-       run: npm run db:sqlite:push
+       run: npm run db:push
      - name: Test with tsc
        run: npx tsc --noEmit

.gitignore vendored
View File

@@ -51,4 +51,5 @@ dynamic/
scratch/
tsconfig.json
hydrateSaas.ts
-CLAUDE.md
+CLAUDE.md
+drizzle.config.ts

View File

@@ -16,7 +16,7 @@ COPY . .
RUN if [ "$BUILD" = "oss" ]; then rm -rf server/private; fi && \
npm run set:$DATABASE && \
npm run set:$BUILD && \
npm run db:$DATABASE:generate && \
npm run db:generate && \
npm run build && \
npm run build:cli

View File

@@ -1,14 +0,0 @@
import { defineConfig } from "drizzle-kit";
import path from "path";

const schema = [path.join("server", "db", "pg", "schema")];

export default defineConfig({
    dialect: "postgresql",
    schema: schema,
    out: path.join("server", "migrations"),
    verbose: true,
    dbCredentials: {
        url: process.env.DATABASE_URL as string
    }
});
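The two dialect-specific configs are deleted and `drizzle.config.ts` is now gitignored (see the `.gitignore` change above), which suggests the `set:pg` / `set:sqlite` scripts generate it at setup time. A minimal sketch of what the generated file likely looks like for PostgreSQL, assuming it mirrors the deleted config above — the generated file itself is not part of this diff:

```ts
// Hypothetical reconstruction — the generated drizzle.config.ts is gitignored
// and does not appear in this diff; assumed to mirror the deleted pg config.
import { defineConfig } from "drizzle-kit";
import path from "path";

export default defineConfig({
    dialect: "postgresql", // presumably "sqlite" when set:sqlite is used
    schema: [path.join("server", "db", "pg", "schema")],
    out: path.join("server", "migrations"),
    verbose: true,
    dbCredentials: {
        url: process.env.DATABASE_URL as string
    }
});
```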

View File

@@ -2060,7 +2060,7 @@
"machineClientsBannerDescription": "Machine clients are for servers and automated systems that are not associated with a specific user. They authenticate with an ID and secret, and can run with Pangolin CLI, Olm CLI, or Olm as a container.",
"machineClientsBannerPangolinCLI": "Pangolin CLI",
"machineClientsBannerOlmCLI": "Olm CLI",
"machineClientsBannerOlmContainer": "Olm Container",
"machineClientsBannerOlmContainer": "Container",
"clientsTableUserClients": "User",
"clientsTableMachineClients": "Machine",
"licenseTableValidUntil": "Valid Until",

View File

@@ -14,12 +14,9 @@
"dev": "NODE_ENV=development ENVIRONMENT=dev tsx watch server/index.ts",
"dev:check": "npx tsc --noEmit && npm run format:check",
"dev:setup": "cp config/config.example.yml config/config.yml && npm run set:oss && npm run set:sqlite && npm run db:sqlite:generate && npm run db:sqlite:push",
"db:pg:generate": "drizzle-kit generate --config=./drizzle.pg.config.ts",
"db:sqlite:generate": "drizzle-kit generate --config=./drizzle.sqlite.config.ts",
"db:pg:push": "npx tsx server/db/pg/migrate.ts",
"db:sqlite:push": "npx tsx server/db/sqlite/migrate.ts",
"db:pg:studio": "drizzle-kit studio --config=./drizzle.pg.config.ts",
"db:sqlite:studio": "drizzle-kit studio --config=./drizzle.sqlite.config.ts",
"db:generate": "drizzle-kit generate --config=./drizzle.config.ts",
"db:push": "npx tsx server/db/migrate.ts",
"db:studio": "drizzle-kit studio --config=./drizzle.config.ts",
"db:clear-migrations": "rm -rf server/migrations",
"set:oss": "echo 'export const build = \"oss\" as \"saas\" | \"enterprise\" | \"oss\";' > server/build.ts && cp tsconfig.oss.json tsconfig.json",
"set:saas": "echo 'export const build = \"saas\" as \"saas\" | \"enterprise\" | \"oss\";' > server/build.ts && cp tsconfig.saas.json tsconfig.json",

View File

@@ -56,15 +56,15 @@ Ensure drizzle-kit is installed.
You must have a connection string in your config file, as shown above.
```bash
-npm run db:pg:generate
-npm run db:pg:push
+npm run db:generate
+npm run db:push
```
### SQLite
```bash
-npm run db:sqlite:generate
-npm run db:sqlite:push
+npm run db:generate
+npm run db:push
```
## Build Time

server/db/migrate.ts Normal file
View File

@@ -0,0 +1,3 @@
import { runMigrations } from "./";
await runMigrations();
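This three-line file is the new unified migration entrypoint: per the package.json change above, `npm run db:push` now runs `npx tsx server/db/migrate.ts` in place of the separate `db:pg:push` and `db:sqlite:push` scripts.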

View File

@@ -1,3 +1,4 @@
export * from "./driver";
export * from "./schema/schema";
export * from "./schema/privateSchema";
export * from "./migrate";

View File

@@ -4,7 +4,7 @@ import path from "path";
const migrationsFolder = path.join("server/migrations");
-const runMigrations = async () => {
+export const runMigrations = async () => {
console.log("Running migrations...");
try {
await migrate(db as any, {
@@ -17,5 +17,3 @@ const runMigrations = async () => {
        process.exit(1);
    }
};
-
-runMigrations();

View File

@@ -142,7 +142,8 @@ export const resources = pgTable("resources", {
}).default("forced"), // "forced" = always show, "automatic" = only when down
maintenanceTitle: text("maintenanceTitle"),
maintenanceMessage: text("maintenanceMessage"),
maintenanceEstimatedTime: text("maintenanceEstimatedTime")
maintenanceEstimatedTime: text("maintenanceEstimatedTime"),
postAuthPath: text("postAuthPath")
});
export const targets = pgTable("targets", {

View File

@@ -1,3 +1,4 @@
export * from "./driver";
export * from "./schema/schema";
export * from "./schema/privateSchema";
export * from "./migrate";

View File

@@ -4,7 +4,7 @@ import path from "path";
const migrationsFolder = path.join("server/migrations");
-const runMigrations = async () => {
+export const runMigrations = async () => {
console.log("Running migrations...");
try {
migrate(db as any, {
@@ -16,5 +16,3 @@ const runMigrations = async () => {
        process.exit(1);
    }
};
-
-runMigrations();

View File

@@ -162,7 +162,8 @@ export const resources = sqliteTable("resources", {
}).default("forced"), // "forced" = always show, "automatic" = only when down
maintenanceTitle: text("maintenanceTitle"),
maintenanceMessage: text("maintenanceMessage"),
maintenanceEstimatedTime: text("maintenanceEstimatedTime")
maintenanceEstimatedTime: text("maintenanceEstimatedTime"),
postAuthPath: text("postAuthPath")
});
export const targets = sqliteTable("targets", {

View File

@@ -2,7 +2,7 @@ import path from "path";
import { fileURLToPath } from "url";
// This is a placeholder value replaced by the build process
export const APP_VERSION = "1.15.3";
export const APP_VERSION = "1.15.4";
export const __FILENAME = fileURLToPath(import.meta.url);
export const __DIRNAME = path.dirname(__FILENAME);

View File

@@ -0,0 +1,18 @@
/**
 * Normalizes a post-authentication path for safe use when building redirect URLs.
 * Returns a path that starts with / and does not allow open redirects (no //, no :).
 */
export function normalizePostAuthPath(path: string | null | undefined): string | null {
    if (path == null || typeof path !== "string") {
        return null;
    }
    const trimmed = path.trim();
    if (trimmed === "") {
        return null;
    }
    // Reject protocol-relative (//) or scheme (:) to avoid open redirect
    if (trimmed.includes("//") || trimmed.includes(":")) {
        return null;
    }
    return trimmed.startsWith("/") ? trimmed : `/${trimmed}`;
}
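A few illustrative calls, derived from the logic above (not part of the diff):

```ts
// Illustrative only — expected results follow from the function above:
normalizePostAuthPath("dashboard");        // "/dashboard" (leading slash added)
normalizePostAuthPath("/app/settings");    // "/app/settings"
normalizePostAuthPath("   ");              // null (blank after trimming)
normalizePostAuthPath("//evil.com");       // null (protocol-relative rejected)
normalizePostAuthPath("https://evil.com"); // null (":" rejected)
```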

View File

@@ -18,6 +18,113 @@ import logger from "@server/logger";
import { db, idp, idpOrg, loginPage, loginPageBranding, loginPageBrandingOrg, loginPageOrg, orgs, resources, roles } from "@server/db";
import { eq } from "drizzle-orm";
/**
 * Get the maximum allowed retention days for a given tier
 * Returns null for enterprise tier (unlimited)
 */
function getMaxRetentionDaysForTier(tier: Tier | null): number | null {
    if (!tier) {
        return 3; // Free tier
    }
    switch (tier) {
        case "tier1":
            return 7;
        case "tier2":
            return 30;
        case "tier3":
            return 90;
        case "enterprise":
            return null; // No limit
        default:
            return 3; // Default to free tier limit
    }
}
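For reference, the caps this mapping yields (illustrative, derived directly from the switch above):

```ts
// Illustrative only:
getMaxRetentionDaysForTier(null);         // 3    (free tier)
getMaxRetentionDaysForTier("tier1");      // 7
getMaxRetentionDaysForTier("tier2");      // 30
getMaxRetentionDaysForTier("tier3");      // 90
getMaxRetentionDaysForTier("enterprise"); // null (unlimited)
```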
/**
 * Cap retention days to the maximum allowed for the given tier
 */
async function capRetentionDays(
    orgId: string,
    tier: Tier | null
): Promise<void> {
    const maxRetentionDays = getMaxRetentionDaysForTier(tier);

    // If there's no limit (enterprise tier), no capping needed
    if (maxRetentionDays === null) {
        logger.debug(
            `No retention day limit for org ${orgId} on tier ${tier || "free"}`
        );
        return;
    }

    // Get current org settings
    const [org] = await db
        .select()
        .from(orgs)
        .where(eq(orgs.orgId, orgId));

    if (!org) {
        logger.warn(`Org ${orgId} not found when capping retention days`);
        return;
    }

    const updates: Partial<typeof orgs.$inferInsert> = {};
    let needsUpdate = false;

    // Cap request log retention if it exceeds the limit
    if (
        org.settingsLogRetentionDaysRequest !== null &&
        org.settingsLogRetentionDaysRequest > maxRetentionDays
    ) {
        updates.settingsLogRetentionDaysRequest = maxRetentionDays;
        needsUpdate = true;
        logger.info(
            `Capping request log retention from ${org.settingsLogRetentionDaysRequest} to ${maxRetentionDays} days for org ${orgId}`
        );
    }

    // Cap access log retention if it exceeds the limit
    if (
        org.settingsLogRetentionDaysAccess !== null &&
        org.settingsLogRetentionDaysAccess > maxRetentionDays
    ) {
        updates.settingsLogRetentionDaysAccess = maxRetentionDays;
        needsUpdate = true;
        logger.info(
            `Capping access log retention from ${org.settingsLogRetentionDaysAccess} to ${maxRetentionDays} days for org ${orgId}`
        );
    }

    // Cap action log retention if it exceeds the limit
    if (
        org.settingsLogRetentionDaysAction !== null &&
        org.settingsLogRetentionDaysAction > maxRetentionDays
    ) {
        updates.settingsLogRetentionDaysAction = maxRetentionDays;
        needsUpdate = true;
        logger.info(
            `Capping action log retention from ${org.settingsLogRetentionDaysAction} to ${maxRetentionDays} days for org ${orgId}`
        );
    }

    // Apply updates if needed
    if (needsUpdate) {
        await db
            .update(orgs)
            .set(updates)
            .where(eq(orgs.orgId, orgId));
        logger.info(
            `Successfully capped retention days for org ${orgId} to max ${maxRetentionDays} days`
        );
    } else {
        logger.debug(
            `No retention day capping needed for org ${orgId}`
        );
    }
}
export async function handleTierChange(
    orgId: string,
    newTier: SubscriptionType | null,
@@ -40,6 +147,9 @@ export async function handleTierChange(
        logger.info(
            `Org ${orgId} is reverting to free tier, disabling all paid features`
        );

+       // Cap retention days to free tier limits
+       await capRetentionDays(orgId, null);

        // Disable all features in the tier matrix
        for (const [featureKey] of Object.entries(tierMatrix)) {
            const feature = featureKey as TierFeature;
@@ -57,6 +167,9 @@ export async function handleTierChange(
    // Get the tier (cast as Tier since we've ruled out "license" and null)
    const tier = newTier as Tier;

+   // Cap retention days to the new tier's limits
+   await capRetentionDays(orgId, tier);

    // Check each feature in the tier matrix
    for (const [featureKey, allowedTiers] of Object.entries(tierMatrix)) {
        const feature = featureKey as TierFeature;

View File

@@ -14,6 +14,7 @@ import { verifyResourceAccessToken } from "@server/auth/verifyResourceAccessToke
import config from "@server/lib/config";
import stoi from "@server/lib/stoi";
import { logAccessAudit } from "#dynamic/lib/logAccessAudit";
+import { normalizePostAuthPath } from "@server/lib/normalizePostAuthPath";

const authWithAccessTokenBodySchema = z.strictObject({
    accessToken: z.string(),
@@ -164,10 +165,16 @@ export async function authWithAccessToken(
        requestIp: req.ip
    });

+   let redirectUrl = `${resource.ssl ? "https" : "http"}://${resource.fullDomain}`;
+   const postAuthPath = normalizePostAuthPath(resource.postAuthPath);
+   if (postAuthPath) {
+       redirectUrl = redirectUrl + postAuthPath;
+   }

    return response<AuthWithAccessTokenResponse>(res, {
        data: {
            session: token,
-           redirectUrl: `${resource.ssl ? "https" : "http"}://${resource.fullDomain}`
+           redirectUrl
        },
        success: true,
        error: false,
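Concretely, a minimal sketch of the new redirect construction with hypothetical values (`app.example.com` and `dashboard` are invented for illustration):

```ts
import { normalizePostAuthPath } from "@server/lib/normalizePostAuthPath";

// Hypothetical resource values, not from this diff:
const resource = { ssl: true, fullDomain: "app.example.com", postAuthPath: "dashboard" };

let redirectUrl = `${resource.ssl ? "https" : "http"}://${resource.fullDomain}`;
const postAuthPath = normalizePostAuthPath(resource.postAuthPath);
if (postAuthPath) {
    redirectUrl = redirectUrl + postAuthPath;
}
// redirectUrl === "https://app.example.com/dashboard"
// A postAuthPath like "//evil.com" normalizes to null, so the redirect
// would stay at "https://app.example.com".
```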

View File

@@ -36,7 +36,8 @@ const createHttpResourceSchema = z
        http: z.boolean(),
        protocol: z.enum(["tcp", "udp"]),
        domainId: z.string(),
-       stickySession: z.boolean().optional()
+       stickySession: z.boolean().optional(),
+       postAuthPath: z.string().nullable().optional()
    })
    .refine(
        (data) => {
@@ -188,7 +189,7 @@ async function createHttpResource(
        );
    }

-   const { name, domainId } = parsedBody.data;
+   const { name, domainId, postAuthPath } = parsedBody.data;
    const subdomain = parsedBody.data.subdomain;
    const stickySession = parsedBody.data.stickySession;
@@ -255,7 +256,8 @@ async function createHttpResource(
            http: true,
            protocol: "tcp",
            ssl: true,
-           stickySession: stickySession
+           stickySession: stickySession,
+           postAuthPath: postAuthPath
        })
        .returning();

View File

@@ -35,6 +35,7 @@ export type GetResourceAuthInfoResponse = {
    whitelist: boolean;
    skipToIdpId: number | null;
    orgId: string;
+   postAuthPath: string | null;
};
export async function getResourceAuthInfo(
@@ -147,7 +148,8 @@ export async function getResourceAuthInfo(
            url,
            whitelist: resource.emailWhitelistEnabled,
            skipToIdpId: resource.skipToIdpId,
-           orgId: resource.orgId
+           orgId: resource.orgId,
+           postAuthPath: resource.postAuthPath ?? null
        },
        success: true,
        error: false,

View File

@@ -55,7 +55,8 @@ const updateHttpResourceBodySchema = z
        maintenanceModeType: z.enum(["forced", "automatic"]).optional(),
        maintenanceTitle: z.string().max(255).nullable().optional(),
        maintenanceMessage: z.string().max(2000).nullable().optional(),
-       maintenanceEstimatedTime: z.string().max(100).nullable().optional()
+       maintenanceEstimatedTime: z.string().max(100).nullable().optional(),
+       postAuthPath: z.string().nullable().optional()
    })
    .refine((data) => Object.keys(data).length > 0, {
        error: "At least one field must be provided for update"

server/setup/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
migrations.ts

View File

@@ -1,162 +0,0 @@
#! /usr/bin/env node
import { migrate } from "drizzle-orm/node-postgres/migrator";
import { db } from "../db/pg";
import semver from "semver";
import { versionMigrations } from "../db/pg";
import { __DIRNAME, APP_VERSION } from "@server/lib/consts";
import path from "path";
import m1 from "./scriptsPg/1.6.0";
import m2 from "./scriptsPg/1.7.0";
import m3 from "./scriptsPg/1.8.0";
import m4 from "./scriptsPg/1.9.0";
import m5 from "./scriptsPg/1.10.0";
import m6 from "./scriptsPg/1.10.2";
import m7 from "./scriptsPg/1.11.0";
import m8 from "./scriptsPg/1.11.1";
import m9 from "./scriptsPg/1.12.0";
import m10 from "./scriptsPg/1.13.0";
import m11 from "./scriptsPg/1.14.0";
import m12 from "./scriptsPg/1.15.0";

// THIS CANNOT IMPORT ANYTHING FROM THE SERVER
// EXCEPT FOR THE DATABASE AND THE SCHEMA

// Define the migration list with versions and their corresponding functions
const migrations = [
    { version: "1.6.0", run: m1 },
    { version: "1.7.0", run: m2 },
    { version: "1.8.0", run: m3 },
    { version: "1.9.0", run: m4 },
    { version: "1.10.0", run: m5 },
    { version: "1.10.2", run: m6 },
    { version: "1.11.0", run: m7 },
    { version: "1.11.1", run: m8 },
    { version: "1.12.0", run: m9 },
    { version: "1.13.0", run: m10 },
    { version: "1.14.0", run: m11 },
    { version: "1.15.0", run: m12 }
    // Add new migrations here as they are created
] as {
    version: string;
    run: () => Promise<void>;
}[];

await run();

async function run() {
    // run the migrations
    await runMigrations();
}

export async function runMigrations() {
    if (process.env.DISABLE_MIGRATIONS) {
        console.log("Migrations are disabled. Skipping...");
        return;
    }
    try {
        const appVersion = APP_VERSION;

        // determine if the migrations table exists
        const exists = await db
            .select()
            .from(versionMigrations)
            .limit(1)
            .execute()
            .then((res) => res.length > 0)
            .catch(() => false);

        if (exists) {
            console.log("Migrations table exists, running scripts...");
            await executeScripts();
        } else {
            console.log("Migrations table does not exist, creating it...");
            console.log("Running migrations...");
            try {
                await migrate(db, {
                    migrationsFolder: path.join(__DIRNAME, "init") // put here during the docker build
                });
                console.log("Migrations completed successfully.");
            } catch (error) {
                console.error("Error running migrations:", error);
            }

            await db
                .insert(versionMigrations)
                .values({
                    version: appVersion,
                    executedAt: Date.now()
                })
                .execute();
        }
    } catch (e) {
        console.error("Error running migrations:", e);
        await new Promise((resolve) =>
            setTimeout(resolve, 1000 * 60 * 60 * 24 * 1)
        );
    }
}

async function executeScripts() {
    try {
        // Get the last executed version from the database
        const lastExecuted = await db.select().from(versionMigrations);

        // Filter and sort migrations
        const pendingMigrations = lastExecuted
            .map((m) => m)
            .sort((a, b) => semver.compare(b.version, a.version));
        const startVersion = pendingMigrations[0]?.version ?? "0.0.0";
        console.log(`Starting migrations from version ${startVersion}`);

        const migrationsToRun = migrations.filter((migration) =>
            semver.gt(migration.version, startVersion)
        );
        console.log(
            "Migrations to run:",
            migrationsToRun.map((m) => m.version).join(", ")
        );

        // Run migrations in order
        for (const migration of migrationsToRun) {
            console.log(`Running migration ${migration.version}`);
            try {
                await migration.run();

                // Update version in database
                await db
                    .insert(versionMigrations)
                    .values({
                        version: migration.version,
                        executedAt: Date.now()
                    })
                    .execute();
                console.log(
                    `Successfully completed migration ${migration.version}`
                );
            } catch (e) {
                if (
                    e instanceof Error &&
                    typeof (e as any).code === "string" &&
                    (e as any).code === "23505"
                ) {
                    console.error("Migration has already run! Skipping...");
                    continue; // or return, depending on context
                }
                console.error(
                    `Failed to run migration ${migration.version}:`,
                    e
                );
                throw e;
            }
        }

        console.log("All migrations completed successfully");
    } catch (error) {
        console.error("Migration process failed:", error);
        throw error;
    }
}

View File

@@ -18,6 +18,7 @@ import m10 from "./scriptsPg/1.13.0";
import m11 from "./scriptsPg/1.14.0";
import m12 from "./scriptsPg/1.15.0";
import m13 from "./scriptsPg/1.15.3";
+import m14 from "./scriptsPg/1.15.4";
// THIS CANNOT IMPORT ANYTHING FROM THE SERVER
// EXCEPT FOR THE DATABASE AND THE SCHEMA
@@ -36,7 +37,8 @@ const migrations = [
{ version: "1.13.0", run: m10 },
{ version: "1.14.0", run: m11 },
{ version: "1.15.0", run: m12 },
{ version: "1.15.3", run: m13 }
{ version: "1.15.3", run: m13 },
{ version: "1.15.4", run: m14 }
// Add new migrations here as they are created
] as {
version: string;

View File

@@ -36,6 +36,7 @@ import m31 from "./scriptsSqlite/1.13.0";
import m32 from "./scriptsSqlite/1.14.0";
import m33 from "./scriptsSqlite/1.15.0";
import m34 from "./scriptsSqlite/1.15.3";
+import m35 from "./scriptsSqlite/1.15.4";
// THIS CANNOT IMPORT ANYTHING FROM THE SERVER
// EXCEPT FOR THE DATABASE AND THE SCHEMA
@@ -70,7 +71,8 @@ const migrations = [
{ version: "1.13.0", run: m31 },
{ version: "1.14.0", run: m32 },
{ version: "1.15.0", run: m33 },
{ version: "1.15.3", run: m34 }
{ version: "1.15.3", run: m34 },
{ version: "1.15.4", run: m35 }
// Add new migrations here as they are created
] as const;

View File

@@ -0,0 +1,27 @@
import { db } from "@server/db/pg/driver";
import { sql } from "drizzle-orm";
import { __DIRNAME } from "@server/lib/consts";

const version = "1.15.4";

export default async function migration() {
    console.log(`Running setup script ${version}...`);

    try {
        await db.execute(sql`BEGIN`);
        await db.execute(
            sql`ALTER TABLE "resources" ADD COLUMN "postAuthPath" text;`
        );
        await db.execute(sql`COMMIT`);
        console.log("Migrated database");
    } catch (e) {
        await db.execute(sql`ROLLBACK`);
        console.log("Unable to migrate database");
        console.log(e);
        throw e;
    }

    console.log(`${version} migration complete`);
}

View File

@@ -0,0 +1,27 @@
import { __DIRNAME, APP_PATH } from "@server/lib/consts";
import Database from "better-sqlite3";
import path from "path";

const version = "1.15.4";

export default async function migration() {
    console.log(`Running setup script ${version}...`);

    const location = path.join(APP_PATH, "db", "db.sqlite");
    const db = new Database(location);

    try {
        db.transaction(() => {
            db.prepare(
                `ALTER TABLE 'resources' ADD 'postAuthPath' text;`
            ).run();
        })();
        console.log(`Migrated database`);
    } catch (e) {
        console.log("Failed to migrate db:", e);
        throw e;
    }

    console.log(`${version} migration complete`);
}

View File

@@ -26,6 +26,7 @@ import type {
import { CheckOrgUserAccessResponse } from "@server/routers/org";
import OrgPolicyRequired from "@app/components/OrgPolicyRequired";
import { isOrgSubscribed } from "@app/lib/api/isOrgSubscribed";
+import { normalizePostAuthPath } from "@server/lib/normalizePostAuthPath";
export const dynamic = "force-dynamic";
@@ -108,6 +109,11 @@ export default async function ResourceAuthPage(props: {
        } catch (e) {}
    }

+   const normalizedPostAuthPath = normalizePostAuthPath(authInfo.postAuthPath);
+   if (normalizedPostAuthPath) {
+       redirectUrl = new URL(authInfo.url).origin + normalizedPostAuthPath;
+   }

    const hasAuth =
        authInfo.password ||
        authInfo.pincode ||

View File

@@ -303,7 +303,7 @@ export default function CreateInternalResourceDialog({
    const [udpCustomPorts, setUdpCustomPorts] = useState<string>("");

    const availableSites = sites.filter(
-       (site) => site.type === "newt" && site.subnet
+       (site) => site.type === "newt"
    );

    const form = useForm<FormData>({

View File

@@ -397,7 +397,7 @@ export default function EditInternalResourceDialog({
    );

    const availableSites = sites.filter(
-       (site) => site.type === "newt" && site.subnet
+       (site) => site.type === "newt"
    );

    const form = useForm<FormData>({

View File

@@ -37,7 +37,7 @@ export const MachineClientsBanner = ({ orgId }: MachineClientsBannerProps) => {
                </Button>
            </Link>
            <Link
-               href="https://docs.pangolin.net/manage/clients/install-client#docker"
+               href="https://docs.pangolin.net/manage/clients/install-client#docker-pangolin-cli"
                target="_blank"
                rel="noopener noreferrer"
            >

View File

@@ -18,11 +18,11 @@ export type CommandItem = string | { title: string; command: string };
const PLATFORMS = [
    "unix",
-   "windows",
    "docker",
    "kubernetes",
    "podman",
-   "nixos"
+   "nixos",
+   "windows"
] as const;

type Platform = (typeof PLATFORMS)[number];

View File

@@ -14,7 +14,7 @@ import { Button } from "./ui/button";
export type CommandItem = string | { title: string; command: string };
-const PLATFORMS = ["unix", "windows", "docker"] as const;
+const PLATFORMS = ["unix", "docker", "windows"] as const;

type Platform = (typeof PLATFORMS)[number];
@@ -43,7 +43,7 @@ export function OlmInstallCommands({
        All: [
            {
                title: t("install"),
-               command: `curl -fsSL https://static.pangolin.net/get-cli.sh | bash`
+               command: `curl -fsSL https://static.pangolin.net/get-cli.sh | sudo bash`
            },
            {
                title: t("run"),
@@ -51,24 +51,12 @@ export function OlmInstallCommands({
            }
        ]
    },
-   windows: {
-       x64: [
-           {
-               title: t("install"),
-               command: `curl -o olm.exe -L "https://github.com/fosrl/olm/releases/download/${version}/olm_windows_installer.exe"`
-           },
-           {
-               title: t("run"),
-               command: `olm.exe --id ${id} --secret ${secret} --endpoint ${endpoint}`
-           }
-       ]
-   },
    docker: {
        "Docker Compose": [
            `services:
-  olm:
-    image: fosrl/olm
-    container_name: olm
+  pangolin-cli:
+    image: fosrl/pangolin-cli
+    container_name: pangolin-cli
    restart: unless-stopped
    network_mode: host
    cap_add:
@@ -77,11 +65,24 @@ export function OlmInstallCommands({
      - /dev/net/tun:/dev/net/tun
    environment:
      - PANGOLIN_ENDPOINT=${endpoint}
-      - OLM_ID=${id}
-      - OLM_SECRET=${secret}`
+      - CLIENT_ID=${id}
+      - CLIENT_SECRET=${secret}`
        ],
        "Docker Run": [
-           `docker run -dit --network host --cap-add NET_ADMIN --device /dev/net/tun:/dev/net/tun fosrl/olm --id ${id} --secret ${secret} --endpoint ${endpoint}`
+           `docker run -dit --network host --cap-add NET_ADMIN --device /dev/net/tun:/dev/net/tun fosrl/pangolin-cli up client --id ${id} --secret ${secret} --endpoint ${endpoint} --attach`
        ]
    },
+   windows: {
+       x64: [
+           {
+               title: t("install"),
+               command: `# Download and run the installer to install Olm first\n
+curl -o olm.exe -L "https://github.com/fosrl/olm/releases/download/${version}/olm_windows_installer.exe"`
+           },
+           {
+               title: t("run"),
+               command: `olm.exe --id ${id} --secret ${secret} --endpoint ${endpoint}`
+           }
+       ]
+   }
};