Compare commits


1 commit
dev ... 1.15.0

Author: Owen Schwartz
SHA1: 40f2262f3e
Message: Merge pull request #2309 from fosrl/dev (1.15.0)
Date: 2026-01-23 10:40:16 -08:00
12 changed files with 2395 additions and 249 deletions


@@ -44,9 +44,19 @@ updates:
     schedule:
       interval: "daily"
     groups:
-      patch-updates:
+      dev-patch-updates:
+        dependency-type: "development"
         update-types:
           - "patch"
-      minor-updates:
+      dev-minor-updates:
+        dependency-type: "development"
         update-types:
           - "minor"
+      prod-patch-updates:
+        dependency-type: "production"
+        update-types:
+          - "patch"
+      prod-minor-updates:
+        dependency-type: "production"
+        update-types:
+          - "minor"


@@ -482,77 +482,14 @@ jobs:
           echo "==> cosign sign (key) --recursive ${REF}"
           cosign sign --key env://COSIGN_PRIVATE_KEY --recursive "${REF}"
-          # Retry wrapper for verification to handle registry propagation delays
-          retry_verify() {
-            local cmd="$1"
-            local attempts=6
-            local delay=5
-            local i=1
-            until eval "$cmd"; do
-              if [ $i -ge $attempts ]; then
-                echo "Verification failed after $attempts attempts"
-                return 1
-              fi
-              echo "Verification not yet available. Retry $i/$attempts after ${delay}s..."
-              sleep $delay
-              i=$((i+1))
-              delay=$((delay*2))
-              # Cap the delay to avoid very long waits
-              if [ $delay -gt 60 ]; then delay=60; fi
-            done
-            return 0
-          }
           echo "==> cosign verify (public key) ${REF}"
-          if retry_verify "cosign verify --key env://COSIGN_PUBLIC_KEY '${REF}' -o text"; then
-            VERIFIED_INDEX=true
-          else
-            VERIFIED_INDEX=false
-          fi
+          cosign verify --key env://COSIGN_PUBLIC_KEY "${REF}" -o text
           echo "==> cosign verify (keyless policy) ${REF}"
-          if retry_verify "cosign verify --certificate-oidc-issuer '${issuer}' --certificate-identity-regexp '${id_regex}' '${REF}' -o text"; then
-            VERIFIED_INDEX_KEYLESS=true
-          else
-            VERIFIED_INDEX_KEYLESS=false
-          fi
-          # If index verification fails, attempt to verify child platform manifests
-          if [ "${VERIFIED_INDEX}" != "true" ] || [ "${VERIFIED_INDEX_KEYLESS}" != "true" ]; then
-            echo "Index verification not available; attempting child manifest verification for ${BASE_IMAGE}:${IMAGE_TAG}"
-            CHILD_VERIFIED=false
-            for ARCH in arm64 amd64; do
-              CHILD_TAG="${IMAGE_TAG}-${ARCH}"
-              echo "Resolving child digest for ${BASE_IMAGE}:${CHILD_TAG}"
-              CHILD_DIGEST="$(skopeo inspect --retry-times 3 docker://${BASE_IMAGE}:${CHILD_TAG} | jq -r '.Digest' || true)"
-              if [ -n "${CHILD_DIGEST}" ] && [ "${CHILD_DIGEST}" != "null" ]; then
-                CHILD_REF="${BASE_IMAGE}@${CHILD_DIGEST}"
-                echo "==> cosign verify (public key) child ${CHILD_REF}"
-                if retry_verify "cosign verify --key env://COSIGN_PUBLIC_KEY '${CHILD_REF}' -o text"; then
-                  CHILD_VERIFIED=true
-                  echo "Public key verification succeeded for child ${CHILD_REF}"
-                else
-                  echo "Public key verification failed for child ${CHILD_REF}"
-                fi
-                echo "==> cosign verify (keyless policy) child ${CHILD_REF}"
-                if retry_verify "cosign verify --certificate-oidc-issuer '${issuer}' --certificate-identity-regexp '${id_regex}' '${CHILD_REF}' -o text"; then
-                  CHILD_VERIFIED=true
-                  echo "Keyless verification succeeded for child ${CHILD_REF}"
-                else
-                  echo "Keyless verification failed for child ${CHILD_REF}"
-                fi
-              else
-                echo "No child digest found for ${BASE_IMAGE}:${CHILD_TAG}; skipping"
-              fi
-            done
-            if [ "${CHILD_VERIFIED}" != "true" ]; then
-              echo "Failed to verify index and no child manifests verified for ${BASE_IMAGE}:${IMAGE_TAG}"
-              exit 10
-            fi
-          fi
+          cosign verify \
+            --certificate-oidc-issuer "${issuer}" \
+            --certificate-identity-regexp "${id_regex}" \
+            "${REF}" -o text
           echo "✓ Successfully signed and verified ${BASE_IMAGE}:${IMAGE_TAG}"
         done
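Note: the removed retry_verify wrapper (the "-" lines above) implements capped exponential backoff, six attempts starting at 5 s and doubling up to a 60 s cap, to ride out registry propagation delays. For reference, a minimal TypeScript sketch of the same pattern (illustrative only, not code from this repository):

// Generic capped exponential backoff, mirroring the removed retry_verify()
async function retryWithBackoff(
    check: () => Promise<boolean>,
    attempts = 6,
    initialDelayMs = 5_000,
    maxDelayMs = 60_000
): Promise<boolean> {
    let delay = initialDelayMs;
    for (let i = 1; i <= attempts; i++) {
        if (await check()) return true; // verification succeeded
        if (i === attempts) break; // out of attempts
        console.log(`Retry ${i}/${attempts} after ${delay / 1000}s...`);
        await new Promise((resolve) => setTimeout(resolve, delay));
        delay = Math.min(delay * 2, maxDelayMs); // cap the delay to avoid very long waits
    }
    return false;
}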


@@ -1,43 +1,63 @@
 FROM node:24-alpine AS builder
-WORKDIR /app
-ARG BUILD=oss
-ARG DATABASE=sqlite
-RUN apk add --no-cache python3 make g++
-# COPY package.json package-lock.json ./
-COPY package*.json ./
-RUN npm ci
-COPY . .
-RUN if [ "$BUILD" = "oss" ]; then rm -rf server/private; fi && \
-    npm run set:$DATABASE && \
-    npm run set:$BUILD && \
-    npm run db:$DATABASE:generate && \
-    npm run build:$DATABASE && \
-    npm run build:cli
-# test to make sure the build output is there and error if not
-RUN test -f dist/server.mjs
-# Prune dev dependencies and clean up to prepare for copy to runner
-RUN npm prune --omit=dev && npm cache clean --force
-FROM node:24-alpine AS runner
 # OCI Image Labels - Build Args for dynamic values
 ARG VERSION="dev"
 ARG REVISION=""
 ARG CREATED=""
 ARG LICENSE="AGPL-3.0"
+WORKDIR /app
+ARG BUILD=oss
+ARG DATABASE=sqlite
 # Derive title and description based on BUILD type
 ARG IMAGE_TITLE="Pangolin"
 ARG IMAGE_DESCRIPTION="Identity-aware VPN and proxy for remote access to anything, anywhere"
+RUN apk add --no-cache curl tzdata python3 make g++
+# COPY package.json package-lock.json ./
+COPY package*.json ./
+RUN npm ci
+COPY . .
+RUN echo "export * from \"./$DATABASE\";" > server/db/index.ts
+RUN echo "export const driver: \"pg\" | \"sqlite\" = \"$DATABASE\";" >> server/db/index.ts
+RUN echo "export const build = \"$BUILD\" as \"saas\" | \"enterprise\" | \"oss\";" > server/build.ts
+# Copy the appropriate TypeScript configuration based on build type
+RUN if [ "$BUILD" = "oss" ]; then cp tsconfig.oss.json tsconfig.json; \
+    elif [ "$BUILD" = "saas" ]; then cp tsconfig.saas.json tsconfig.json; \
+    elif [ "$BUILD" = "enterprise" ]; then cp tsconfig.enterprise.json tsconfig.json; \
+    fi
+# if the build is oss then remove the server/private directory
+RUN if [ "$BUILD" = "oss" ]; then rm -rf server/private; fi
+RUN if [ "$DATABASE" = "pg" ]; then npx drizzle-kit generate --dialect postgresql --schema ./server/db/pg/schema --out init; else npx drizzle-kit generate --dialect $DATABASE --schema ./server/db/$DATABASE/schema --out init; fi
+RUN mkdir -p dist
+RUN npm run next:build
+RUN node esbuild.mjs -e server/index.ts -o dist/server.mjs -b $BUILD
+RUN if [ "$DATABASE" = "pg" ]; then \
+    node esbuild.mjs -e server/setup/migrationsPg.ts -o dist/migrations.mjs; \
+    else \
+    node esbuild.mjs -e server/setup/migrationsSqlite.ts -o dist/migrations.mjs; \
+    fi
+# test to make sure the build output is there and error if not
+RUN test -f dist/server.mjs
+RUN npm run build:cli
+# Prune dev dependencies and clean up to prepare for copy to runner
+RUN npm prune --omit=dev && npm cache clean --force
+FROM node:24-alpine AS runner
 WORKDIR /app
 # Only curl and tzdata needed at runtime - no build tools!

@@ -46,10 +66,11 @@ RUN apk add --no-cache curl tzdata
 # Copy pre-built node_modules from builder (already pruned to production only)
 # This includes the compiled native modules like better-sqlite3
 COPY --from=builder /app/node_modules ./node_modules
 COPY --from=builder /app/.next/standalone ./
 COPY --from=builder /app/.next/static ./.next/static
 COPY --from=builder /app/dist ./dist
-COPY --from=builder /app/server/migrations ./dist/init
+COPY --from=builder /app/init ./dist/init
 COPY --from=builder /app/package.json ./package.json
 COPY ./cli/wrapper.sh /usr/local/bin/pangctl

package-lock.json: 2194 changes (generated file; diff suppressed because it is too large)


@@ -12,8 +12,6 @@
     "license": "SEE LICENSE IN LICENSE AND README.md",
     "scripts": {
         "dev": "NODE_ENV=development ENVIRONMENT=dev tsx watch server/index.ts",
-        "dev:check": "npx tsc --noEmit && npm run format:check",
-        "dev:setup": "cp config/config.example.yml config/config.yml && npm run set:oss && npm run set:sqlite && npm run db:sqlite:generate && npm run db:sqlite:push",
         "db:pg:generate": "drizzle-kit generate --config=./drizzle.pg.config.ts",
         "db:sqlite:generate": "drizzle-kit generate --config=./drizzle.sqlite.config.ts",
         "db:pg:push": "npx tsx server/db/pg/migrate.ts",

@@ -26,13 +24,12 @@
         "set:enterprise": "echo 'export const build = \"enterprise\" as \"saas\" | \"enterprise\" | \"oss\";' > server/build.ts && cp tsconfig.enterprise.json tsconfig.json",
         "set:sqlite": "echo 'export * from \"./sqlite\";\nexport const driver: \"pg\" | \"sqlite\" = \"sqlite\";' > server/db/index.ts",
         "set:pg": "echo 'export * from \"./pg\";\nexport const driver: \"pg\" | \"sqlite\" = \"pg\";' > server/db/index.ts",
-        "build:next": "next build",
+        "next:build": "next build",
         "build:sqlite": "mkdir -p dist && next build && node esbuild.mjs -e server/index.ts -o dist/server.mjs && node esbuild.mjs -e server/setup/migrationsSqlite.ts -o dist/migrations.mjs",
         "build:pg": "mkdir -p dist && next build && node esbuild.mjs -e server/index.ts -o dist/server.mjs && node esbuild.mjs -e server/setup/migrationsPg.ts -o dist/migrations.mjs",
         "start": "ENVIRONMENT=prod node dist/migrations.mjs && ENVIRONMENT=prod NODE_ENV=development node --enable-source-maps dist/server.mjs",
         "email": "email dev --dir server/emails/templates --port 3005",
         "build:cli": "node esbuild.mjs -e cli/index.ts -o dist/cli.mjs",
-        "format:check": "prettier --check .",
         "format": "prettier --write ."
     },
     "dependencies": {

@@ -78,7 +75,9 @@
         "class-variance-authority": "0.7.1",
         "clsx": "2.1.1",
         "cmdk": "1.1.1",
+        "cookie": "1.1.1",
         "cookie-parser": "1.4.7",
+        "cookies": "0.9.1",
         "cors": "2.8.5",
         "crypto-js": "4.2.0",
         "d3": "7.9.0",

@@ -91,6 +90,7 @@
         "glob": "13.0.0",
         "helmet": "8.1.0",
         "http-errors": "2.0.1",
+        "i": "0.3.7",
         "input-otp": "1.4.2",
         "ioredis": "5.9.2",
         "jmespath": "0.16.0",

@@ -104,7 +104,10 @@
         "next-themes": "0.4.6",
         "nextjs-toploader": "3.9.17",
         "node-cache": "5.1.2",
+        "node-fetch": "3.3.2",
         "nodemailer": "7.0.11",
+        "npm": "11.7.0",
+        "nprogress": "0.2.0",
         "oslo": "1.2.1",
         "pg": "8.17.1",
         "posthog-node": "5.23.0",

@@ -115,6 +118,7 @@
         "react-easy-sort": "1.8.0",
         "react-hook-form": "7.71.1",
         "react-icons": "5.5.0",
+        "rebuild": "0.1.2",
         "recharts": "2.15.4",
         "reodotdev": "1.0.0",
         "resend": "6.8.0",


@@ -37,55 +37,27 @@ const paramsSchema = z.strictObject({
 const bodySchema = z.strictObject({
     logoUrl: z
         .union([
-            z.literal(""),
-            z
-                .url("Must be a valid URL")
-                .superRefine(async (url, ctx) => {
-                    try {
-                        const response = await fetch(url, {
-                            method: "HEAD"
-                        }).catch(() => {
-                            // If HEAD fails (CORS or method not allowed), try GET
-                            return fetch(url, { method: "GET" });
-                        });
-                        if (response.status !== 200) {
-                            ctx.addIssue({
-                                code: "custom",
-                                message: `Failed to load image. Please check that the URL is accessible.`
-                            });
-                            return;
-                        }
-                        const contentType =
-                            response.headers.get("content-type") ?? "";
-                        if (!contentType.startsWith("image/")) {
-                            ctx.addIssue({
-                                code: "custom",
-                                message: `URL does not point to an image. Please provide a URL to an image file (e.g., .png, .jpg, .svg).`
-                            });
-                            return;
-                        }
-                    } catch (error) {
-                        let errorMessage =
-                            "Unable to verify image URL. Please check that the URL is accessible and points to an image file.";
-                        if (error instanceof TypeError && error.message.includes("fetch")) {
-                            errorMessage =
-                                "Network error: Unable to reach the URL. Please check your internet connection and verify the URL is correct.";
-                        } else if (error instanceof Error) {
-                            errorMessage = `Error verifying URL: ${error.message}`;
-                        }
-                        ctx.addIssue({
-                            code: "custom",
-                            message: errorMessage
-                        });
-                    }
-                })
+            z.string().length(0),
+            z.url().refine(
+                async (url) => {
+                    try {
+                        const response = await fetch(url);
+                        return (
+                            response.status === 200 &&
+                            (
+                                response.headers.get("content-type") ?? ""
+                            ).startsWith("image/")
+                        );
+                    } catch (error) {
+                        return false;
+                    }
+                },
+                {
+                    error: "Invalid logo URL, must be a valid image URL"
+                }
+            )
         ])
-        .transform((val) => (val === "" ? null : val))
-        .nullish(),
+        .optional(),
     logoWidth: z.coerce.number<number>().min(1),
     logoHeight: z.coerce.number<number>().min(1),
     resourceTitle: z.string(),

@@ -106,7 +78,7 @@ export async function upsertLoginPageBranding(
     next: NextFunction
 ): Promise<any> {
     try {
-        const parsedBody = await bodySchema.safeParseAsync(req.body);
+        const parsedBody = bodySchema.safeParse(req.body);
         if (!parsedBody.success) {
             return next(
                 createHttpError(

@@ -145,8 +117,9 @@ export async function upsertLoginPageBranding(
             typeof loginPageBranding
         >;
-        // Empty strings are transformed to null by the schema, which will clear the logo URL in the database
-        // We keep it as null (not undefined) because undefined fields are omitted from Drizzle updates
+        if ((updateData.logoUrl ?? "").trim().length === 0) {
+            updateData.logoUrl = undefined;
+        }
         if (
             build !== "saas" &&
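For clarity, the reworked logoUrl validator (the "+" lines above) accepts either an empty string or a URL whose response advertises an image content type. A minimal sketch of exercising it, assuming Zod 4 semantics as used in the diff (top-level z.url(), the error option on refine); since the refinement is async, validation has to go through safeParseAsync (a synchronous parse of the URL branch would throw):

import { z } from "zod";

// Mirrors the reworked union from the diff above.
const logoUrlSchema = z
    .union([
        z.string().length(0),
        z.url().refine(
            async (url) => {
                try {
                    // Accept only URLs that respond 200 with an image/* content type.
                    const response = await fetch(url);
                    return (
                        response.status === 200 &&
                        (response.headers.get("content-type") ?? "").startsWith("image/")
                    );
                } catch {
                    return false;
                }
            },
            { error: "Invalid logo URL, must be a valid image URL" }
        )
    ])
    .optional();

async function validateLogoUrl(input: unknown) {
    // Async refinements only run through the async parser.
    const result = await logoUrlSchema.safeParseAsync(input);
    return result.success ? result.data : null;
}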


@@ -9,6 +9,9 @@ import createHttpError from "http-errors";
 import logger from "@server/logger";
 import { fromError } from "zod-validation-error";
 import { OpenAPITags, registry } from "@server/openApi";
+import { rebuildClientAssociationsFromClient } from "@server/lib/rebuildClientAssociations";
+import { sendTerminateClient } from "./terminate";
+import { OlmErrorCodes } from "../olm/error";
 const archiveClientSchema = z.strictObject({
     clientId: z.string().transform(Number).pipe(z.int().positive())

@@ -74,6 +77,9 @@ export async function archiveClient(
                 .update(clients)
                 .set({ archived: true })
                 .where(eq(clients.clientId, clientId));
+            // Rebuild associations to clean up related data
+            await rebuildClientAssociationsFromClient(client, trx);
         });
         return response(res, {


@@ -1,6 +1,6 @@
 import { NextFunction, Request, Response } from "express";
 import { db } from "@server/db";
-import { olms } from "@server/db";
+import { olms, clients } from "@server/db";
 import { eq } from "drizzle-orm";
 import HttpCode from "@server/types/HttpCode";
 import createHttpError from "http-errors";

@@ -8,6 +8,9 @@ import response from "@server/lib/response";
 import { z } from "zod";
 import { fromError } from "zod-validation-error";
 import logger from "@server/logger";
+import { rebuildClientAssociationsFromClient } from "@server/lib/rebuildClientAssociations";
+import { sendTerminateClient } from "../client/terminate";
+import { OlmErrorCodes } from "./error";
 const paramsSchema = z
     .object({

@@ -34,7 +37,26 @@ export async function archiveUserOlm(
         const { olmId } = parsedParams.data;
+        // Archive the OLM and disconnect associated clients in a transaction
         await db.transaction(async (trx) => {
+            // Find all clients associated with this OLM
+            const associatedClients = await trx
+                .select()
+                .from(clients)
+                .where(eq(clients.olmId, olmId));
+            // Disconnect clients from the OLM (set olmId to null)
+            for (const client of associatedClients) {
+                await trx
+                    .update(clients)
+                    .set({ olmId: null })
+                    .where(eq(clients.clientId, client.clientId));
+                await rebuildClientAssociationsFromClient(client, trx);
+                await sendTerminateClient(client.clientId, OlmErrorCodes.TERMINATED_ARCHIVED, olmId);
+            }
+            // Archive the OLM (set archived to true)
             await trx
                 .update(olms)
                 .set({ archived: true })


@@ -64,20 +64,16 @@ export async function ensureSetupToken() {
             );
         }
-        if (existingToken) {
-            // Token exists in DB - update it if different
-            if (existingToken.token !== envSetupToken) {
-                console.warn(
-                    "Overwriting existing token in DB since PANGOLIN_SETUP_TOKEN is set"
-                );
+        if (existingToken?.token !== envSetupToken) {
+            console.warn(
+                "Overwriting existing token in DB since PANGOLIN_SETUP_TOKEN is set"
+            );
             await db
                 .update(setupTokens)
                 .set({ token: envSetupToken })
                 .where(eq(setupTokens.tokenId, existingToken.tokenId));
-            }
         } else {
-            // No existing token - insert new one
             const tokenId = generateId(15);
             await db.insert(setupTokens).values({


@@ -43,52 +43,25 @@ export type AuthPageCustomizationProps = {
 const AuthPageFormSchema = z.object({
     logoUrl: z.union([
-        z.literal(""),
-        z.url("Must be a valid URL").superRefine(async (url, ctx) => {
-            try {
-                const response = await fetch(url, {
-                    method: "HEAD"
-                }).catch(() => {
-                    // If HEAD fails (CORS or method not allowed), try GET
-                    return fetch(url, { method: "GET" });
-                });
-                if (response.status !== 200) {
-                    ctx.addIssue({
-                        code: "custom",
-                        message: `Failed to load image. Please check that the URL is accessible.`
-                    });
-                    return;
-                }
-                const contentType = response.headers.get("content-type") ?? "";
-                if (!contentType.startsWith("image/")) {
-                    ctx.addIssue({
-                        code: "custom",
-                        message: `URL does not point to an image. Please provide a URL to an image file (e.g., .png, .jpg, .svg).`
-                    });
-                    return;
-                }
-            } catch (error) {
-                let errorMessage =
-                    "Unable to verify image URL. Please check that the URL is accessible and points to an image file.";
-                if (
-                    error instanceof TypeError &&
-                    error.message.includes("fetch")
-                ) {
-                    errorMessage =
-                        "Network error: Unable to reach the URL. Please check your internet connection and verify the URL is correct.";
-                } else if (error instanceof Error) {
-                    errorMessage = `Error verifying URL: ${error.message}`;
-                }
-                ctx.addIssue({
-                    code: "custom",
-                    message: errorMessage
-                });
-            }
-        })
+        z.string().length(0),
+        z.url().refine(
+            async (url) => {
+                try {
+                    const response = await fetch(url);
+                    return (
+                        response.status === 200 &&
+                        (response.headers.get("content-type") ?? "").startsWith(
+                            "image/"
+                        )
+                    );
+                } catch (error) {
+                    return false;
+                }
+            },
+            {
+                error: "Invalid logo URL, must be a valid image URL"
+            }
+        )
     ]),
     logoWidth: z.coerce.number<number>().min(1),
     logoHeight: z.coerce.number<number>().min(1),

@@ -432,7 +405,9 @@ export default function AuthPageBrandingForm({
                             <Button
                                 variant="destructive"
                                 type="submit"
-                                loading={isDeletingBranding}
+                                loading={
+                                    isUpdatingBranding || isDeletingBranding
+                                }
                                 disabled={
                                     isUpdatingBranding ||
                                     isDeletingBranding ||

@@ -447,7 +422,7 @@ export default function AuthPageBrandingForm({
                         <Button
                             type="submit"
                             form="auth-page-branding-form"
-                            loading={isUpdatingBranding}
+                            loading={isUpdatingBranding || isDeletingBranding}
                             disabled={
                                 isUpdatingBranding ||
                                 isDeletingBranding ||


@@ -1,13 +1,41 @@
 "use client";
+import * as React from "react";
+import * as NProgress from "nprogress";
 import NextTopLoader from "nextjs-toploader";
+import { usePathname, useRouter, useSearchParams } from "next/navigation";
 export function TopLoader() {
     return (
-        <NextTopLoader
-            color="var(--color-primary)"
-            showSpinner={false}
-            height={2}
-        />
+        <>
+            <NextTopLoader showSpinner={false} color="var(--color-primary)" />
+            <FinishingLoader />
+        </>
     );
 }
+
+function FinishingLoader() {
+    const pathname = usePathname();
+    const router = useRouter();
+    const searchParams = useSearchParams();
+
+    React.useEffect(() => {
+        NProgress.done();
+    }, [pathname, router, searchParams]);
+
+    React.useEffect(() => {
+        const linkClickListener = (ev: MouseEvent) => {
+            const element = ev.target as HTMLElement;
+            const closestlink = element.closest("a");
+            const isOpenToNewTabClick =
+                ev.ctrlKey ||
+                ev.shiftKey ||
+                ev.metaKey || // apple
+                (ev.button && ev.button == 1); // middle click, >IE9 + everyone else
+            if (closestlink && isOpenToNewTabClick) {
+                NProgress.done();
+            }
+        };
+        window.addEventListener("click", linkClickListener);
+        return () => window.removeEventListener("click", linkClickListener);
+    }, []);
+
+    return null;
+}
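For context, a loader like this is typically mounted once in the root layout so that both the bar and the NProgress.done() cleanup effects cover every client-side navigation. A hypothetical sketch (the file path and import alias are assumptions, not taken from this diff); note that in the App Router, the useSearchParams() hook inside FinishingLoader needs a Suspense boundary:

// app/layout.tsx (hypothetical placement)
import { Suspense, type ReactNode } from "react";
import { TopLoader } from "@app/components/TopLoader";

export default function RootLayout({ children }: { children: ReactNode }) {
    return (
        <html lang="en">
            <body>
                {/* Suspense boundary for the useSearchParams() hook used by FinishingLoader */}
                <Suspense fallback={null}>
                    <TopLoader />
                </Suspense>
                {children}
            </body>
        </html>
    );
}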


@@ -28,6 +28,7 @@ import {
     TableRow
 } from "@app/components/ui/table";
 import { Tabs, TabsList, TabsTrigger, TabsContent } from "@app/components/ui/tabs";
+import ConfirmDeleteDialog from "@app/components/ConfirmDeleteDialog";
 import { Loader2, RefreshCw } from "lucide-react";
 import moment from "moment";
 import { useUserContext } from "@app/hooks/useUserContext";

@@ -58,6 +59,8 @@ export default function ViewDevicesDialog({
     const [devices, setDevices] = useState<Device[]>([]);
     const [loading, setLoading] = useState(false);
+    const [isArchiveModalOpen, setIsArchiveModalOpen] = useState(false);
+    const [selectedDevice, setSelectedDevice] = useState<Device | null>(null);
     const [activeTab, setActiveTab] = useState<"available" | "archived">("available");
     const fetchDevices = async () => {

@@ -105,6 +108,8 @@ export default function ViewDevicesDialog({
                         d.olmId === olmId ? { ...d, archived: true } : d
                     )
                 );
+                setIsArchiveModalOpen(false);
+                setSelectedDevice(null);
             } catch (error: any) {
                 console.error("Error archiving device:", error);
                 toast({

@@ -148,6 +153,8 @@ export default function ViewDevicesDialog({
     function reset() {
         setDevices([]);
+        setSelectedDevice(null);
+        setIsArchiveModalOpen(false);
     }
     return (

@@ -256,7 +263,12 @@
                                 <Button
                                     variant="outline"
                                     onClick={() => {
-                                        archiveDevice(device.olmId);
+                                        setSelectedDevice(
+                                            device
+                                        );
+                                        setIsArchiveModalOpen(
+                                            true
+                                        );
                                     }}
                                 >
                                     {t(

@@ -349,6 +361,34 @@
                 </CredenzaFooter>
             </CredenzaContent>
         </Credenza>
+        {selectedDevice && (
+            <ConfirmDeleteDialog
+                open={isArchiveModalOpen}
+                setOpen={(val) => {
+                    setIsArchiveModalOpen(val);
+                    if (!val) {
+                        setSelectedDevice(null);
+                    }
+                }}
+                dialog={
+                    <div className="space-y-2">
+                        <p>
+                            {t("deviceQuestionArchive") ||
+                                "Are you sure you want to archive this device?"}
+                        </p>
+                        <p>
+                            {t("deviceMessageArchive") ||
+                                "The device will be archived and removed from your active devices list."}
+                        </p>
+                    </div>
+                }
+                buttonText={t("deviceArchiveConfirm") || "Archive Device"}
+                onConfirm={async () => archiveDevice(selectedDevice.olmId)}
+                string={selectedDevice.name || selectedDevice.olmId}
+                title={t("archiveDevice") || "Archive Device"}
+            />
+        )}
         </>
     );
 }