mirror of https://github.com/fosrl/pangolin.git
synced 2026-02-02 08:09:10 +00:00

Compare commits (1 commit)

Commit: f2ba4b270f
.github/dependabot.yml (vendored, 14 lines changed)

@@ -44,9 +44,19 @@ updates:
     schedule:
       interval: "daily"
     groups:
-      patch-updates:
+      dev-patch-updates:
+        dependency-type: "development"
         update-types:
           - "patch"
-      minor-updates:
+      dev-minor-updates:
+        dependency-type: "development"
+        update-types:
+          - "minor"
+      prod-patch-updates:
+        dependency-type: "production"
+        update-types:
+          - "patch"
+      prod-minor-updates:
+        dependency-type: "production"
         update-types:
           - "minor"
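In effect, the former patch/minor groups are split by dependency type: development and production dependencies now get separate grouped update PRs at each update level. (Indentation in the hunk above is reconstructed; the mirror view does not preserve whitespace.)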
.github/workflows/cicd.yml (vendored, 73 lines changed)

@@ -482,77 +482,14 @@ jobs:
           echo "==> cosign sign (key) --recursive ${REF}"
           cosign sign --key env://COSIGN_PRIVATE_KEY --recursive "${REF}"
 
-          # Retry wrapper for verification to handle registry propagation delays
-          retry_verify() {
-            local cmd="$1"
-            local attempts=6
-            local delay=5
-            local i=1
-            until eval "$cmd"; do
-              if [ $i -ge $attempts ]; then
-                echo "Verification failed after $attempts attempts"
-                return 1
-              fi
-              echo "Verification not yet available. Retry $i/$attempts after ${delay}s..."
-              sleep $delay
-              i=$((i+1))
-              delay=$((delay*2))
-              # Cap the delay to avoid very long waits
-              if [ $delay -gt 60 ]; then delay=60; fi
-            done
-            return 0
-          }
-
           echo "==> cosign verify (public key) ${REF}"
-          if retry_verify "cosign verify --key env://COSIGN_PUBLIC_KEY '${REF}' -o text"; then
-            VERIFIED_INDEX=true
-          else
-            VERIFIED_INDEX=false
-          fi
+          cosign verify --key env://COSIGN_PUBLIC_KEY "${REF}" -o text
 
           echo "==> cosign verify (keyless policy) ${REF}"
-          if retry_verify "cosign verify --certificate-oidc-issuer '${issuer}' --certificate-identity-regexp '${id_regex}' '${REF}' -o text"; then
-            VERIFIED_INDEX_KEYLESS=true
-          else
-            VERIFIED_INDEX_KEYLESS=false
-          fi
-
-          # If index verification fails, attempt to verify child platform manifests
-          if [ "${VERIFIED_INDEX}" != "true" ] || [ "${VERIFIED_INDEX_KEYLESS}" != "true" ]; then
-            echo "Index verification not available; attempting child manifest verification for ${BASE_IMAGE}:${IMAGE_TAG}"
-            CHILD_VERIFIED=false
-
-            for ARCH in arm64 amd64; do
-              CHILD_TAG="${IMAGE_TAG}-${ARCH}"
-              echo "Resolving child digest for ${BASE_IMAGE}:${CHILD_TAG}"
-              CHILD_DIGEST="$(skopeo inspect --retry-times 3 docker://${BASE_IMAGE}:${CHILD_TAG} | jq -r '.Digest' || true)"
-              if [ -n "${CHILD_DIGEST}" ] && [ "${CHILD_DIGEST}" != "null" ]; then
-                CHILD_REF="${BASE_IMAGE}@${CHILD_DIGEST}"
-                echo "==> cosign verify (public key) child ${CHILD_REF}"
-                if retry_verify "cosign verify --key env://COSIGN_PUBLIC_KEY '${CHILD_REF}' -o text"; then
-                  CHILD_VERIFIED=true
-                  echo "Public key verification succeeded for child ${CHILD_REF}"
-                else
-                  echo "Public key verification failed for child ${CHILD_REF}"
-                fi
-
-                echo "==> cosign verify (keyless policy) child ${CHILD_REF}"
-                if retry_verify "cosign verify --certificate-oidc-issuer '${issuer}' --certificate-identity-regexp '${id_regex}' '${CHILD_REF}' -o text"; then
-                  CHILD_VERIFIED=true
-                  echo "Keyless verification succeeded for child ${CHILD_REF}"
-                else
-                  echo "Keyless verification failed for child ${CHILD_REF}"
-                fi
-              else
-                echo "No child digest found for ${BASE_IMAGE}:${CHILD_TAG}; skipping"
-              fi
-            done
-
-            if [ "${CHILD_VERIFIED}" != "true" ]; then
-              echo "Failed to verify index and no child manifests verified for ${BASE_IMAGE}:${IMAGE_TAG}"
-              exit 10
-            fi
-          fi
+          cosign verify \
+            --certificate-oidc-issuer "${issuer}" \
+            --certificate-identity-regexp "${id_regex}" \
+            "${REF}" -o text
 
           echo "✓ Successfully signed and verified ${BASE_IMAGE}:${IMAGE_TAG}"
         done
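Net effect: the exponential-backoff `retry_verify` wrapper and the per-architecture child-manifest fallback are gone. Each `cosign verify` now runs once against the index reference, and any verification failure fails the step immediately instead of falling back.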
Dockerfile (79 lines changed)

@@ -1,43 +1,63 @@
 FROM node:24-alpine AS builder
-
-WORKDIR /app
-
-ARG BUILD=oss
-ARG DATABASE=sqlite
-
-RUN apk add --no-cache python3 make g++
-
-# COPY package.json package-lock.json ./
-COPY package*.json ./
-RUN npm ci
-
-COPY . .
-
-RUN if [ "$BUILD" = "oss" ]; then rm -rf server/private; fi && \
-    npm run set:$DATABASE && \
-    npm run set:$BUILD && \
-    npm run db:$DATABASE:generate && \
-    npm run build:$DATABASE && \
-    npm run build:cli
-
-# test to make sure the build output is there and error if not
-RUN test -f dist/server.mjs
-
-# Prune dev dependencies and clean up to prepare for copy to runner
-RUN npm prune --omit=dev && npm cache clean --force
-
-FROM node:24-alpine AS runner
 
 # OCI Image Labels - Build Args for dynamic values
 ARG VERSION="dev"
 ARG REVISION=""
 ARG CREATED=""
 ARG LICENSE="AGPL-3.0"
 
+WORKDIR /app
+
+ARG BUILD=oss
+ARG DATABASE=sqlite
+
 # Derive title and description based on BUILD type
 ARG IMAGE_TITLE="Pangolin"
 ARG IMAGE_DESCRIPTION="Identity-aware VPN and proxy for remote access to anything, anywhere"
 
+RUN apk add --no-cache curl tzdata python3 make g++
+
+# COPY package.json package-lock.json ./
+COPY package*.json ./
+RUN npm ci
+
+COPY . .
+
+RUN echo "export * from \"./$DATABASE\";" > server/db/index.ts
+RUN echo "export const driver: \"pg\" | \"sqlite\" = \"$DATABASE\";" >> server/db/index.ts
+
+RUN echo "export const build = \"$BUILD\" as \"saas\" | \"enterprise\" | \"oss\";" > server/build.ts
+
+# Copy the appropriate TypeScript configuration based on build type
+RUN if [ "$BUILD" = "oss" ]; then cp tsconfig.oss.json tsconfig.json; \
+    elif [ "$BUILD" = "saas" ]; then cp tsconfig.saas.json tsconfig.json; \
+    elif [ "$BUILD" = "enterprise" ]; then cp tsconfig.enterprise.json tsconfig.json; \
+    fi
+
+# if the build is oss then remove the server/private directory
+RUN if [ "$BUILD" = "oss" ]; then rm -rf server/private; fi
+
+RUN if [ "$DATABASE" = "pg" ]; then npx drizzle-kit generate --dialect postgresql --schema ./server/db/pg/schema --out init; else npx drizzle-kit generate --dialect $DATABASE --schema ./server/db/$DATABASE/schema --out init; fi
+
+RUN mkdir -p dist
+RUN npm run next:build
+RUN node esbuild.mjs -e server/index.ts -o dist/server.mjs -b $BUILD
+RUN if [ "$DATABASE" = "pg" ]; then \
+    node esbuild.mjs -e server/setup/migrationsPg.ts -o dist/migrations.mjs; \
+    else \
+    node esbuild.mjs -e server/setup/migrationsSqlite.ts -o dist/migrations.mjs; \
+    fi
+
+# test to make sure the build output is there and error if not
+RUN test -f dist/server.mjs
+
+RUN npm run build:cli
+
+# Prune dev dependencies and clean up to prepare for copy to runner
+RUN npm prune --omit=dev && npm cache clean --force
+
+FROM node:24-alpine AS runner
+
 WORKDIR /app
 
 # Only curl and tzdata needed at runtime - no build tools!
@@ -46,10 +66,11 @@ RUN apk add --no-cache curl tzdata
 # Copy pre-built node_modules from builder (already pruned to production only)
 # This includes the compiled native modules like better-sqlite3
 COPY --from=builder /app/node_modules ./node_modules
 
 COPY --from=builder /app/.next/standalone ./
 COPY --from=builder /app/.next/static ./.next/static
 COPY --from=builder /app/dist ./dist
-COPY --from=builder /app/server/migrations ./dist/init
+COPY --from=builder /app/init ./dist/init
 COPY --from=builder /app/package.json ./package.json
 
 COPY ./cli/wrapper.sh /usr/local/bin/pangctl
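Two things change here: the builder's chained npm scripts are unrolled into explicit RUN steps (writing `server/db/index.ts`, `server/build.ts`, and the tsconfig directly instead of via `npm run set:*`), and migrations are now generated into `/app/init` by drizzle-kit, which is what the runner stage copies to `./dist/init` in place of `/app/server/migrations`.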
package-lock.json (generated, 2194 lines changed)
File diff suppressed because it is too large.
package.json (12 lines changed)

@@ -12,8 +12,6 @@
     "license": "SEE LICENSE IN LICENSE AND README.md",
     "scripts": {
         "dev": "NODE_ENV=development ENVIRONMENT=dev tsx watch server/index.ts",
-        "dev:check": "npx tsc --noEmit && npm run format:check",
-        "dev:setup": "cp config/config.example.yml config/config.yml && npm run set:oss && npm run set:sqlite && npm run db:sqlite:generate && npm run db:sqlite:push",
         "db:pg:generate": "drizzle-kit generate --config=./drizzle.pg.config.ts",
         "db:sqlite:generate": "drizzle-kit generate --config=./drizzle.sqlite.config.ts",
         "db:pg:push": "npx tsx server/db/pg/migrate.ts",
@@ -26,13 +24,12 @@
         "set:enterprise": "echo 'export const build = \"enterprise\" as \"saas\" | \"enterprise\" | \"oss\";' > server/build.ts && cp tsconfig.enterprise.json tsconfig.json",
         "set:sqlite": "echo 'export * from \"./sqlite\";\nexport const driver: \"pg\" | \"sqlite\" = \"sqlite\";' > server/db/index.ts",
         "set:pg": "echo 'export * from \"./pg\";\nexport const driver: \"pg\" | \"sqlite\" = \"pg\";' > server/db/index.ts",
-        "build:next": "next build",
+        "next:build": "next build",
         "build:sqlite": "mkdir -p dist && next build && node esbuild.mjs -e server/index.ts -o dist/server.mjs && node esbuild.mjs -e server/setup/migrationsSqlite.ts -o dist/migrations.mjs",
         "build:pg": "mkdir -p dist && next build && node esbuild.mjs -e server/index.ts -o dist/server.mjs && node esbuild.mjs -e server/setup/migrationsPg.ts -o dist/migrations.mjs",
         "start": "ENVIRONMENT=prod node dist/migrations.mjs && ENVIRONMENT=prod NODE_ENV=development node --enable-source-maps dist/server.mjs",
         "email": "email dev --dir server/emails/templates --port 3005",
         "build:cli": "node esbuild.mjs -e cli/index.ts -o dist/cli.mjs",
-        "format:check": "prettier --check .",
         "format": "prettier --write ."
     },
     "dependencies": {
@@ -78,7 +75,9 @@
         "class-variance-authority": "0.7.1",
         "clsx": "2.1.1",
         "cmdk": "1.1.1",
+        "cookie": "1.1.1",
         "cookie-parser": "1.4.7",
+        "cookies": "0.9.1",
         "cors": "2.8.5",
         "crypto-js": "4.2.0",
         "d3": "7.9.0",
@@ -91,6 +90,7 @@
         "glob": "13.0.0",
         "helmet": "8.1.0",
         "http-errors": "2.0.1",
+        "i": "0.3.7",
         "input-otp": "1.4.2",
         "ioredis": "5.9.2",
         "jmespath": "0.16.0",
@@ -104,7 +104,10 @@
         "next-themes": "0.4.6",
         "nextjs-toploader": "3.9.17",
         "node-cache": "5.1.2",
+        "node-fetch": "3.3.2",
         "nodemailer": "7.0.11",
+        "npm": "11.7.0",
+        "nprogress": "0.2.0",
         "oslo": "1.2.1",
         "pg": "8.17.1",
         "posthog-node": "5.23.0",
@@ -115,6 +118,7 @@
         "react-easy-sort": "1.8.0",
         "react-hook-form": "7.71.1",
         "react-icons": "5.5.0",
+        "rebuild": "0.1.2",
         "recharts": "2.15.4",
         "reodotdev": "1.0.0",
         "resend": "6.8.0",
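Note the script rename: `build:next` becomes `next:build`, matching the `npm run next:build` step added to the Dockerfile above; `dev:check`, `dev:setup`, and `format:check` are dropped.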
@@ -105,11 +105,13 @@ function getOpenApiDocumentation() {
         servers: [{ url: "/v1" }]
     });
 
+    if (!process.env.DISABLE_GEN_OPENAPI) {
         // convert to yaml and save to file
         const outputPath = path.join(APP_PATH, "openapi.yaml");
         const yamlOutput = yaml.dump(generated);
         fs.writeFileSync(outputPath, yamlOutput, "utf8");
         logger.info(`OpenAPI documentation saved to ${outputPath}`);
+    }
 
     return generated;
 }
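With this guard, setting the `DISABLE_GEN_OPENAPI` environment variable skips writing `openapi.yaml` to `APP_PATH`; the generated document is still returned to the caller either way.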
@@ -1,8 +1,6 @@
 import { eq, sql, and } from "drizzle-orm";
 import { v4 as uuidv4 } from "uuid";
 import { PutObjectCommand } from "@aws-sdk/client-s3";
-import * as fs from "fs/promises";
-import * as path from "path";
 import {
     db,
     usage,
@@ -34,8 +32,7 @@ interface StripeEvent {
 export function noop() {
     if (
         build !== "saas" ||
-        !process.env.S3_BUCKET ||
-        !process.env.LOCAL_FILE_PATH
+        !process.env.S3_BUCKET
     ) {
         return true;
     }
@@ -44,31 +41,37 @@ export function noop() {
 
 export class UsageService {
     private bucketName: string | undefined;
-    private currentEventFile: string | null = null;
-    private currentFileStartTime: number = 0;
-    private eventsDir: string | undefined;
-    private uploadingFiles: Set<string> = new Set();
+    private events: StripeEvent[] = [];
+    private lastUploadTime: number = Date.now();
+    private isUploading: boolean = false;
 
     constructor() {
         if (noop()) {
             return;
         }
-        // this.bucketName = privateConfig.getRawPrivateConfig().stripe?.s3Bucket;
-        // this.eventsDir = privateConfig.getRawPrivateConfig().stripe?.localFilePath;
         this.bucketName = process.env.S3_BUCKET || undefined;
-        this.eventsDir = process.env.LOCAL_FILE_PATH || undefined;
-
-        // Ensure events directory exists
-        this.initializeEventsDirectory().then(() => {
-            this.uploadPendingEventFilesOnStartup();
-        });
 
-        // Periodically check for old event files to upload
+        // Periodically check and upload events
         setInterval(() => {
-            this.uploadOldEventFiles().catch((err) => {
-                logger.error("Error in periodic event file upload:", err);
+            this.checkAndUploadEvents().catch((err) => {
+                logger.error("Error in periodic event upload:", err);
             });
         }, 30000); // every 30 seconds
+
+        // Handle graceful shutdown on SIGTERM
+        process.on("SIGTERM", async () => {
+            logger.info("SIGTERM received, uploading events before shutdown...");
+            await this.forceUpload();
+            logger.info("Events uploaded, proceeding with shutdown");
+        });
+
+        // Handle SIGINT as well (Ctrl+C)
+        process.on("SIGINT", async () => {
+            logger.info("SIGINT received, uploading events before shutdown...");
+            await this.forceUpload();
+            logger.info("Events uploaded, proceeding with shutdown");
+            process.exit(0);
+        });
     }
 
     /**
@@ -78,85 +81,6 @@ export class UsageService {
         return Math.round(value * 100000000000) / 100000000000; // 11 decimal places
     }
 
-    private async initializeEventsDirectory(): Promise<void> {
-        if (!this.eventsDir) {
-            logger.warn(
-                "Stripe local file path is not configured, skipping events directory initialization."
-            );
-            return;
-        }
-        try {
-            await fs.mkdir(this.eventsDir, { recursive: true });
-        } catch (error) {
-            logger.error("Failed to create events directory:", error);
-        }
-    }
-
-    private async uploadPendingEventFilesOnStartup(): Promise<void> {
-        if (!this.eventsDir || !this.bucketName) {
-            logger.warn(
-                "Stripe local file path or bucket name is not configured, skipping leftover event file upload."
-            );
-            return;
-        }
-        try {
-            const files = await fs.readdir(this.eventsDir);
-            for (const file of files) {
-                if (file.endsWith(".json")) {
-                    const filePath = path.join(this.eventsDir, file);
-                    try {
-                        const fileContent = await fs.readFile(
-                            filePath,
-                            "utf-8"
-                        );
-                        const events = JSON.parse(fileContent);
-                        if (Array.isArray(events) && events.length > 0) {
-                            // Upload to S3
-                            const uploadCommand = new PutObjectCommand({
-                                Bucket: this.bucketName,
-                                Key: file,
-                                Body: fileContent,
-                                ContentType: "application/json"
-                            });
-                            await s3Client.send(uploadCommand);
-
-                            // Check if file still exists before unlinking
-                            try {
-                                await fs.access(filePath);
-                                await fs.unlink(filePath);
-                            } catch (unlinkError) {
-                                logger.debug(
-                                    `Startup file ${file} was already deleted`
-                                );
-                            }
-
-                            logger.info(
-                                `Uploaded leftover event file ${file} to S3 with ${events.length} events`
-                            );
-                        } else {
-                            // Remove empty file
-                            try {
-                                await fs.access(filePath);
-                                await fs.unlink(filePath);
-                            } catch (unlinkError) {
-                                logger.debug(
-                                    `Empty startup file ${file} was already deleted`
-                                );
-                            }
-                        }
-                    } catch (err) {
-                        logger.error(
-                            `Error processing leftover event file ${file}:`,
-                            err
-                        );
-                    }
-                }
-            }
-        } catch (error) {
-            logger.error("Failed to scan for leftover event files");
-        }
-    }
-
     public async add(
         orgId: string,
         featureId: FeatureId,
@@ -450,121 +374,58 @@ export class UsageService {
             }
         };
 
-        await this.writeEventToFile(event);
-        await this.checkAndUploadFile();
+        this.addEventToMemory(event);
+        await this.checkAndUploadEvents();
     }
 
-    private async writeEventToFile(event: StripeEvent): Promise<void> {
-        if (!this.eventsDir || !this.bucketName) {
+    private addEventToMemory(event: StripeEvent): void {
+        if (!this.bucketName) {
             logger.warn(
-                "Stripe local file path or bucket name is not configured, skipping event file write."
+                "S3 bucket name is not configured, skipping event storage."
             );
             return;
         }
-        if (!this.currentEventFile) {
-            this.currentEventFile = this.generateEventFileName();
-            this.currentFileStartTime = Date.now();
-        }
-
-        const filePath = path.join(this.eventsDir, this.currentEventFile);
-
-        try {
-            let events: StripeEvent[] = [];
-
-            // Try to read existing file
-            try {
-                const fileContent = await fs.readFile(filePath, "utf-8");
-                events = JSON.parse(fileContent);
-            } catch (error) {
-                // File doesn't exist or is empty, start with empty array
-                events = [];
-            }
-
-            // Add new event
-            events.push(event);
-
-            // Write back to file
-            await fs.writeFile(filePath, JSON.stringify(events, null, 2));
-        } catch (error) {
-            logger.error("Failed to write event to file:", error);
-        }
-    }
-
-    private async checkAndUploadFile(): Promise<void> {
-        if (!this.currentEventFile) {
-            return;
-        }
-
+        this.events.push(event);
+    }
+
+    private async checkAndUploadEvents(): Promise<void> {
         const now = Date.now();
-        const fileAge = now - this.currentFileStartTime;
+        const timeSinceLastUpload = now - this.lastUploadTime;
 
-        // Check if file is at least 1 minute old
-        if (fileAge >= 60000) {
-            // 60 seconds
-            await this.uploadFileToS3();
+        // Check if at least 1 minute has passed since last upload
+        if (timeSinceLastUpload >= 60000 && this.events.length > 0) {
+            await this.uploadEventsToS3();
         }
     }
 
-    private async uploadFileToS3(): Promise<void> {
-        if (!this.bucketName || !this.eventsDir) {
+    private async uploadEventsToS3(): Promise<void> {
+        if (!this.bucketName) {
             logger.warn(
-                "Stripe local file path or bucket name is not configured, skipping S3 upload."
+                "S3 bucket name is not configured, skipping S3 upload."
            );
             return;
         }
-        if (!this.currentEventFile) {
-            return;
-        }
-
-        const fileName = this.currentEventFile;
-        const filePath = path.join(this.eventsDir, fileName);
-
-        // Check if this file is already being uploaded
-        if (this.uploadingFiles.has(fileName)) {
-            logger.debug(
-                `File ${fileName} is already being uploaded, skipping`
-            );
+
+        if (this.events.length === 0) {
             return;
         }
 
-        // Mark file as being uploaded
-        this.uploadingFiles.add(fileName);
+        // Check if already uploading
+        if (this.isUploading) {
+            logger.debug("Already uploading events, skipping");
+            return;
+        }
+
+        this.isUploading = true;
 
         try {
-            // Check if file exists before trying to read it
-            try {
-                await fs.access(filePath);
-            } catch (error) {
-                logger.debug(
-                    `File ${fileName} does not exist, may have been already processed`
-                );
-                this.uploadingFiles.delete(fileName);
-                // Reset current file if it was this file
-                if (this.currentEventFile === fileName) {
-                    this.currentEventFile = null;
-                    this.currentFileStartTime = 0;
-                }
-                return;
-            }
-
-            // Check if file exists and has content
-            const fileContent = await fs.readFile(filePath, "utf-8");
-            const events = JSON.parse(fileContent);
-
-            if (events.length === 0) {
-                // No events to upload, just clean up
-                try {
-                    await fs.unlink(filePath);
-                } catch (unlinkError) {
-                    // File may have been already deleted
-                    logger.debug(
-                        `File ${fileName} was already deleted during cleanup`
-                    );
-                }
-                this.currentEventFile = null;
-                this.uploadingFiles.delete(fileName);
-                return;
-            }
-
+            // Take a snapshot of current events and clear the array
+            const eventsToUpload = [...this.events];
+            this.events = [];
+            this.lastUploadTime = Date.now();
+
+            const fileName = this.generateEventFileName();
+            const fileContent = JSON.stringify(eventsToUpload, null, 2);
+
             // Upload to S3
             const uploadCommand = new PutObjectCommand({
@@ -576,29 +437,15 @@ export class UsageService {
 
             await s3Client.send(uploadCommand);
 
-            // Clean up local file - check if it still exists before unlinking
-            try {
-                await fs.access(filePath);
-                await fs.unlink(filePath);
-            } catch (unlinkError) {
-                // File may have been already deleted by another process
-                logger.debug(
-                    `File ${fileName} was already deleted during upload`
-                );
-            }
-
             logger.info(
-                `Uploaded ${fileName} to S3 with ${events.length} events`
+                `Uploaded ${fileName} to S3 with ${eventsToUpload.length} events`
             );
-
-            // Reset for next file
-            this.currentEventFile = null;
-            this.currentFileStartTime = 0;
         } catch (error) {
-            logger.error(`Failed to upload ${fileName} to S3:`, error);
+            logger.error("Failed to upload events to S3:", error);
+            // Note: Events are lost if upload fails. In a production system,
+            // you might want to add the events back to the array or implement retry logic
        } finally {
-            // Always remove from uploading set
-            this.uploadingFiles.delete(fileName);
+            this.isUploading = false;
        }
    }
 
@@ -695,111 +542,10 @@ export class UsageService {
     }
 
     public async forceUpload(): Promise<void> {
-        await this.uploadFileToS3();
-    }
-
-    /**
-     * Scan the events directory for files older than 1 minute and upload them if not empty.
-     */
-    private async uploadOldEventFiles(): Promise<void> {
-        if (!this.eventsDir || !this.bucketName) {
-            logger.warn(
-                "Stripe local file path or bucket name is not configured, skipping old event file upload."
-            );
-            return;
-        }
-        try {
-            const files = await fs.readdir(this.eventsDir);
-            const now = Date.now();
-            for (const file of files) {
-                if (!file.endsWith(".json")) continue;
-
-                // Skip files that are already being uploaded
-                if (this.uploadingFiles.has(file)) {
-                    logger.debug(
-                        `Skipping file ${file} as it's already being uploaded`
-                    );
-                    continue;
-                }
-
-                const filePath = path.join(this.eventsDir, file);
-
-                try {
-                    // Check if file still exists before processing
-                    try {
-                        await fs.access(filePath);
-                    } catch (accessError) {
-                        logger.debug(`File ${file} does not exist, skipping`);
-                        continue;
-                    }
-
-                    const stat = await fs.stat(filePath);
-                    const age = now - stat.mtimeMs;
-                    if (age >= 90000) {
-                        // 1.5 minutes - Mark as being uploaded
-                        this.uploadingFiles.add(file);
-
-                        try {
-                            const fileContent = await fs.readFile(
-                                filePath,
-                                "utf-8"
-                            );
-                            const events = JSON.parse(fileContent);
-                            if (Array.isArray(events) && events.length > 0) {
-                                // Upload to S3
-                                const uploadCommand = new PutObjectCommand({
-                                    Bucket: this.bucketName,
-                                    Key: file,
-                                    Body: fileContent,
-                                    ContentType: "application/json"
-                                });
-                                await s3Client.send(uploadCommand);
-
-                                // Check if file still exists before unlinking
-                                try {
-                                    await fs.access(filePath);
-                                    await fs.unlink(filePath);
-                                } catch (unlinkError) {
-                                    logger.debug(
-                                        `File ${file} was already deleted during interval upload`
-                                    );
-                                }
-
-                                logger.info(
-                                    `Interval: Uploaded event file ${file} to S3 with ${events.length} events`
-                                );
-                                // If this was the current event file, reset it
-                                if (this.currentEventFile === file) {
-                                    this.currentEventFile = null;
-                                    this.currentFileStartTime = 0;
-                                }
-                            } else {
-                                // Remove empty file
-                                try {
-                                    await fs.access(filePath);
-                                    await fs.unlink(filePath);
-                                } catch (unlinkError) {
-                                    logger.debug(
-                                        `Empty file ${file} was already deleted`
-                                    );
-                                }
-                            }
-                        } finally {
-                            // Always remove from uploading set
-                            this.uploadingFiles.delete(file);
-                        }
-                    }
-                } catch (err) {
-                    logger.error(
-                        `Interval: Error processing event file ${file}:`,
-                        err
-                    );
-                    // Remove from uploading set on error
-                    this.uploadingFiles.delete(file);
-                }
-            }
-        } catch (err) {
-            logger.error("Interval: Failed to scan for event files:", err);
+        if (this.events.length > 0) {
+            // Force upload regardless of time
+            this.lastUploadTime = 0; // Reset to force upload
+            await this.uploadEventsToS3();
         }
     }
 
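The upload path above replaces the file-backed queue with an in-memory buffer. A minimal standalone sketch of the snapshot-and-clear pattern it uses (TypeScript; the names mirror the diff, while `EventBuffer` and the `uploadJson` callback are hypothetical stand-ins for `UsageService` and the real `s3Client.send(new PutObjectCommand(...))` call):

    // Sketch of the buffer-snapshot upload pattern from this diff.
    type StripeEvent = { id: string; payload: unknown };

    class EventBuffer {
        private events: StripeEvent[] = [];
        private lastUploadTime = Date.now();
        private isUploading = false;

        add(event: StripeEvent): void {
            this.events.push(event);
        }

        // Called from a 30s interval; uploads at most once a minute.
        async checkAndUpload(
            uploadJson: (body: string) => Promise<void>
        ): Promise<void> {
            const stale = Date.now() - this.lastUploadTime >= 60_000;
            if (!stale || this.events.length === 0 || this.isUploading) return;

            this.isUploading = true;
            try {
                // Snapshot and clear so events added mid-upload land in the next batch.
                const batch = this.events;
                this.events = [];
                this.lastUploadTime = Date.now();
                await uploadJson(JSON.stringify(batch, null, 2));
            } finally {
                this.isUploading = false;
            }
        }
    }

As the diff's own comment notes, events in a failed batch are dropped; re-queueing or retry logic is left as future work.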
@@ -128,10 +128,7 @@ export class PrivateConfig {
         if (this.rawPrivateConfig.stripe?.s3Bucket) {
             process.env.S3_BUCKET = this.rawPrivateConfig.stripe.s3Bucket;
         }
-        if (this.rawPrivateConfig.stripe?.localFilePath) {
-            process.env.LOCAL_FILE_PATH =
-                this.rawPrivateConfig.stripe.localFilePath;
-        }
         if (this.rawPrivateConfig.stripe?.s3Region) {
             process.env.S3_REGION = this.rawPrivateConfig.stripe.s3Region;
         }
@@ -161,7 +161,7 @@ export const privateConfigSchema = z.object({
             webhook_secret: z.string(),
             s3Bucket: z.string(),
             s3Region: z.string().default("us-east-1"),
-            localFilePath: z.string()
+            localFilePath: z.string().optional()
         })
         .optional()
 });
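`localFilePath` becomes optional here because the file-based event buffer it configured was removed above; `LOCAL_FILE_PATH` is no longer read.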
@@ -37,55 +37,27 @@ const paramsSchema = z.strictObject({
 const bodySchema = z.strictObject({
     logoUrl: z
         .union([
-            z.literal(""),
-            z
-                .url("Must be a valid URL")
-                .superRefine(async (url, ctx) => {
-                    try {
-                        const response = await fetch(url, {
-                            method: "HEAD"
-                        }).catch(() => {
-                            // If HEAD fails (CORS or method not allowed), try GET
-                            return fetch(url, { method: "GET" });
-                        });
-
-                        if (response.status !== 200) {
-                            ctx.addIssue({
-                                code: "custom",
-                                message: `Failed to load image. Please check that the URL is accessible.`
-                            });
-                            return;
-                        }
-
-                        const contentType =
-                            response.headers.get("content-type") ?? "";
-                        if (!contentType.startsWith("image/")) {
-                            ctx.addIssue({
-                                code: "custom",
-                                message: `URL does not point to an image. Please provide a URL to an image file (e.g., .png, .jpg, .svg).`
-                            });
-                            return;
-                        }
-                    } catch (error) {
-                        let errorMessage =
-                            "Unable to verify image URL. Please check that the URL is accessible and points to an image file.";
-
-                        if (error instanceof TypeError && error.message.includes("fetch")) {
-                            errorMessage =
-                                "Network error: Unable to reach the URL. Please check your internet connection and verify the URL is correct.";
-                        } else if (error instanceof Error) {
-                            errorMessage = `Error verifying URL: ${error.message}`;
-                        }
-
-                        ctx.addIssue({
-                            code: "custom",
-                            message: errorMessage
-                        });
-                    }
-                })
+            z.string().length(0),
+            z.url().refine(
+                async (url) => {
+                    try {
+                        const response = await fetch(url);
+                        return (
+                            response.status === 200 &&
+                            (
+                                response.headers.get("content-type") ?? ""
+                            ).startsWith("image/")
+                        );
+                    } catch (error) {
+                        return false;
+                    }
+                },
+                {
+                    error: "Invalid logo URL, must be a valid image URL"
+                }
+            )
         ])
-        .transform((val) => (val === "" ? null : val))
-        .nullish(),
+        .optional(),
     logoWidth: z.coerce.number<number>().min(1),
     logoHeight: z.coerce.number<number>().min(1),
     resourceTitle: z.string(),
@@ -106,7 +78,7 @@ export async function upsertLoginPageBranding(
     next: NextFunction
 ): Promise<any> {
     try {
-        const parsedBody = await bodySchema.safeParseAsync(req.body);
+        const parsedBody = bodySchema.safeParse(req.body);
         if (!parsedBody.success) {
             return next(
                 createHttpError(
@@ -145,8 +117,9 @@ export async function upsertLoginPageBranding(
         typeof loginPageBranding
     >;
 
-    // Empty strings are transformed to null by the schema, which will clear the logo URL in the database
-    // We keep it as null (not undefined) because undefined fields are omitted from Drizzle updates
+    if ((updateData.logoUrl ?? "").trim().length === 0) {
+        updateData.logoUrl = undefined;
+    }
 
     if (
         build !== "saas" &&
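The same validator shape reappears in the client-side form later in this diff. A self-contained sketch of the new logo-URL schema (zod v4 API, as used in the hunk above; the usage comment is an assumption about how it would be called):

    import { z } from "zod";

    // Sketch of the new logo-URL validator from this diff (zod v4 API).
    const logoUrlSchema = z.union([
        z.string().length(0), // allow clearing the logo with an empty string
        z.url().refine(
            async (url) => {
                try {
                    const response = await fetch(url);
                    return (
                        response.status === 200 &&
                        (response.headers.get("content-type") ?? "").startsWith("image/")
                    );
                } catch {
                    return false;
                }
            },
            { error: "Invalid logo URL, must be a valid image URL" }
        )
    ]);

    // Async refinements require the async parse entry points:
    // await logoUrlSchema.safeParseAsync("https://example.com/logo.png");

One caveat worth noting: because the refinement is async, zod raises an error if it is reached during a synchronous parse, and the handler hunk above switches to plain `safeParse`.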
@@ -9,6 +9,9 @@ import createHttpError from "http-errors";
 import logger from "@server/logger";
 import { fromError } from "zod-validation-error";
 import { OpenAPITags, registry } from "@server/openApi";
+import { rebuildClientAssociationsFromClient } from "@server/lib/rebuildClientAssociations";
+import { sendTerminateClient } from "./terminate";
+import { OlmErrorCodes } from "../olm/error";
 
 const archiveClientSchema = z.strictObject({
     clientId: z.string().transform(Number).pipe(z.int().positive())
@@ -74,6 +77,9 @@ export async function archiveClient(
             .update(clients)
             .set({ archived: true })
             .where(eq(clients.clientId, clientId));
+
+        // Rebuild associations to clean up related data
+        await rebuildClientAssociationsFromClient(client, trx);
     });
 
     return response(res, {
@@ -1,6 +1,6 @@
 import { Request, Response, NextFunction } from "express";
 import { z } from "zod";
-import { db, olms, users } from "@server/db";
+import { db, olms } from "@server/db";
 import { clients, currentFingerprint } from "@server/db";
 import { eq, and } from "drizzle-orm";
 import response from "@server/lib/response";
@@ -36,7 +36,6 @@ async function query(clientId?: number, niceId?: string, orgId?: string) {
                 currentFingerprint,
                 eq(olms.olmId, currentFingerprint.olmId)
             )
-            .leftJoin(users, eq(clients.userId, users.userId))
             .limit(1);
         return res;
     } else if (niceId && orgId) {
@@ -49,7 +48,6 @@ async function query(clientId?: number, niceId?: string, orgId?: string) {
                 currentFingerprint,
                 eq(olms.olmId, currentFingerprint.olmId)
             )
-            .leftJoin(users, eq(clients.userId, users.userId))
             .limit(1);
         return res;
     }
@@ -209,9 +207,6 @@ export type GetClientResponse = NonNullable<
     olmId: string | null;
     agent: string | null;
     olmVersion: string | null;
-    userEmail: string | null;
-    userName: string | null;
-    userUsername: string | null;
     fingerprint: {
         username: string | null;
         hostname: string | null;
@@ -327,9 +322,6 @@ export async function getClient(
         olmId: client.olms ? client.olms.olmId : null,
         agent: client.olms?.agent || null,
         olmVersion: client.olms?.version || null,
-        userEmail: client.user?.email ?? null,
-        userName: client.user?.name ?? null,
-        userUsername: client.user?.username ?? null,
         fingerprint: fingerprintData,
         posture: postureData
     };
|
|||||||
import { validateSessionToken } from "@server/auth/sessions/app";
|
import { validateSessionToken } from "@server/auth/sessions/app";
|
||||||
import { encodeHexLowerCase } from "@oslojs/encoding";
|
import { encodeHexLowerCase } from "@oslojs/encoding";
|
||||||
import { sha256 } from "@oslojs/crypto/sha2";
|
import { sha256 } from "@oslojs/crypto/sha2";
|
||||||
import { getUserDeviceName } from "@server/db/names";
|
|
||||||
import { buildSiteConfigurationForOlmClient } from "./buildConfiguration";
|
import { buildSiteConfigurationForOlmClient } from "./buildConfiguration";
|
||||||
import { OlmErrorCodes, sendOlmError } from "./error";
|
import { OlmErrorCodes, sendOlmError } from "./error";
|
||||||
import { handleFingerprintInsertion } from "./fingerprintingUtils";
|
import { handleFingerprintInsertion } from "./fingerprintingUtils";
|
||||||
@@ -98,21 +97,6 @@ export const handleOlmRegisterMessage: MessageHandler = async (context) => {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const deviceModel = fingerprint?.deviceModel ?? null;
|
|
||||||
const computedName = getUserDeviceName(deviceModel, client.name);
|
|
||||||
if (computedName && computedName !== client.name) {
|
|
||||||
await db
|
|
||||||
.update(clients)
|
|
||||||
.set({ name: computedName })
|
|
||||||
.where(eq(clients.clientId, client.clientId));
|
|
||||||
}
|
|
||||||
if (computedName && computedName !== olm.name) {
|
|
||||||
await db
|
|
||||||
.update(olms)
|
|
||||||
.set({ name: computedName })
|
|
||||||
.where(eq(olms.olmId, olm.olmId));
|
|
||||||
}
|
|
||||||
|
|
||||||
const [org] = await db
|
const [org] = await db
|
||||||
.select()
|
.select()
|
||||||
.from(orgs)
|
.from(orgs)
|
||||||
|
|||||||
@@ -64,9 +64,7 @@ export async function ensureSetupToken() {
         );
     }
 
-    if (existingToken) {
-        // Token exists in DB - update it if different
-        if (existingToken.token !== envSetupToken) {
+    if (existingToken?.token !== envSetupToken) {
         console.warn(
             "Overwriting existing token in DB since PANGOLIN_SETUP_TOKEN is set"
         );
@@ -75,9 +73,7 @@
             .update(setupTokens)
             .set({ token: envSetupToken })
             .where(eq(setupTokens.tokenId, existingToken.tokenId));
-        }
     } else {
-        // No existing token - insert new one
         const tokenId = generateId(15);
 
         await db.insert(setupTokens).values({
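The nested `if (existingToken) { if (existingToken.token !== envSetupToken) { ... } }` collapses to a single optional-chained comparison, so the update branch is reached whenever no token with the configured value exists.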
@@ -43,52 +43,25 @@ export type AuthPageCustomizationProps = {
 
 const AuthPageFormSchema = z.object({
     logoUrl: z.union([
-        z.literal(""),
-        z.url("Must be a valid URL").superRefine(async (url, ctx) => {
-            try {
-                const response = await fetch(url, {
-                    method: "HEAD"
-                }).catch(() => {
-                    // If HEAD fails (CORS or method not allowed), try GET
-                    return fetch(url, { method: "GET" });
-                });
-
-                if (response.status !== 200) {
-                    ctx.addIssue({
-                        code: "custom",
-                        message: `Failed to load image. Please check that the URL is accessible.`
-                    });
-                    return;
-                }
-
-                const contentType = response.headers.get("content-type") ?? "";
-                if (!contentType.startsWith("image/")) {
-                    ctx.addIssue({
-                        code: "custom",
-                        message: `URL does not point to an image. Please provide a URL to an image file (e.g., .png, .jpg, .svg).`
-                    });
-                    return;
-                }
-            } catch (error) {
-                let errorMessage =
-                    "Unable to verify image URL. Please check that the URL is accessible and points to an image file.";
-
-                if (
-                    error instanceof TypeError &&
-                    error.message.includes("fetch")
-                ) {
-                    errorMessage =
-                        "Network error: Unable to reach the URL. Please check your internet connection and verify the URL is correct.";
-                } else if (error instanceof Error) {
-                    errorMessage = `Error verifying URL: ${error.message}`;
-                }
-
-                ctx.addIssue({
-                    code: "custom",
-                    message: errorMessage
-                });
-            }
-        })
+        z.string().length(0),
+        z.url().refine(
+            async (url) => {
+                try {
+                    const response = await fetch(url);
+                    return (
+                        response.status === 200 &&
+                        (response.headers.get("content-type") ?? "").startsWith(
+                            "image/"
+                        )
+                    );
+                } catch (error) {
+                    return false;
+                }
+            },
+            {
+                error: "Invalid logo URL, must be a valid image URL"
+            }
+        )
     ]),
     logoWidth: z.coerce.number<number>().min(1),
     logoHeight: z.coerce.number<number>().min(1),
@@ -432,7 +405,9 @@ export default function AuthPageBrandingForm({
                         <Button
                             variant="destructive"
                             type="submit"
-                            loading={isDeletingBranding}
+                            loading={
+                                isUpdatingBranding || isDeletingBranding
+                            }
                             disabled={
                                 isUpdatingBranding ||
                                 isDeletingBranding ||
@@ -447,7 +422,7 @@ export default function AuthPageBrandingForm({
                         <Button
                             type="submit"
                             form="auth-page-branding-form"
-                            loading={isUpdatingBranding}
+                            loading={isUpdatingBranding || isDeletingBranding}
                             disabled={
                                 isUpdatingBranding ||
                                 isDeletingBranding ||
@@ -8,7 +8,6 @@ import {
     InfoSections,
     InfoSectionTitle
 } from "@app/components/InfoSection";
-import { getUserDisplayName } from "@app/lib/getUserDisplayName";
 import { useTranslations } from "next-intl";
 
 type ClientInfoCardProps = {};
@@ -17,12 +16,6 @@ export default function SiteInfoCard({}: ClientInfoCardProps) {
     const { client, updateClient } = useClientContext();
     const t = useTranslations();
 
-    const userDisplayName = getUserDisplayName({
-        email: client.userEmail,
-        name: client.userName,
-        username: client.userUsername
-    });
-
     return (
         <Alert>
             <AlertDescription>
@@ -32,12 +25,8 @@ export default function SiteInfoCard({}: ClientInfoCardProps) {
                     <InfoSectionContent>{client.name}</InfoSectionContent>
                 </InfoSection>
                 <InfoSection>
-                    <InfoSectionTitle>
-                        {userDisplayName ? t("user") : t("identifier")}
-                    </InfoSectionTitle>
-                    <InfoSectionContent>
-                        {userDisplayName || client.niceId}
-                    </InfoSectionContent>
+                    <InfoSectionTitle>{t("identifier")}</InfoSectionTitle>
+                    <InfoSectionContent>{client.niceId}</InfoSectionContent>
                 </InfoSection>
                 <InfoSection>
                     <InfoSectionTitle>{t("status")}</InfoSectionTitle>
@@ -1,13 +1,41 @@
 "use client";
+import * as React from "react";
+import * as NProgress from "nprogress";
 import NextTopLoader from "nextjs-toploader";
+import { usePathname, useRouter, useSearchParams } from "next/navigation";
 
 export function TopLoader() {
     return (
-        <NextTopLoader
-            color="var(--color-primary)"
-            showSpinner={false}
-            height={2}
-        />
+        <>
+            <NextTopLoader showSpinner={false} color="var(--color-primary)" />
+            <FinishingLoader />
+        </>
     );
 }
+
+function FinishingLoader() {
+    const pathname = usePathname();
+    const router = useRouter();
+    const searchParams = useSearchParams();
+    React.useEffect(() => {
+        NProgress.done();
+    }, [pathname, router, searchParams]);
+    React.useEffect(() => {
+        const linkClickListener = (ev: MouseEvent) => {
+            const element = ev.target as HTMLElement;
+            const closestlink = element.closest("a");
+            const isOpenToNewTabClick =
+                ev.ctrlKey ||
+                ev.shiftKey ||
+                ev.metaKey || // apple
+                (ev.button && ev.button == 1); // middle click, >IE9 + everyone else
+
+            if (closestlink && isOpenToNewTabClick) {
+                NProgress.done();
+            }
+        };
+        window.addEventListener("click", linkClickListener);
+        return () => window.removeEventListener("click", linkClickListener);
+    }, []);
+    return null;
+}
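The added `FinishingLoader` calls `NProgress.done()` whenever the pathname or search params change, and also on ctrl/shift/meta/middle clicks on links (which open a new tab), so the top bar no longer sticks when a navigation finishes without a full route transition.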
@@ -28,6 +28,7 @@ import {
     TableRow
 } from "@app/components/ui/table";
 import { Tabs, TabsList, TabsTrigger, TabsContent } from "@app/components/ui/tabs";
+import ConfirmDeleteDialog from "@app/components/ConfirmDeleteDialog";
 import { Loader2, RefreshCw } from "lucide-react";
 import moment from "moment";
 import { useUserContext } from "@app/hooks/useUserContext";
@@ -58,6 +59,8 @@ export default function ViewDevicesDialog({
 
     const [devices, setDevices] = useState<Device[]>([]);
     const [loading, setLoading] = useState(false);
+    const [isArchiveModalOpen, setIsArchiveModalOpen] = useState(false);
+    const [selectedDevice, setSelectedDevice] = useState<Device | null>(null);
     const [activeTab, setActiveTab] = useState<"available" | "archived">("available");
 
     const fetchDevices = async () => {
@@ -105,6 +108,8 @@ export default function ViewDevicesDialog({
                     d.olmId === olmId ? { ...d, archived: true } : d
                 )
             );
+            setIsArchiveModalOpen(false);
+            setSelectedDevice(null);
         } catch (error: any) {
             console.error("Error archiving device:", error);
             toast({
@@ -148,6 +153,8 @@ export default function ViewDevicesDialog({
 
     function reset() {
         setDevices([]);
+        setSelectedDevice(null);
+        setIsArchiveModalOpen(false);
     }
 
     return (
@@ -256,7 +263,12 @@ export default function ViewDevicesDialog({
                                                 <Button
                                                     variant="outline"
                                                     onClick={() => {
-                                                        archiveDevice(device.olmId);
+                                                        setSelectedDevice(
+                                                            device
+                                                        );
+                                                        setIsArchiveModalOpen(
+                                                            true
+                                                        );
                                                     }}
                                                 >
                                                     {t(
@@ -349,6 +361,34 @@ export default function ViewDevicesDialog({
                 </CredenzaFooter>
             </CredenzaContent>
         </Credenza>
+
+        {selectedDevice && (
+            <ConfirmDeleteDialog
+                open={isArchiveModalOpen}
+                setOpen={(val) => {
+                    setIsArchiveModalOpen(val);
+                    if (!val) {
+                        setSelectedDevice(null);
+                    }
+                }}
+                dialog={
+                    <div className="space-y-2">
+                        <p>
+                            {t("deviceQuestionArchive") ||
+                                "Are you sure you want to archive this device?"}
+                        </p>
+                        <p>
+                            {t("deviceMessageArchive") ||
+                                "The device will be archived and removed from your active devices list."}
+                        </p>
+                    </div>
+                }
+                buttonText={t("deviceArchiveConfirm") || "Archive Device"}
+                onConfirm={async () => archiveDevice(selectedDevice.olmId)}
+                string={selectedDevice.name || selectedDevice.olmId}
+                title={t("archiveDevice") || "Archive Device"}
+            />
+        )}
         </>
     );
 }
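Archiving is now a two-step flow: the row button only selects the device and opens the `ConfirmDeleteDialog`; `archiveDevice` runs on confirmation, and the dialog state is cleaned up on success and on reset.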
@@ -91,7 +91,7 @@ export function NewtSiteInstallCommands({
       - NEWT_SECRET=${secret}${acceptClientsEnv}`
                 ],
                 "Docker Run": [
-                    `docker run -dit --network host fosrl/newt --id ${id} --secret ${secret} --endpoint ${endpoint}${acceptClientsFlag}`
+                    `docker run -dit fosrl/newt --id ${id} --secret ${secret} --endpoint ${endpoint}${acceptClientsFlag}`
                 ]
             },
             kubernetes: {
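The generated `docker run` command for Newt no longer passes `--network host`.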