Merge pull request 'new prod env' (#3) from development into main
All checks were successful
CI/CD Pipeline / Build and Push App Docker Image (push) Successful in 1m43s
CI/CD Pipeline / Build and Push PocketBase Docker Image (push) Successful in 13s
CI/CD Pipeline / Deploy to Kubernetes (push) Successful in 46s

Reviewed-on: #3
This commit is contained in:
2026-02-09 12:59:07 -06:00
41 changed files with 1564 additions and 725 deletions

143
.gitea/workflows/ci-cd.yaml Normal file
View File

@@ -0,0 +1,143 @@
---
# CI/CD pipeline (Gitea Actions): builds the app and PocketBase images and
# deploys via kustomize. Branch selects the target environment:
#   main        -> prod (tag: latest)
#   development -> dev  (tag: dev)
name: CI/CD Pipeline

on:
  push:
    branches:
      - development
      - main

jobs:
  build-app:
    name: Build and Push App Docker Image
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set environment variables
        run: |
          if [ "${{ github.ref }}" == "refs/heads/main" ]; then
            echo "DOCKER_TAG=latest" >> $GITHUB_ENV
            echo "ENVIRONMENT=prod" >> $GITHUB_ENV
          else
            echo "DOCKER_TAG=dev" >> $GITHUB_ENV
            echo "ENVIRONMENT=dev" >> $GITHUB_ENV
          fi

      - name: Build and push app Docker image
        run: |
          # --password-stdin keeps the token out of the process list and the
          # step log (docker warns when -p is used on the command line).
          echo "${{ secrets.PACKAGES_TOKEN }}" | docker login git.yohler.net -u ${{ github.actor }} --password-stdin
          docker build \
            -f Dockerfile \
            -t git.yohler.net/kyle/flxn-app:${{ env.DOCKER_TAG }} \
            -t git.yohler.net/kyle/flxn-app:${{ github.sha }} \
            .
          docker push git.yohler.net/kyle/flxn-app:${{ env.DOCKER_TAG }}
          docker push git.yohler.net/kyle/flxn-app:${{ github.sha }}

  build-pocketbase:
    name: Build and Push PocketBase Docker Image
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          # Full history so `git diff` against the pre-push SHA works.
          fetch-depth: 0

      - name: Check if PocketBase Dockerfile or migrations changed
        id: check_changes
        run: |
          # Treat a missing/zero "before" SHA (new branch, force push) as changed.
          if [ "${{ github.event.before }}" == "0000000000000000000000000000000000000000" ] || ! git cat-file -e ${{ github.event.before }} 2>/dev/null; then
            echo "changed=true" >> $GITHUB_OUTPUT
          elif git diff --name-only ${{ github.event.before }} ${{ github.sha }} | grep -qE "(Dockerfile.pocketbase|pb_migrations/)"; then
            echo "changed=true" >> $GITHUB_OUTPUT
          else
            echo "changed=false" >> $GITHUB_OUTPUT
          fi

      - name: Build and push PocketBase Docker image
        # The zero-SHA case is already folded into check_changes above, so the
        # extra `github.event.before == '0000…'` clause was redundant.
        if: steps.check_changes.outputs.changed == 'true'
        run: |
          echo "${{ secrets.PACKAGES_TOKEN }}" | docker login git.yohler.net -u ${{ github.actor }} --password-stdin
          docker build \
            -f Dockerfile.pocketbase \
            -t git.yohler.net/kyle/flxn-pocketbase:latest \
            -t git.yohler.net/kyle/flxn-pocketbase:${{ github.sha }} \
            .
          docker push git.yohler.net/kyle/flxn-pocketbase:latest
          docker push git.yohler.net/kyle/flxn-pocketbase:${{ github.sha }}

  deploy:
    name: Deploy to Kubernetes
    needs: [build-app, build-pocketbase]
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set environment variables
        run: |
          if [ "${{ github.ref }}" == "refs/heads/main" ]; then
            echo "ENVIRONMENT=prod" >> $GITHUB_ENV
            echo "NAMESPACE=flxn-prod" >> $GITHUB_ENV
          else
            echo "ENVIRONMENT=dev" >> $GITHUB_ENV
            echo "NAMESPACE=flxn-dev" >> $GITHUB_ENV
          fi

      - name: Install kubectl
        run: |
          curl -LO "https://dl.k8s.io/release/$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl"
          chmod +x kubectl
          mv kubectl /usr/local/bin/

      - name: Configure kubectl
        run: |
          mkdir -p $HOME/.kube
          echo "${{ secrets.KUBE_CONFIG }}" | base64 -d > $HOME/.kube/config
          chmod 600 $HOME/.kube/config
          # NOTE(review): skipping TLS verification accepts any API server
          # certificate; prefer embedding the cluster CA in KUBE_CONFIG.
          kubectl config set-cluster local --insecure-skip-tls-verify=true

      - name: Verify kubectl access
        run: |
          kubectl version --client
          kubectl get nodes

      - name: Deploy shared services (SuperTokens)
        run: |
          kubectl apply -k k8s/overlays/shared/

      - name: Deploy to ${{ env.ENVIRONMENT }}
        run: |
          kubectl apply -k k8s/overlays/${{ env.ENVIRONMENT }}/

      - name: Force rollout to pull latest image
        run: |
          # Images use mutable tags (latest/dev), so a restart is needed to
          # pick up newly pushed content even when the manifest is unchanged.
          kubectl rollout restart deployment/flxn-app -n ${{ env.NAMESPACE }}
          kubectl rollout restart deployment/flxn-pocketbase -n ${{ env.NAMESPACE }}

      - name: Wait for rollout
        run: |
          kubectl rollout status deployment/flxn-app -n ${{ env.NAMESPACE }} --timeout=5m
          kubectl rollout status deployment/flxn-pocketbase -n ${{ env.NAMESPACE }} --timeout=5m

      - name: Verify deployment
        run: |
          kubectl get pods -n ${{ env.NAMESPACE }} -l app=flxn
          kubectl get svc -n ${{ env.NAMESPACE }} -l app=flxn
          kubectl get ingress -n ${{ env.NAMESPACE }}

      - name: Check app health
        run: |
          sleep 15
          APP_POD=$(kubectl get pod -n ${{ env.NAMESPACE }} -l component=app -o jsonpath='{.items[0].metadata.name}')
          kubectl exec -n ${{ env.NAMESPACE }} $APP_POD -- wget -O- http://localhost:3000/api/health || echo "Health check failed (endpoint may not exist yet)"

      - name: Check PocketBase health
        run: |
          PB_POD=$(kubectl get pod -n ${{ env.NAMESPACE }} -l component=pocketbase -o jsonpath='{.items[0].metadata.name}')
          kubectl exec -n ${{ env.NAMESPACE }} $PB_POD -- wget -O- http://localhost:8090/api/health || echo "PocketBase health check completed"

29
Dockerfile Normal file
View File

@@ -0,0 +1,29 @@
# ---- Build stage: install dependencies and build the app bundles ----
FROM oven/bun:1 AS builder
WORKDIR /app
# Copy manifest + lockfile first so the install layer is cached until deps change.
COPY package.json bun.lockb* ./
RUN bun install --frozen-lockfile
COPY . .
RUN bun run build
# ---- Runtime stage: slim alpine image with only build output + entry script ----
FROM oven/bun:1-alpine AS runtime
WORKDIR /app
COPY --from=builder /app/dist ./dist
COPY --from=builder /app/server.ts ./server.ts
EXPOSE 3000
ENV NODE_ENV=production
ENV PORT=3000
ENV NITRO_PORT=3000
# Probe the app's health endpoint; a non-2xx response or fetch error marks the
# container unhealthy (exit 1).
HEALTHCHECK --interval=30s --timeout=3s --start-period=10s --retries=3 \
CMD bun -e "fetch('http://localhost:3000/api/health').then(r => r.ok ? process.exit(0) : process.exit(1)).catch(() => process.exit(1))"
CMD ["bun", "run", "server.ts"]

View File

@@ -1,16 +1,23 @@
FROM alpine:latest
ARG PB_VERSION=0.29.2
ARG PB_VERSION=0.26.5
RUN apk add --no-cache \
unzip \
ca-certificates
# download and unzip PocketBase
ADD https://github.com/pocketbase/pocketbase/releases/download/v${PB_VERSION}/pocketbase_${PB_VERSION}_linux_amd64.zip /tmp/pb.zip
RUN unzip /tmp/pb.zip -d /pb/
RUN unzip /tmp/pb.zip -d /pb/ && \
rm /tmp/pb.zip && \
chmod +x /pb/pocketbase
RUN mkdir -p /pb/pb_data
COPY pb_migrations /pb/pb_migrations
EXPOSE 8090
# start PocketBase
CMD ["/pb/pocketbase", "serve", "--http=0.0.0.0:8090"]
HEALTHCHECK --interval=30s --timeout=3s --start-period=10s --retries=3 \
CMD wget --no-verbose --tries=1 --spider http://localhost:8090/api/health || exit 1
CMD ["/pb/pocketbase", "serve", "--http=0.0.0.0:8090", "--dir=/pb/pb_data", "--migrationsDir=/pb/pb_migrations"]

Binary file not shown.

623
bun.lock

File diff suppressed because it is too large Load Diff

View File

@@ -13,7 +13,6 @@ services:
- .env.docker
volumes:
- postgres-data:/var/lib/postgresql/data
- ./.docker-postgres-init:/docker-entrypoint-initdb.d
networks:
- app-network

View File

@@ -0,0 +1,124 @@
---
# Deployment for the Flxn application server. Configuration comes from the
# flxn-config ConfigMap (public values) and flxn-secrets Secret (credentials).
apiVersion: apps/v1
kind: Deployment
metadata:
  name: flxn-app
  labels:
    app: flxn
    component: app
spec:
  replicas: 1
  selector:
    matchLabels:
      app: flxn
      component: app
  template:
    metadata:
      labels:
        app: flxn
        component: app
    spec:
      containers:
        - name: flxn-app
          image: git.yohler.net/kyle/flxn-app:latest
          # Mutable tag -> always pull so a rollout restart picks up new pushes.
          imagePullPolicy: Always
          ports:
            - containerPort: 3000
              name: http
              protocol: TCP
          env:
            - name: VITE_API_DOMAIN
              valueFrom:
                configMapKeyRef:
                  name: flxn-config
                  key: vite_api_domain
            - name: VITE_WEBSITE_DOMAIN
              valueFrom:
                configMapKeyRef:
                  name: flxn-config
                  key: vite_website_domain
            - name: SUPERTOKENS_URI
              valueFrom:
                configMapKeyRef:
                  name: flxn-config
                  key: supertokens_uri
            - name: POCKETBASE_URL
              valueFrom:
                configMapKeyRef:
                  name: flxn-config
                  key: pocketbase_url
            - name: SUPERTOKENS_API_KEY
              valueFrom:
                secretKeyRef:
                  name: flxn-secrets
                  key: supertokens_api_key
            - name: PORT
              value: "3000"
            - name: NODE_ENV
              value: "production"
            - name: TWILIO_ACCOUNT_SID
              valueFrom:
                secretKeyRef:
                  name: flxn-secrets
                  key: twilio_account_sid
            - name: TWILIO_AUTH_TOKEN
              valueFrom:
                secretKeyRef:
                  name: flxn-secrets
                  key: twilio_auth_token
            - name: TWILIO_SERVICE_SID
              valueFrom:
                secretKeyRef:
                  name: flxn-secrets
                  key: twilio_service_sid
            - name: POCKETBASE_ADMIN_EMAIL
              valueFrom:
                secretKeyRef:
                  name: flxn-secrets
                  key: pocketbase_admin_email
            - name: POCKETBASE_ADMIN_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: flxn-secrets
                  key: pocketbase_admin_password
            - name: VITE_SPOTIFY_CLIENT_ID
              valueFrom:
                configMapKeyRef:
                  name: flxn-config
                  key: vite_spotify_client_id
            - name: SPOTIFY_CLIENT_SECRET
              valueFrom:
                secretKeyRef:
                  name: flxn-secrets
                  key: spotify_client_secret
            - name: VITE_SPOTIFY_REDIRECT_URI
              valueFrom:
                configMapKeyRef:
                  name: flxn-config
                  key: vite_spotify_redirect_uri
          resources:
            requests:
              memory: "768Mi"
              cpu: "250m"
            limits:
              memory: "1536Mi"
              cpu: "1000m"
          livenessProbe:
            httpGet:
              path: /api/health
              port: 3000
            initialDelaySeconds: 30
            periodSeconds: 30
            timeoutSeconds: 5
            failureThreshold: 3
          readinessProbe:
            httpGet:
              path: /api/health
              port: 3000
            initialDelaySeconds: 10
            periodSeconds: 10
            timeoutSeconds: 3
            failureThreshold: 3

17
k8s/base/app-service.yaml Normal file
View File

@@ -0,0 +1,17 @@
---
# ClusterIP Service fronting the Flxn app pods (overlays patch this to a
# NodePort for external access).
apiVersion: v1
kind: Service
metadata:
  name: flxn-app
  labels:
    app: flxn
    component: app
spec:
  type: ClusterIP
  ports:
    - port: 3000
      targetPort: 3000
      protocol: TCP
      name: http
  selector:
    app: flxn
    component: app

View File

@@ -0,0 +1,12 @@
---
# Base kustomization shared by the dev and prod overlays.
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization

resources:
  - app-deployment.yaml
  - app-service.yaml
  - pocketbase-deployment.yaml
  - pocketbase-service.yaml
  - pb-data-pvc.yaml

# `commonLabels` is deprecated; `labels` with includeSelectors reproduces its
# behavior (label applied to metadata AND selectors) and matches the style
# already used by the shared overlay.
labels:
  - pairs:
      app: flxn
    includeSelectors: true

13
k8s/base/pb-data-pvc.yaml Normal file
View File

@@ -0,0 +1,13 @@
---
# Persistent storage for PocketBase's data directory (/pb/pb_data).
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: flxn-pb-data
  labels:
    app: flxn
    component: pocketbase
spec:
  accessModes:
    - ReadWriteOnce
  resources:
    requests:
      storage: 10Gi

View File

@@ -0,0 +1,57 @@
---
# Deployment for PocketBase, backed by the flxn-pb-data PVC.
# ReadWriteOnce storage + 1 replica: do not scale this up without an RWX volume.
apiVersion: apps/v1
kind: Deployment
metadata:
  name: flxn-pocketbase
  labels:
    app: flxn
    component: pocketbase
spec:
  replicas: 1
  selector:
    matchLabels:
      app: flxn
      component: pocketbase
  template:
    metadata:
      labels:
        app: flxn
        component: pocketbase
    spec:
      containers:
        - name: pocketbase
          image: git.yohler.net/kyle/flxn-pocketbase:latest
          # Mutable tag -> always pull so restarts pick up re-pushed images.
          imagePullPolicy: Always
          ports:
            - containerPort: 8090
              name: http
              protocol: TCP
          volumeMounts:
            - name: pb-data
              mountPath: /pb/pb_data
          resources:
            requests:
              memory: "256Mi"
              cpu: "100m"
            limits:
              memory: "512Mi"
              cpu: "500m"
          livenessProbe:
            httpGet:
              path: /api/health
              port: 8090
            initialDelaySeconds: 10
            periodSeconds: 30
            timeoutSeconds: 5
            failureThreshold: 3
          readinessProbe:
            httpGet:
              path: /api/health
              port: 8090
            initialDelaySeconds: 5
            periodSeconds: 10
            timeoutSeconds: 3
            failureThreshold: 3
      volumes:
        - name: pb-data
          persistentVolumeClaim:
            claimName: flxn-pb-data

View File

@@ -0,0 +1,18 @@
---
# NodePort Service for PocketBase; overlays patch nodePort per environment
# (dev: 30096, prod: 30097).
apiVersion: v1
kind: Service
metadata:
  name: flxn-pocketbase
  labels:
    app: flxn
    component: pocketbase
spec:
  type: NodePort
  ports:
    - port: 8090
      targetPort: 8090
      nodePort: 30090
      protocol: TCP
      name: http
  selector:
    app: flxn
    component: pocketbase

View File

@@ -0,0 +1,14 @@
---
# Non-secret runtime configuration for the dev environment.
apiVersion: v1
kind: ConfigMap
metadata:
  name: flxn-config
  namespace: flxn-dev
data:
  vite_api_domain: "https://dev.flexxon.app"
  vite_website_domain: "https://dev.flexxon.app"
  # Node-IP URLs for services exposed via NodePort (see overlay patches).
  supertokens_uri: "http://192.168.0.50:30568"
  pocketbase_url: "http://192.168.0.50:30096"
  vite_spotify_client_id: "3ffde6b594e84460b3d4b329b8919277"
  vite_spotify_redirect_uri: "https://dev.flexxon.app/api/spotify/callback"
  s3_endpoint: "https://s3.yohler.net"
  s3_bucket: "flxn-dev"

View File

@@ -0,0 +1,17 @@
---
# Ingress routing dev.flexxon.app to the flxn-app Service.
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  name: flxn-app
  namespace: flxn-dev
spec:
  rules:
    - host: dev.flexxon.app
      http:
        paths:
          - path: /
            pathType: Prefix
            backend:
              service:
                name: flxn-app
                port:
                  number: 3000

View File

@@ -0,0 +1,50 @@
---
# Kustomize overlay for the dev environment (namespace flxn-dev).
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
namespace: flxn-dev

resources:
  - namespace.yaml
  - ../../base
  - configmap.yaml
  - ingress.yaml

images:
  - name: git.yohler.net/kyle/flxn-app
    newTag: dev
  - name: git.yohler.net/kyle/flxn-pocketbase
    newTag: latest

# `commonLabels` is deprecated; `labels` with includeSelectors reproduces its
# behavior (label applied to metadata AND selectors).
labels:
  - pairs:
      environment: dev
    includeSelectors: true

patches:
  - patch: |-
      - op: replace
        path: /spec/template/spec/containers/0/resources/requests/memory
        value: "768Mi"
      - op: replace
        path: /spec/template/spec/containers/0/resources/limits/memory
        value: "1536Mi"
    target:
      kind: Deployment
      name: flxn-app
  # Expose the app externally on a dev-specific NodePort.
  - patch: |-
      - op: replace
        path: /spec/type
        value: NodePort
      - op: add
        path: /spec/ports/0/nodePort
        value: 30083
    target:
      kind: Service
      name: flxn-app
  - patch: |-
      - op: replace
        path: /spec/ports/0/nodePort
        value: 30096
    target:
      kind: Service
      name: flxn-pocketbase

View File

@@ -0,0 +1,4 @@
---
# Namespace for the dev environment.
apiVersion: v1
kind: Namespace
metadata:
  name: flxn-dev

View File

@@ -0,0 +1,14 @@
---
# Non-secret runtime configuration for the prod environment.
apiVersion: v1
kind: ConfigMap
metadata:
  name: flxn-config
  namespace: flxn-prod
data:
  vite_api_domain: "https://flexxon.app"
  vite_website_domain: "https://flexxon.app"
  # Node-IP URLs for services exposed via NodePort (see overlay patches).
  supertokens_uri: "http://192.168.0.50:30568"
  pocketbase_url: "http://192.168.0.50:30097"
  vite_spotify_client_id: "3ffde6b594e84460b3d4b329b8919277"
  vite_spotify_redirect_uri: "https://flexxon.app/api/spotify/callback"
  s3_endpoint: "https://s3.yohler.net"
  s3_bucket: "flxn-prod"

View File

@@ -0,0 +1,17 @@
---
# Ingress routing flexxon.app to the flxn-app Service.
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  name: flxn-app
  namespace: flxn-prod
spec:
  rules:
    - host: flexxon.app
      http:
        paths:
          - path: /
            pathType: Prefix
            backend:
              service:
                name: flxn-app
                port:
                  number: 3000

View File

@@ -0,0 +1,50 @@
---
# Kustomize overlay for the prod environment (namespace flxn-prod).
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
namespace: flxn-prod

resources:
  - namespace.yaml
  - ../../base
  - configmap.yaml
  - ingress.yaml

images:
  - name: git.yohler.net/kyle/flxn-app
    newTag: latest
  - name: git.yohler.net/kyle/flxn-pocketbase
    newTag: latest

# `commonLabels` is deprecated; `labels` with includeSelectors reproduces its
# behavior (label applied to metadata AND selectors).
labels:
  - pairs:
      environment: prod
    includeSelectors: true

patches:
  - patch: |-
      - op: replace
        path: /spec/template/spec/containers/0/resources/requests/memory
        value: "1536Mi"
      - op: replace
        path: /spec/template/spec/containers/0/resources/limits/memory
        value: "3Gi"
    target:
      kind: Deployment
      name: flxn-app
  # Expose the app externally on a prod-specific NodePort.
  - patch: |-
      - op: replace
        path: /spec/type
        value: NodePort
      - op: add
        path: /spec/ports/0/nodePort
        value: 30084
    target:
      kind: Service
      name: flxn-app
  - patch: |-
      - op: replace
        path: /spec/ports/0/nodePort
        value: 30097
    target:
      kind: Service
      name: flxn-pocketbase

View File

@@ -0,0 +1,4 @@
---
# Namespace for the prod environment.
apiVersion: v1
kind: Namespace
metadata:
  name: flxn-prod

View File

@@ -0,0 +1,5 @@
---
# Empty ConfigMap placeholder for SuperTokens configuration.
# NOTE(review): this file is not listed in the shared kustomization's
# resources, so kustomize does not apply it — confirm whether that is intended.
apiVersion: v1
kind: ConfigMap
metadata:
  name: supertokens-config
  namespace: flxn-shared

View File

@@ -0,0 +1,16 @@
---
# Kustomization for shared services (SuperTokens + its Postgres database),
# deployed once into flxn-shared and used by both dev and prod.
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
namespace: flxn-shared

resources:
  - namespace.yaml
  - supertokens-deployment.yaml
  - supertokens-service.yaml
  - supertokens-postgres-deployment.yaml
  - supertokens-postgres-service.yaml
  - supertokens-db-pvc.yaml

labels:
  - pairs:
      environment: shared

View File

@@ -0,0 +1,4 @@
---
# Namespace for environment-agnostic shared services.
apiVersion: v1
kind: Namespace
metadata:
  name: flxn-shared

View File

@@ -0,0 +1,13 @@
---
# Persistent storage for the SuperTokens Postgres database.
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: supertokens-db-data
  labels:
    app: flxn
    component: supertokens-db
spec:
  accessModes:
    - ReadWriteOnce
  resources:
    requests:
      storage: 2Gi

View File

@@ -0,0 +1,71 @@
---
# Deployment for the SuperTokens core (Postgres-backed auth service).
apiVersion: apps/v1
kind: Deployment
metadata:
  name: supertokens
  labels:
    app: flxn
    component: supertokens
spec:
  replicas: 1
  selector:
    matchLabels:
      app: flxn
      component: supertokens
  template:
    metadata:
      labels:
        app: flxn
        component: supertokens
    spec:
      # Prevent Kubernetes from injecting *_PORT service-link env vars, which
      # would collide with the PORT variable the container expects.
      enableServiceLinks: false
      containers:
        - name: supertokens
          image: registry.supertokens.io/supertokens/supertokens-postgresql:latest
          ports:
            - containerPort: 3567
              name: http
              protocol: TCP
          env:
            - name: PORT
              value: "3567"
            - name: POSTGRESQL_USER
              value: supertokens
            - name: POSTGRESQL_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: supertokens-secrets
                  key: postgres_password
            - name: POSTGRESQL_HOST
              value: supertokens-db
            - name: POSTGRESQL_PORT
              value: "5432"
            - name: POSTGRESQL_DATABASE_NAME
              value: supertokens
            - name: API_KEYS
              valueFrom:
                secretKeyRef:
                  name: supertokens-secrets
                  key: api_keys
          resources:
            requests:
              memory: "256Mi"
              cpu: "100m"
            limits:
              memory: "512Mi"
              cpu: "500m"
          livenessProbe:
            httpGet:
              path: /hello
              port: 3567
            initialDelaySeconds: 30
            periodSeconds: 30
            timeoutSeconds: 5
            failureThreshold: 3
          readinessProbe:
            httpGet:
              path: /hello
              port: 3567
            initialDelaySeconds: 10
            periodSeconds: 10
            timeoutSeconds: 3
            failureThreshold: 3

View File

@@ -0,0 +1,68 @@
---
# Postgres database backing the SuperTokens core, persisted on the
# supertokens-db-data PVC. RWO storage + 1 replica: do not scale this up.
apiVersion: apps/v1
kind: Deployment
metadata:
  name: supertokens-db
  labels:
    app: flxn
    component: supertokens-db
spec:
  replicas: 1
  selector:
    matchLabels:
      app: flxn
      component: supertokens-db
  template:
    metadata:
      labels:
        app: flxn
        component: supertokens-db
    spec:
      containers:
        - name: postgres
          image: postgres:16-alpine
          ports:
            - containerPort: 5432
              name: postgres
              protocol: TCP
          env:
            - name: POSTGRES_USER
              value: "supertokens"
            - name: POSTGRES_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: supertokens-secrets
                  key: postgres_password
            - name: POSTGRES_DB
              value: "supertokens"
            # Subdirectory keeps postgres data off the volume's lost+found dir.
            - name: PGDATA
              value: "/var/lib/postgresql/data/pgdata"
          volumeMounts:
            - name: postgres-data
              mountPath: /var/lib/postgresql/data
          resources:
            requests:
              memory: "128Mi"
              cpu: "50m"
            limits:
              memory: "256Mi"
              cpu: "500m"
          livenessProbe:
            exec:
              command:
                - pg_isready
                - -U
                - supertokens
            initialDelaySeconds: 30
            periodSeconds: 10
          readinessProbe:
            exec:
              command:
                - pg_isready
                - -U
                - supertokens
            initialDelaySeconds: 5
            periodSeconds: 5
      volumes:
        - name: postgres-data
          persistentVolumeClaim:
            claimName: supertokens-db-data

View File

@@ -0,0 +1,17 @@
---
# Cluster-internal Service for the SuperTokens Postgres database.
apiVersion: v1
kind: Service
metadata:
  name: supertokens-db
  labels:
    app: flxn
    component: supertokens-db
spec:
  type: ClusterIP
  ports:
    - port: 5432
      targetPort: 5432
      protocol: TCP
      name: postgres
  selector:
    app: flxn
    component: supertokens-db

View File

@@ -0,0 +1,18 @@
---
# NodePort Service exposing the SuperTokens core on 30568 (referenced by the
# environment ConfigMaps via the node IP).
apiVersion: v1
kind: Service
metadata:
  name: supertokens
  labels:
    app: flxn
    component: supertokens
spec:
  type: NodePort
  ports:
    - port: 3567
      targetPort: 3567
      nodePort: 30568
      protocol: TCP
      name: http
  selector:
    app: flxn
    component: supertokens

View File

@@ -6,8 +6,7 @@
"scripts": {
"dev": "vite dev --host 0.0.0.0",
"build": "vite build && tsc --noEmit",
"start": "bun run .output/server/index.mjs",
"start:node": "node .output/server/index.mjs"
"start": "bun run server.ts"
},
"dependencies": {
"@hello-pangea/dnd": "^18.0.1",
@@ -23,10 +22,10 @@
"@tanstack/react-devtools": "^0.7.6",
"@tanstack/react-query": "^5.66.0",
"@tanstack/react-query-devtools": "^5.66.0",
"@tanstack/react-router": "^1.130.12",
"@tanstack/react-router-devtools": "^1.130.13",
"@tanstack/react-router-with-query": "^1.130.12",
"@tanstack/react-start": "^1.132.2",
"@tanstack/react-router": "^1.143.6",
"@tanstack/react-router-devtools": "^1.143.6",
"@tanstack/react-router-ssr-query": "^1.143.6",
"@tanstack/react-start": "^1.143.6",
"@tanstack/react-virtual": "^3.13.12",
"@tiptap/pm": "^3.4.3",
"@tiptap/react": "^3.4.3",
@@ -57,7 +56,6 @@
"zustand": "^5.0.7"
},
"devDependencies": {
"@tanstack/react-router-ssr-query": "^1.132.2",
"@tanstack/router-plugin": "^1.132.2",
"@types/node": "^22.5.4",
"@types/pg": "^8.15.5",

View File

@@ -1,8 +1,12 @@
/// <reference path="../pb_data/types.d.ts" />
migrate((app) => {
const collection = app.findCollectionByNameOrId("pbc_4251874343");
return app.delete(collection);
try {
const collection = app.findCollectionByNameOrId("pbc_4251874343");
return app.delete(collection);
} catch (e) {
console.log("Collection pbc_4251874343 not found, skipping deletion");
return null;
}
}, (app) => {
const collection = new Collection({
"createRule": null,

407
server.ts
View File

@@ -16,73 +16,133 @@
* - Server port number
* - Default: 3000
*
* STATIC_PRELOAD_MAX_BYTES (number)
* ASSET_PRELOAD_MAX_SIZE (number)
* - Maximum file size in bytes to preload into memory
* - Files larger than this will be served on-demand from disk
* - Default: 5242880 (5MB)
* - Example: STATIC_PRELOAD_MAX_BYTES=5242880 (5MB)
* - Example: ASSET_PRELOAD_MAX_SIZE=5242880 (5MB)
*
* STATIC_PRELOAD_INCLUDE (string)
* ASSET_PRELOAD_INCLUDE_PATTERNS (string)
* - Comma-separated list of glob patterns for files to include
* - If specified, only matching files are eligible for preloading
* - Patterns are matched against filenames only, not full paths
* - Example: STATIC_PRELOAD_INCLUDE="*.js,*.css,*.woff2"
* - Example: ASSET_PRELOAD_INCLUDE_PATTERNS="*.js,*.css,*.woff2"
*
* STATIC_PRELOAD_EXCLUDE (string)
* ASSET_PRELOAD_EXCLUDE_PATTERNS (string)
* - Comma-separated list of glob patterns for files to exclude
* - Applied after include patterns
* - Patterns are matched against filenames only, not full paths
* - Example: STATIC_PRELOAD_EXCLUDE="*.map,*.txt"
* - Example: ASSET_PRELOAD_EXCLUDE_PATTERNS="*.map,*.txt"
*
* STATIC_PRELOAD_VERBOSE (boolean)
* ASSET_PRELOAD_VERBOSE_LOGGING (boolean)
* - Enable detailed logging of loaded and skipped files
* - Default: false
* - Set to "true" to enable verbose output
*
* ASSET_PRELOAD_ENABLE_ETAG (boolean)
* - Enable ETag generation for preloaded assets
* - Default: true
* - Set to "false" to disable ETag support
*
* ASSET_PRELOAD_ENABLE_GZIP (boolean)
* - Enable Gzip compression for eligible assets
* - Default: true
* - Set to "false" to disable Gzip compression
*
* ASSET_PRELOAD_GZIP_MIN_SIZE (number)
* - Minimum file size in bytes required for Gzip compression
* - Files smaller than this will not be compressed
* - Default: 1024 (1KB)
*
* ASSET_PRELOAD_GZIP_MIME_TYPES (string)
* - Comma-separated list of MIME types eligible for Gzip compression
* - Supports partial matching for types ending with "/"
* - Default: text/,application/javascript,application/json,application/xml,image/svg+xml
*
* Usage:
* bun run server.ts
*/
import { readdir } from 'node:fs/promises'
import { join } from 'node:path'
import path from 'node:path'
// Configuration
const PORT = Number(process.env.PORT ?? 3000)
const CLIENT_DIR = './dist/client'
const SERVER_ENTRY = './dist/server/server.js'
const SERVER_PORT = Number(process.env.PORT ?? 3000)
const CLIENT_DIRECTORY = './dist/client'
const SERVER_ENTRY_POINT = './dist/server/server.js'
// Logging utilities for professional output
const log = {
info: (message: string) => {
console.log(`[INFO] ${message}`)
},
success: (message: string) => {
console.log(`[SUCCESS] ${message}`)
},
warning: (message: string) => {
console.log(`[WARNING] ${message}`)
},
error: (message: string) => {
console.log(`[ERROR] ${message}`)
},
header: (message: string) => {
console.log(`\n${message}\n`)
},
}
// Preloading configuration from environment variables
const MAX_PRELOAD_BYTES = Number(
process.env.STATIC_PRELOAD_MAX_BYTES ?? 5 * 1024 * 1024, // 5MB default
process.env.ASSET_PRELOAD_MAX_SIZE ?? 5 * 1024 * 1024, // 5MB default
)
// Parse comma-separated include patterns (no defaults)
const INCLUDE_PATTERNS = (process.env.STATIC_PRELOAD_INCLUDE ?? '')
const INCLUDE_PATTERNS = (process.env.ASSET_PRELOAD_INCLUDE_PATTERNS ?? '')
.split(',')
.map((s) => s.trim())
.filter(Boolean)
.map(globToRegExp)
.map((pattern: string) => convertGlobToRegExp(pattern))
// Parse comma-separated exclude patterns (no defaults)
const EXCLUDE_PATTERNS = (process.env.STATIC_PRELOAD_EXCLUDE ?? '')
const EXCLUDE_PATTERNS = (process.env.ASSET_PRELOAD_EXCLUDE_PATTERNS ?? '')
.split(',')
.map((s) => s.trim())
.filter(Boolean)
.map(globToRegExp)
.map((pattern: string) => convertGlobToRegExp(pattern))
// Verbose logging flag
const VERBOSE = process.env.STATIC_PRELOAD_VERBOSE === 'true'
const VERBOSE = process.env.ASSET_PRELOAD_VERBOSE_LOGGING === 'true'
// Optional ETag feature
const ENABLE_ETAG = (process.env.ASSET_PRELOAD_ENABLE_ETAG ?? 'true') === 'true'
// Optional Gzip feature
const ENABLE_GZIP = (process.env.ASSET_PRELOAD_ENABLE_GZIP ?? 'true') === 'true'
const GZIP_MIN_BYTES = Number(process.env.ASSET_PRELOAD_GZIP_MIN_SIZE ?? 1024) // 1KB
const GZIP_TYPES = (
process.env.ASSET_PRELOAD_GZIP_MIME_TYPES ??
'text/,application/javascript,application/json,application/xml,image/svg+xml'
)
.split(',')
.map((v) => v.trim())
.filter(Boolean)
/**
* Convert a simple glob pattern to a regular expression
* Supports * wildcard for matching any characters
*/
function globToRegExp(glob: string): RegExp {
function convertGlobToRegExp(globPattern: string): RegExp {
// Escape regex special chars except *, then replace * with .*
const escaped = glob
const escapedPattern = globPattern
.replace(/[-/\\^$+?.()|[\]{}]/g, '\\$&')
.replace(/\*/g, '.*')
return new RegExp(`^${escaped}$`, 'i')
return new RegExp(`^${escapedPattern}$`, 'i')
}
/**
* Compute ETag for a given data buffer
*/
function computeEtag(data: Uint8Array): string {
const hash = Bun.hash(data)
return `W/"${hash.toString(16)}-${data.byteLength.toString()}"`
}
/**
@@ -95,18 +155,30 @@ interface AssetMetadata {
}
/**
* Result of static asset preloading process
* In-memory asset with ETag and Gzip support
*/
interface PreloadResult {
routes: Record<string, () => Response>
loaded: Array<AssetMetadata>
skipped: Array<AssetMetadata>
interface InMemoryAsset {
raw: Uint8Array
gz?: Uint8Array
etag?: string
type: string
immutable: boolean
size: number
}
/**
* Check if a file should be included based on configured patterns
* Result of static asset preloading process
*/
function shouldInclude(relativePath: string): boolean {
interface PreloadResult {
routes: Record<string, (req: Request) => Response | Promise<Response>>
loaded: AssetMetadata[]
skipped: AssetMetadata[]
}
/**
* Check if a file is eligible for preloading based on configured patterns
*/
function isFileEligibleForPreloading(relativePath: string): boolean {
const fileName = relativePath.split(/[/\\]/).pop() ?? relativePath
// If include patterns are specified, file must match at least one
@@ -125,38 +197,122 @@ function shouldInclude(relativePath: string): boolean {
}
/**
* Build static routes with intelligent preloading strategy
* Check if a MIME type is compressible
*/
function isMimeTypeCompressible(mimeType: string): boolean {
return GZIP_TYPES.some((type) =>
type.endsWith('/') ? mimeType.startsWith(type) : mimeType === type,
)
}
/**
* Conditionally compress data based on size and MIME type
*/
function compressDataIfAppropriate(
data: Uint8Array,
mimeType: string,
): Uint8Array | undefined {
if (!ENABLE_GZIP) return undefined
if (data.byteLength < GZIP_MIN_BYTES) return undefined
if (!isMimeTypeCompressible(mimeType)) return undefined
try {
return Bun.gzipSync(data.buffer as ArrayBuffer)
} catch {
return undefined
}
}
/**
* Create response handler function with ETag and Gzip support
*/
function createResponseHandler(
asset: InMemoryAsset,
): (req: Request) => Response {
return (req: Request) => {
const headers: Record<string, string> = {
'Content-Type': asset.type,
'Cache-Control': asset.immutable
? 'public, max-age=31536000, immutable'
: 'public, max-age=3600',
}
if (ENABLE_ETAG && asset.etag) {
const ifNone = req.headers.get('if-none-match')
if (ifNone && ifNone === asset.etag) {
return new Response(null, {
status: 304,
headers: { ETag: asset.etag },
})
}
headers.ETag = asset.etag
}
if (
ENABLE_GZIP &&
asset.gz &&
req.headers.get('accept-encoding')?.includes('gzip')
) {
headers['Content-Encoding'] = 'gzip'
headers['Content-Length'] = String(asset.gz.byteLength)
const gzCopy = new Uint8Array(asset.gz)
return new Response(gzCopy, { status: 200, headers })
}
headers['Content-Length'] = String(asset.raw.byteLength)
const rawCopy = new Uint8Array(asset.raw)
return new Response(rawCopy, { status: 200, headers })
}
}
/**
* Create composite glob pattern from include patterns
*/
function createCompositeGlobPattern(): Bun.Glob {
const raw = (process.env.ASSET_PRELOAD_INCLUDE_PATTERNS ?? '')
.split(',')
.map((s) => s.trim())
.filter(Boolean)
if (raw.length === 0) return new Bun.Glob('**/*')
if (raw.length === 1) return new Bun.Glob(raw[0])
return new Bun.Glob(`{${raw.join(',')}}`)
}
/**
* Initialize static routes with intelligent preloading strategy
* Small files are loaded into memory, large files are served on-demand
*/
async function buildStaticRoutes(clientDir: string): Promise<PreloadResult> {
const routes: Record<string, () => Response> = {}
const loaded: Array<AssetMetadata> = []
const skipped: Array<AssetMetadata> = []
async function initializeStaticRoutes(
clientDirectory: string,
): Promise<PreloadResult> {
const routes: Record<string, (req: Request) => Response | Promise<Response>> =
{}
const loaded: AssetMetadata[] = []
const skipped: AssetMetadata[] = []
console.log(`📦 Loading static assets from ${clientDir}...`)
console.log(
` Max preload size: ${(MAX_PRELOAD_BYTES / 1024 / 1024).toFixed(2)} MB`,
)
if (INCLUDE_PATTERNS.length > 0) {
log.info(`Loading static assets from ${clientDirectory}...`)
if (VERBOSE) {
console.log(
` Include patterns: ${process.env.STATIC_PRELOAD_INCLUDE ?? ''}`,
)
}
if (EXCLUDE_PATTERNS.length > 0) {
console.log(
` Exclude patterns: ${process.env.STATIC_PRELOAD_EXCLUDE ?? ''}`,
`Max preload size: ${(MAX_PRELOAD_BYTES / 1024 / 1024).toFixed(2)} MB`,
)
if (INCLUDE_PATTERNS.length > 0) {
console.log(
`Include patterns: ${process.env.ASSET_PRELOAD_INCLUDE_PATTERNS ?? ''}`,
)
}
if (EXCLUDE_PATTERNS.length > 0) {
console.log(
`Exclude patterns: ${process.env.ASSET_PRELOAD_EXCLUDE_PATTERNS ?? ''}`,
)
}
}
let totalPreloadedBytes = 0
try {
// Read all files recursively
const files = await readdir(clientDir, { recursive: true })
for (const relativePath of files) {
const filepath = join(clientDir, relativePath)
const route = '/' + relativePath.replace(/\\/g, '/') // Handle Windows paths
const glob = createCompositeGlobPattern()
for await (const relativePath of glob.scan({ cwd: clientDirectory })) {
const filepath = path.join(clientDirectory, relativePath)
const route = `/${relativePath.split(path.sep).join(path.posix.sep)}`
try {
// Get file metadata
@@ -174,20 +330,23 @@ async function buildStaticRoutes(clientDir: string): Promise<PreloadResult> {
}
// Determine if file should be preloaded
const matchesPattern = shouldInclude(relativePath)
const matchesPattern = isFileEligibleForPreloading(relativePath)
const withinSizeLimit = file.size <= MAX_PRELOAD_BYTES
if (matchesPattern && withinSizeLimit) {
// Preload small files into memory
const bytes = await file.bytes()
routes[route] = () =>
new Response(bytes, {
headers: {
'Content-Type': metadata.type,
'Cache-Control': 'public, max-age=31536000, immutable',
},
})
// Preload small files into memory with ETag and Gzip support
const bytes = new Uint8Array(await file.arrayBuffer())
const gz = compressDataIfAppropriate(bytes, metadata.type)
const etag = ENABLE_ETAG ? computeEtag(bytes) : undefined
const asset: InMemoryAsset = {
raw: bytes,
gz,
etag,
type: metadata.type,
immutable: true,
size: bytes.byteLength,
}
routes[route] = createResponseHandler(asset)
loaded.push({ ...metadata, size: bytes.byteLength })
totalPreloadedBytes += bytes.byteLength
@@ -207,13 +366,13 @@ async function buildStaticRoutes(clientDir: string): Promise<PreloadResult> {
}
} catch (error: unknown) {
if (error instanceof Error && error.name !== 'EISDIR') {
console.error(`Failed to load ${filepath}:`, error)
log.error(`Failed to load ${filepath}: ${error.message}`)
}
}
}
// Always show file overview in Vite-like format first
if (loaded.length > 0 || skipped.length > 0) {
// Show detailed file overview only when verbose mode is enabled
if (VERBOSE && (loaded.length > 0 || skipped.length > 0)) {
const allFiles = [...loaded, ...skipped].sort((a, b) =>
a.route.localeCompare(b.route),
)
@@ -224,124 +383,162 @@ async function buildStaticRoutes(clientDir: string): Promise<PreloadResult> {
60,
)
// Format file size with KB and gzip estimation
const formatFileSize = (bytes: number) => {
// Format file size with KB and actual gzip size
const formatFileSize = (bytes: number, gzBytes?: number) => {
const kb = bytes / 1024
// Rough gzip estimation (typically 30-70% compression)
const sizeStr = kb < 100 ? kb.toFixed(2) : kb.toFixed(1)
if (gzBytes !== undefined) {
const gzKb = gzBytes / 1024
const gzStr = gzKb < 100 ? gzKb.toFixed(2) : gzKb.toFixed(1)
return {
size: sizeStr,
gzip: gzStr,
}
}
// Rough gzip estimation (typically 30-70% compression) if no actual gzip data
const gzipKb = kb * 0.35
return {
size: kb < 100 ? kb.toFixed(2) : kb.toFixed(1),
size: sizeStr,
gzip: gzipKb < 100 ? gzipKb.toFixed(2) : gzipKb.toFixed(1),
}
}
if (loaded.length > 0) {
console.log('\n📁 Preloaded into memory:')
console.log(
'Path │ Size │ Gzip Size',
)
loaded
.sort((a, b) => a.route.localeCompare(b.route))
.forEach((file) => {
const { size, gzip } = formatFileSize(file.size)
const paddedPath = file.route.padEnd(maxPathLength)
const sizeStr = `${size.padStart(7)} kB`
const gzipStr = `gzip: ${gzip.padStart(6)} kB`
console.log(` ${paddedPath} ${sizeStr}${gzipStr}`)
const gzipStr = `${gzip.padStart(7)} kB`
console.log(`${paddedPath} ${sizeStr} ${gzipStr}`)
})
}
if (skipped.length > 0) {
console.log('\n💾 Served on-demand:')
console.log(
'Path │ Size │ Gzip Size',
)
skipped
.sort((a, b) => a.route.localeCompare(b.route))
.forEach((file) => {
const { size, gzip } = formatFileSize(file.size)
const paddedPath = file.route.padEnd(maxPathLength)
const sizeStr = `${size.padStart(7)} kB`
const gzipStr = `gzip: ${gzip.padStart(6)} kB`
console.log(` ${paddedPath} ${sizeStr}${gzipStr}`)
const gzipStr = `${gzip.padStart(7)} kB`
console.log(`${paddedPath} ${sizeStr} ${gzipStr}`)
})
}
}
// Show detailed verbose info if enabled
if (VERBOSE) {
// Show detailed verbose info if enabled
if (VERBOSE) {
if (loaded.length > 0 || skipped.length > 0) {
const allFiles = [...loaded, ...skipped].sort((a, b) =>
a.route.localeCompare(b.route),
)
console.log('\n📊 Detailed file information:')
console.log(
'Status │ Path │ MIME Type │ Reason',
)
allFiles.forEach((file) => {
const isPreloaded = loaded.includes(file)
const status = isPreloaded ? '[MEMORY]' : '[ON-DEMAND]'
const status = isPreloaded ? 'MEMORY' : 'ON-DEMAND'
const reason =
!isPreloaded && file.size > MAX_PRELOAD_BYTES
? ' (too large)'
? 'too large'
: !isPreloaded
? ' (filtered)'
: ''
? 'filtered'
: 'preloaded'
const route =
file.route.length > 30
? file.route.substring(0, 27) + '...'
: file.route
console.log(
` ${status.padEnd(12)} ${file.route} - ${file.type}${reason}`,
`${status.padEnd(12)} ${route.padEnd(30)} ${file.type.padEnd(28)}${reason.padEnd(10)}`,
)
})
} else {
console.log('\n📊 No files found to display')
}
}
// Log summary after the file list
console.log() // Empty line for separation
if (loaded.length > 0) {
console.log(
`Preloaded ${String(loaded.length)} files (${(totalPreloadedBytes / 1024 / 1024).toFixed(2)} MB) into memory`,
log.success(
`Preloaded ${String(loaded.length)} files (${(totalPreloadedBytes / 1024 / 1024).toFixed(2)} MB) into memory`,
)
} else {
console.log(' No files preloaded into memory')
log.info('No files preloaded into memory')
}
if (skipped.length > 0) {
const tooLarge = skipped.filter((f) => f.size > MAX_PRELOAD_BYTES).length
const filtered = skipped.length - tooLarge
console.log(
` ${String(skipped.length)} files will be served on-demand (${String(tooLarge)} too large, ${String(filtered)} filtered)`,
log.info(
`${String(skipped.length)} files will be served on-demand (${String(tooLarge)} too large, ${String(filtered)} filtered)`,
)
}
} catch (error) {
console.error(`❌ Failed to load static files from ${clientDir}:`, error)
log.error(
`Failed to load static files from ${clientDirectory}: ${String(error)}`,
)
}
return { routes, loaded, skipped }
}
/**
* Start the production server
* Initialize the server
*/
async function startServer() {
console.log('🚀 Starting production server...')
async function initializeServer() {
log.header('Starting Production Server')
// Load TanStack Start server handler
let handler: { fetch: (request: Request) => Response | Promise<Response> }
try {
const serverModule = (await import(SERVER_ENTRY)) as {
const serverModule = (await import(SERVER_ENTRY_POINT)) as {
default: { fetch: (request: Request) => Response | Promise<Response> }
}
handler = serverModule.default
console.log('TanStack Start handler loaded')
log.success('TanStack Start application handler initialized')
} catch (error) {
console.error('❌ Failed to load server handler:', error)
log.error(`Failed to load server handler: ${String(error)}`)
process.exit(1)
}
// Build static routes with intelligent preloading
const { routes } = await buildStaticRoutes(CLIENT_DIR)
const { routes } = await initializeStaticRoutes(CLIENT_DIRECTORY)
// Create Bun server
const server = Bun.serve({
port: PORT,
idleTimeout: 255,
port: SERVER_PORT,
routes: {
// Serve static assets (preloaded or on-demand)
...routes,
// Fallback to TanStack Start handler for all other routes
'/*': (request) => {
'/*': async (req: Request) => {
try {
return handler.fetch(request)
const h3Response = await handler.fetch(req)
const body = await h3Response.arrayBuffer()
return new Response(body, {
status: h3Response.status,
statusText: h3Response.statusText,
headers: h3Response.headers,
})
} catch (error) {
console.error('Server handler error:', error)
log.error(`Server handler error: ${String(error)}`)
return new Response('Internal Server Error', { status: 500 })
}
},
@@ -349,18 +546,18 @@ async function startServer() {
// Global error handler
error(error) {
console.error('Uncaught server error:', error)
log.error(
`Uncaught server error: ${error instanceof Error ? error.message : String(error)}`,
)
return new Response('Internal Server Error', { status: 500 })
},
})
console.log(
`\n🚀 Server running at http://localhost:${String(server.port)}\n`,
)
log.success(`Server listening on http://localhost:${String(server.port)}`)
}
// Start the server
startServer().catch((error: unknown) => {
console.error('Failed to start server:', error)
// Initialize the server
initializeServer().catch((error: unknown) => {
log.error(`Failed to start server: ${String(error)}`)
process.exit(1)
})

View File

@@ -14,6 +14,7 @@ import { Route as LogoutRouteImport } from './routes/logout'
import { Route as LoginRouteImport } from './routes/login'
import { Route as AuthedRouteImport } from './routes/_authed'
import { Route as AuthedIndexRouteImport } from './routes/_authed/index'
import { Route as ApiHealthRouteImport } from './routes/api/health'
import { Route as AuthedStatsRouteImport } from './routes/_authed/stats'
import { Route as AuthedSettingsRouteImport } from './routes/_authed/settings'
import { Route as AuthedBadgesRouteImport } from './routes/_authed/badges'
@@ -67,6 +68,11 @@ const AuthedIndexRoute = AuthedIndexRouteImport.update({
path: '/',
getParentRoute: () => AuthedRoute,
} as any)
const ApiHealthRoute = ApiHealthRouteImport.update({
id: '/api/health',
path: '/api/health',
getParentRoute: () => rootRouteImport,
} as any)
const AuthedStatsRoute = AuthedStatsRouteImport.update({
id: '/stats',
path: '/stats',
@@ -217,6 +223,7 @@ const AuthedAdminTournamentsIdTeamsRoute =
} as any)
export interface FileRoutesByFullPath {
'/': typeof AuthedIndexRoute
'/login': typeof LoginRoute
'/logout': typeof LogoutRoute
'/refresh-session': typeof RefreshSessionRoute
@@ -224,7 +231,7 @@ export interface FileRoutesByFullPath {
'/badges': typeof AuthedBadgesRoute
'/settings': typeof AuthedSettingsRoute
'/stats': typeof AuthedStatsRoute
'/': typeof AuthedIndexRoute
'/api/health': typeof ApiHealthRoute
'/admin/activities': typeof AuthedAdminActivitiesRoute
'/admin/badges': typeof AuthedAdminBadgesRoute
'/admin/preview': typeof AuthedAdminPreviewRoute
@@ -242,13 +249,13 @@ export interface FileRoutesByFullPath {
'/api/teams/upload-logo': typeof ApiTeamsUploadLogoRoute
'/api/tournaments/upload-logo': typeof ApiTournamentsUploadLogoRoute
'/admin/': typeof AuthedAdminIndexRoute
'/tournaments': typeof AuthedTournamentsIndexRoute
'/tournaments/': typeof AuthedTournamentsIndexRoute
'/tournaments/$id/bracket': typeof AuthedTournamentsIdBracketRoute
'/admin/tournaments': typeof AuthedAdminTournamentsIndexRoute
'/admin/tournaments/': typeof AuthedAdminTournamentsIndexRoute
'/admin/tournaments/$id/teams': typeof AuthedAdminTournamentsIdTeamsRoute
'/admin/tournaments/run/$id': typeof AuthedAdminTournamentsRunIdRoute
'/api/files/$collection/$recordId/$file': typeof ApiFilesCollectionRecordIdFileRoute
'/admin/tournaments/$id': typeof AuthedAdminTournamentsIdIndexRoute
'/admin/tournaments/$id/': typeof AuthedAdminTournamentsIdIndexRoute
}
export interface FileRoutesByTo {
'/login': typeof LoginRoute
@@ -257,6 +264,7 @@ export interface FileRoutesByTo {
'/badges': typeof AuthedBadgesRoute
'/settings': typeof AuthedSettingsRoute
'/stats': typeof AuthedStatsRoute
'/api/health': typeof ApiHealthRoute
'/': typeof AuthedIndexRoute
'/admin/activities': typeof AuthedAdminActivitiesRoute
'/admin/badges': typeof AuthedAdminBadgesRoute
@@ -293,6 +301,7 @@ export interface FileRoutesById {
'/_authed/badges': typeof AuthedBadgesRoute
'/_authed/settings': typeof AuthedSettingsRoute
'/_authed/stats': typeof AuthedStatsRoute
'/api/health': typeof ApiHealthRoute
'/_authed/': typeof AuthedIndexRoute
'/_authed/admin/activities': typeof AuthedAdminActivitiesRoute
'/_authed/admin/badges': typeof AuthedAdminBadgesRoute
@@ -322,6 +331,7 @@ export interface FileRoutesById {
export interface FileRouteTypes {
fileRoutesByFullPath: FileRoutesByFullPath
fullPaths:
| '/'
| '/login'
| '/logout'
| '/refresh-session'
@@ -329,7 +339,7 @@ export interface FileRouteTypes {
| '/badges'
| '/settings'
| '/stats'
| '/'
| '/api/health'
| '/admin/activities'
| '/admin/badges'
| '/admin/preview'
@@ -347,13 +357,13 @@ export interface FileRouteTypes {
| '/api/teams/upload-logo'
| '/api/tournaments/upload-logo'
| '/admin/'
| '/tournaments'
| '/tournaments/'
| '/tournaments/$id/bracket'
| '/admin/tournaments'
| '/admin/tournaments/'
| '/admin/tournaments/$id/teams'
| '/admin/tournaments/run/$id'
| '/api/files/$collection/$recordId/$file'
| '/admin/tournaments/$id'
| '/admin/tournaments/$id/'
fileRoutesByTo: FileRoutesByTo
to:
| '/login'
@@ -362,6 +372,7 @@ export interface FileRouteTypes {
| '/badges'
| '/settings'
| '/stats'
| '/api/health'
| '/'
| '/admin/activities'
| '/admin/badges'
@@ -397,6 +408,7 @@ export interface FileRouteTypes {
| '/_authed/badges'
| '/_authed/settings'
| '/_authed/stats'
| '/api/health'
| '/_authed/'
| '/_authed/admin/activities'
| '/_authed/admin/badges'
@@ -429,6 +441,7 @@ export interface RootRouteChildren {
LoginRoute: typeof LoginRoute
LogoutRoute: typeof LogoutRoute
RefreshSessionRoute: typeof RefreshSessionRoute
ApiHealthRoute: typeof ApiHealthRoute
ApiAuthSplatRoute: typeof ApiAuthSplatRoute
ApiEventsSplatRoute: typeof ApiEventsSplatRoute
ApiSpotifyCallbackRoute: typeof ApiSpotifyCallbackRoute
@@ -468,7 +481,7 @@ declare module '@tanstack/react-router' {
'/_authed': {
id: '/_authed'
path: ''
fullPath: ''
fullPath: '/'
preLoaderRoute: typeof AuthedRouteImport
parentRoute: typeof rootRouteImport
}
@@ -479,6 +492,13 @@ declare module '@tanstack/react-router' {
preLoaderRoute: typeof AuthedIndexRouteImport
parentRoute: typeof AuthedRoute
}
'/api/health': {
id: '/api/health'
path: '/api/health'
fullPath: '/api/health'
preLoaderRoute: typeof ApiHealthRouteImport
parentRoute: typeof rootRouteImport
}
'/_authed/stats': {
id: '/_authed/stats'
path: '/stats'
@@ -510,7 +530,7 @@ declare module '@tanstack/react-router' {
'/_authed/tournaments/': {
id: '/_authed/tournaments/'
path: '/tournaments'
fullPath: '/tournaments'
fullPath: '/tournaments/'
preLoaderRoute: typeof AuthedTournamentsIndexRouteImport
parentRoute: typeof AuthedRoute
}
@@ -636,7 +656,7 @@ declare module '@tanstack/react-router' {
'/_authed/admin/tournaments/': {
id: '/_authed/admin/tournaments/'
path: '/tournaments'
fullPath: '/admin/tournaments'
fullPath: '/admin/tournaments/'
preLoaderRoute: typeof AuthedAdminTournamentsIndexRouteImport
parentRoute: typeof AuthedAdminRoute
}
@@ -650,7 +670,7 @@ declare module '@tanstack/react-router' {
'/_authed/admin/tournaments/$id/': {
id: '/_authed/admin/tournaments/$id/'
path: '/tournaments/$id'
fullPath: '/admin/tournaments/$id'
fullPath: '/admin/tournaments/$id/'
preLoaderRoute: typeof AuthedAdminTournamentsIdIndexRouteImport
parentRoute: typeof AuthedAdminRoute
}
@@ -738,6 +758,7 @@ const rootRouteChildren: RootRouteChildren = {
LoginRoute: LoginRoute,
LogoutRoute: LogoutRoute,
RefreshSessionRoute: RefreshSessionRoute,
ApiHealthRoute: ApiHealthRoute,
ApiAuthSplatRoute: ApiAuthSplatRoute,
ApiEventsSplatRoute: ApiEventsSplatRoute,
ApiSpotifyCallbackRoute: ApiSpotifyCallbackRoute,

View File

@@ -0,0 +1,22 @@
import { createFileRoute } from "@tanstack/react-router";
/**
 * Health-check endpoint: GET /api/health.
 *
 * Returns a small JSON payload with a static "ok" status and the current
 * server timestamp. No auth and no database access — suitable for load
 * balancer / Kubernetes liveness probes.
 */
export const Route = createFileRoute("/api/health")({
  server: {
    handlers: {
      GET: () => {
        const payload = JSON.stringify({
          status: "ok",
          timestamp: new Date().toISOString(),
        });
        return new Response(payload, {
          status: 200,
          headers: {
            "Content-Type": "application/json",
            // Probes must always see a fresh response; never let an
            // intermediary or the browser cache a health result.
            "Cache-Control": "no-store",
          },
        });
      },
    },
  },
});

View File

@@ -1,6 +1,7 @@
import { createContext, useCallback, useEffect, useState, PropsWithChildren } from 'react';
import { SpotifyAuth } from '@/lib/spotify/auth';
import { useAuth } from './auth-context';
import { useConfig } from '@/hooks/use-config';
import type {
SpotifyContextType,
SpotifyAuthState,
@@ -23,15 +24,16 @@ export const SpotifyContext = createContext<SpotifyContextType | null>(null);
export const SpotifyProvider: React.FC<PropsWithChildren> = ({ children }) => {
const { roles } = useAuth();
const isAdmin = roles?.includes('Admin') || false;
const config = useConfig();
const [authState, setAuthState] = useState<SpotifyAuthState>(defaultSpotifyState);
const [currentTrack, setCurrentTrack] = useState<SpotifyTrack | null>(null);
const [playbackState, setPlaybackState] = useState<SpotifyPlaybackState | null>(null);
const [devices, setDevices] = useState<SpotifyDevice[]>([]);
const [activeDevice, setActiveDeviceState] = useState<SpotifyDevice | null>(null);
const [isLoading, setIsLoading] = useState(false);
const [error, setError] = useState<string | null>(null);
@@ -40,8 +42,8 @@ export const SpotifyProvider: React.FC<PropsWithChildren> = ({ children }) => {
const [isResumeLoading, setIsResumeLoading] = useState(false);
const spotifyAuth = new SpotifyAuth(
import.meta.env.VITE_SPOTIFY_CLIENT_ID!,
import.meta.env.VITE_SPOTIFY_REDIRECT_URI!
config.spotifyClientId,
config.spotifyRedirectUri
);
useEffect(() => {

12
src/hooks/use-config.ts Normal file
View File

@@ -0,0 +1,12 @@
import { useSuspenseQuery } from '@tanstack/react-query'
import { getConfig } from '@/lib/config'
/**
 * React hook exposing the server-provided runtime configuration.
 *
 * Suspends on first use while the config is fetched, then serves the
 * cached value for the rest of the session (staleTime: Infinity) under
 * the shared ['config'] query key.
 */
export function useConfig() {
  const query = useSuspenseQuery({
    queryKey: ['config'],
    queryFn: async () => getConfig(),
    staleTime: Infinity,
  })
  return query.data
}

19
src/lib/config.ts Normal file
View File

@@ -0,0 +1,19 @@
import { createServerFn } from '@tanstack/react-start'
// Server function returning the client-safe subset of runtime configuration.
// Only non-secret values belong here — the result is shipped to the browser.
export const getConfig = createServerFn({ method: 'GET' }).handler(async () => {
  return {
    // NOTE(review): these are process.env reads evaluated on the server at
    // request time; the VITE_ prefix is historical, not a Vite build-time
    // substitution here — confirm the vars are set in the prod environment.
    apiDomain: process.env.VITE_API_DOMAIN || 'http://localhost:3000',
    websiteDomain: process.env.VITE_WEBSITE_DOMAIN || 'http://localhost:3000',
    spotifyClientId: process.env.VITE_SPOTIFY_CLIENT_ID || '',
    spotifyRedirectUri: process.env.VITE_SPOTIFY_REDIRECT_URI || '',
  }
})
// Server-only configuration object. Contains a secret — this module section
// must never be imported from client-side code.
export const serverConfig = {
  apiDomain: process.env.VITE_API_DOMAIN || 'http://localhost:3000',
  websiteDomain: process.env.VITE_WEBSITE_DOMAIN || 'http://localhost:3000',
  supertokensUri: process.env.SUPERTOKENS_URI || 'http://localhost:3567',
  pocketbaseUrl: process.env.POCKETBASE_URL || 'http://localhost:8090',
  spotifyClientId: process.env.VITE_SPOTIFY_CLIENT_ID || '',
  // Secret: only referenced by server-side Spotify token exchange.
  spotifyClientSecret: process.env.SPOTIFY_CLIENT_SECRET || '',
}

View File

@@ -1,7 +1,14 @@
/**
 * Resolve the origin used for SuperTokens URLs.
 * In the browser this is the current page origin (so the app works on any
 * deployed domain); during SSR it falls back to VITE_API_DOMAIN.
 */
const getOrigin = (): string => {
  if (typeof window !== 'undefined') {
    return window.location.origin;
  }
  return process.env.VITE_API_DOMAIN || 'http://localhost:3000';
};

// SuperTokens appInfo shared by the SDK configuration.
// Fix: the object literal previously declared apiDomain/websiteDomain twice
// (stale import.meta.env entries plus the getOrigin() entries); duplicate
// keys are a TS error and silently last-wins in JS — keep only the
// effective getOrigin() values.
export const appInfo = {
  appName: 'FLXN',
  apiDomain: getOrigin(),
  websiteDomain: getOrigin(),
  apiBasePath: '/api/auth',
  websiteBasePath: '/auth',
}

View File

@@ -1,12 +1,12 @@
import SuperTokens from "supertokens-node";
import Session from "supertokens-node/recipe/session";
import { TypeInput } from "supertokens-node/types";
import Dashboard from "supertokens-node/recipe/dashboard";
import UserRoles from "supertokens-node/recipe/userroles";
import { appInfo } from "./config";
import PasswordlessDevelopmentMode from "./recipes/passwordless-development-mode";
import { logger } from "./";
import PasswordlessTwilioVerify from "./recipes/passwordless-twilio-verify";
import { logger } from "./";
import type { TypeInput } from "supertokens-node/types";
export const backendConfig = (): TypeInput => {
return {
@@ -14,25 +14,26 @@ export const backendConfig = (): TypeInput => {
supertokens: {
connectionURI:
process.env.SUPERTOKENS_URI || "https://try.supertokens.io",
apiKey: process.env.SUPERTOKENS_API_KEY || undefined,
},
appInfo,
recipeList: [
//PasswordlessTwilioVerify.init(),
PasswordlessDevelopmentMode.init(),
process.env.NODE_ENV === 'production'
? PasswordlessTwilioVerify.init()
: PasswordlessDevelopmentMode.init(),
Session.init({
cookieSameSite: "lax",
cookieSecure: import.meta.env.NODE_ENV === "production",
cookieDomain:
import.meta.env.NODE_ENV === "production" ? ".example.com" : undefined,
antiCsrf: import.meta.env.NODE_ENV === "production" ? "VIA_TOKEN" : "NONE",
cookieSecure: process.env.NODE_ENV === "production",
cookieDomain: process.env.COOKIE_DOMAIN || undefined,
antiCsrf: process.env.NODE_ENV === "production" ? "VIA_TOKEN" : "NONE",
// Debug only
exposeAccessTokenToFrontendInCookieBasedAuth: true,
exposeAccessTokenToFrontendInCookieBasedAuth: process.env.NODE_ENV !== "production",
}),
Dashboard.init(),
UserRoles.init(),
],
telemetry: import.meta.env.NODE_ENV !== "production",
telemetry: process.env.NODE_ENV !== "production",
};
};

View File

@@ -4,10 +4,12 @@ import { getRequest } from "@tanstack/react-start/server";
export const serverFnLoggingMiddleware = createMiddleware({
type: "function",
}).server(async ({ next, data, functionId, context }) => {
}).server(async ({ next, data, context }) => {
const request = getRequest();
const serverFnName = functionId.split('--')[1]?.split('_')[0] || 'unknown';
const url = new URL(request.url);
const pathParts = url.pathname.split('/').filter(Boolean);
const serverFnName = pathParts[pathParts.length - 1] || 'unknown';
const userId = (context as any)?.metadata?.player_id || 'unknown';
const startTime = Date.now();

269
test.js
View File

@@ -1,269 +0,0 @@
import PocketBase from "pocketbase";
import * as xlsx from "xlsx";
import { nanoid } from "nanoid";
import { createTeamsService } from "./src/lib/pocketbase/services/teams.ts";
import { createPlayersService } from "./src/lib/pocketbase/services/players.ts";
import { createMatchesService } from "./src/lib/pocketbase/services/matches.ts";
import { createTournamentsService } from "./src/lib/pocketbase/services/tournaments.ts";
// --- Configuration ---
// Connection and auth values come from the environment so that no
// credentials live in source control.
// SECURITY: a previous revision hard-coded a real admin email/password on
// these lines — rotate that password if it was ever pushed.
const POCKETBASE_URL = process.env.POCKETBASE_URL || "http://127.0.0.1:8090";
const EXCEL_FILE_PATH = process.env.EXCEL_FILE_PATH || "./Teams-2.xlsx";
const ADMIN_EMAIL = process.env.PB_ADMIN_EMAIL || "";
const ADMIN_PASSWORD = process.env.PB_ADMIN_PASSWORD || "";
// --- Helpers ---
/**
 * Resolve a player's PocketBase ID, creating the player when none exists.
 *
 * @param {object} playersService service exposing createPlayer({first_name, last_name})
 * @param {string|undefined} nameColumn spreadsheet "P* Name" cell (may be blank)
 * @param {string|number|undefined} idColumn spreadsheet "P* ID" cell; when
 *   present it is trusted as an existing PocketBase ID and returned as-is
 * @returns {Promise<string>} existing or newly created player ID
 */
async function createPlayerIfMissing(playersService, nameColumn, idColumn) {
  // An explicit ID wins. Coerce to string first: xlsx may yield numeric
  // cells, and calling .trim() on a number would throw.
  const playerId = idColumn?.toString().trim();
  if (playerId) return playerId;

  let firstName, lastName;
  if (!nameColumn || !nameColumn.trim()) {
    // No name at all: generate a placeholder so the import can proceed.
    firstName = `Player_${nanoid(4)}`;
    lastName = "(Regional)";
  } else {
    // Split on any whitespace run, and keep every word after the first as
    // the last name so "Anna Maria Smith" is not truncated to "Maria".
    const parts = nameColumn.trim().split(/\s+/);
    firstName = parts[0];
    lastName = parts.slice(1).join(" ") || "(Regional)";
  }
  const newPlayer = await playersService.createPlayer({ first_name: firstName, last_name: lastName });
  return newPlayer.id;
}
/**
 * Import team rows from a "teams" sheet.
 *
 * For each row this: resolves/creates both players, derives a fallback team
 * name when the "Name" cell is blank, skips names already present in
 * PocketBase, then creates the team and (when a tournament mapping exists)
 * links it into that tournament's "teams" relation. Row failures are logged
 * and do not abort the remaining rows.
 *
 * @param {Array<object>} rows sheet rows from xlsx.utils.sheet_to_json
 * @param {object} teamsService service exposing createTeam(input)
 * @param {object} playersService service exposing createPlayer(input)
 * @param {object} pb raw PocketBase client (dedup query + relation update)
 * @param {Record<string,string>} tournamentIdMap spreadsheet tournament ID -> PB ID
 * @returns {Promise<Record<string,string>>} spreadsheet team ID -> PB team ID
 */
async function handleTeamsSheet(rows, teamsService, playersService, pb, tournamentIdMap = {}) {
  console.log(`📥 Importing ${rows.length} teams...`);
  const teamIdMap = {}; // spreadsheet ID -> PocketBase ID
  for (const [i, row] of rows.entries()) {
    try {
      // Spreadsheet ID is required to correlate teams with later sheets.
      const spreadsheetTeamId = row["ID"]?.toString().trim();
      if (!spreadsheetTeamId) {
        console.warn(`⚠️ [${i + 1}] Team row missing spreadsheet ID, skipping.`);
        continue;
      }
      const p1Id = await createPlayerIfMissing(playersService, row["P1 Name"], row["P1 ID"]);
      const p2Id = await createPlayerIfMissing(playersService, row["P2 Name"], row["P2 ID"]);
      let name = row["Name"]?.trim();
      if (!name) {
        // Fall back to "<first> and <first>" built from the player names.
        const p1First = row["P1 Name"]?.split(" ")[0] || "Player1";
        const p2First = row["P2 Name"]?.split(" ")[0] || "Player2";
        name = `${p1First} and ${p2First}`;
        console.warn(`⚠️ [${i + 1}] No team name found. Using generated name: ${name}`);
      }
      // Dedup by exact name.
      // NOTE(review): `name` is interpolated directly into the filter string;
      // a name containing a double quote would break (or inject into) the
      // query — consider pb.filter()/escaping if the sheet is untrusted.
      const existing = await pb.collection("teams").getFullList({
        filter: `name = "${name}"`,
        fields: "id",
      });
      if (existing.length > 0) {
        console.log(` [${i + 1}] Team "${name}" already exists, skipping.`);
        teamIdMap[spreadsheetTeamId] = existing[0].id;
        continue;
      }
      // If there's a tournament for this team, get its PB ID
      const tournamentSpreadsheetId = row["Tournament ID"]?.toString().trim();
      const tournamentId = tournamentSpreadsheetId ? tournamentIdMap[tournamentSpreadsheetId] : undefined;
      const teamInput = {
        name,
        primary_color: row.primary_color || "",
        accent_color: row.accent_color || "",
        logo: row.logo || "",
        players: [p1Id, p2Id],
        tournament: tournamentId, // single tournament relation,
        private: true
      };
      const team = await teamsService.createTeam(teamInput);
      teamIdMap[spreadsheetTeamId] = team.id;
      console.log(`✅ [${i + 1}] Created team: ${team.name} with players: ${[p1Id, p2Id].join(", ")}`);
      // Add the team to the tournament's "teams" relation
      if (tournamentId) {
        await pb.collection("tournaments").update(tournamentId, {
          "teams+": [team.id],
        });
        console.log(`✅ Added team "${team.name}" to tournament ${tournamentId}`);
      }
    } catch (err) {
      console.error(`❌ [${i + 1}] Failed to create team: ${err.message}`);
    }
  }
  return teamIdMap;
}
/**
 * Import tournament rows from a "tournament(s)" sheet.
 *
 * Rows missing an ID or Name are skipped with a warning; unrecognized
 * formats fall back to "double_elim". Every tournament is created with ALL
 * team IDs known so far.
 * NOTE(review): with the main loop's sheet order (tournaments before teams),
 * teamIdMap is typically still empty here — teams are linked later by the
 * teams/matches handlers instead; confirm this is intentional.
 *
 * @param {Array<object>} rows sheet rows from xlsx.utils.sheet_to_json
 * @param {object} tournamentsService service exposing createTournament(input)
 * @param {Record<string,string>} teamIdMap spreadsheet team ID -> PB ID
 * @param {object} pb PocketBase client (unused in this handler)
 * @returns {Promise<Record<string,string>>} spreadsheet tournament ID -> PB ID
 */
async function handleTournamentSheet(rows, tournamentsService, teamIdMap, pb) {
  console.log(`📥 Importing ${rows.length} tournaments...`);
  const tournamentIdMap = {};
  const validFormats = ["double_elim", "single_elim", "groups", "swiss", "swiss_bracket"];
  for (const [i, row] of rows.entries()) {
    try {
      const spreadsheetId = row["ID"]?.toString().trim();
      if (!spreadsheetId) {
        console.warn(`⚠️ [${i + 1}] Tournament missing spreadsheet ID, skipping.`);
        continue;
      }
      if (!row["Name"]) {
        console.warn(`⚠️ [${i + 1}] Tournament name missing, skipping.`);
        continue;
      }
      const format = validFormats.includes(row["Format"]) ? row["Format"] : "double_elim";
      // Convert start_time to ISO datetime string
      let startTime = null;
      if (row["Start Time"]) {
        try {
          // new Date(bad input) yields Invalid Date; the throw happens in
          // toISOString(), which this catch handles.
          startTime = new Date(row["Start Time"]).toISOString();
        } catch (e) {
          console.warn(`⚠️ [${i + 1}] Invalid start time format, using null`);
        }
      }
      const tournamentInput = {
        name: row["Name"],
        start_time: startTime,
        format,
        regional: true,
        teams: Object.values(teamIdMap), // Add all created teams
      };
      const tournament = await tournamentsService.createTournament(tournamentInput);
      tournamentIdMap[spreadsheetId] = tournament.id;
      console.log(`✅ [${i + 1}] Created tournament: ${tournament.name} with ${Object.values(teamIdMap).length} teams`);
    } catch (err) {
      console.error(`❌ [${i + 1}] Failed to create tournament: ${err.message}`);
    }
  }
  return tournamentIdMap;
}
/**
 * Import match rows from a "matches" sheet.
 *
 * Resolves home/away teams and the tournament through the spreadsheet→PB
 * ID maps, ensures both teams are linked to the tournament, creates each
 * match as "ended" with the recorded cup counts, then back-fills every
 * tournament's "matches" relation with one update per tournament.
 *
 * @param {Array<object>} rows sheet rows from xlsx.utils.sheet_to_json
 * @param {object} matchesService service exposing createMatch(data)
 * @param {Record<string,string>} teamIdMap spreadsheet team ID -> PB ID
 * @param {Record<string,string>} tournamentIdMap spreadsheet tournament ID -> PB ID
 * @param {object} pb raw PocketBase client for relation maintenance
 */
async function handleMatchesSheet(rows, matchesService, teamIdMap, tournamentIdMap, pb) {
  console.log(`📥 Importing ${rows.length} matches...`);
  const tournamentMatchesMap = {};
  for (const [i, row] of rows.entries()) {
    try {
      const homeId = teamIdMap[row["Home ID"]];
      const awayId = teamIdMap[row["Away ID"]];
      const tournamentId = tournamentIdMap[row["Tournament ID"]];
      if (!homeId || !awayId || !tournamentId) {
        console.warn(`⚠️ [${i + 1}] Could not find mapping for Home, Away, or Tournament, skipping.`);
        continue;
      }
      // --- Ensure the teams are linked to the tournament ---
      for (const teamId of [homeId, awayId]) {
        const team = await pb.collection("teams").getOne(teamId, { fields: "tournaments" });
        const tournaments = team.tournaments || [];
        if (!tournaments.includes(tournamentId)) {
          // Add tournament to team
          await pb.collection("teams").update(teamId, { "tournaments+": [tournamentId] });
          // Add team to tournament
          await pb.collection("tournaments").update(tournamentId, { "teams+": [teamId] });
          console.log(`✅ Linked team ${team.name} to tournament ${tournamentId}`);
        }
      }
      // --- Create match ---
      const data = {
        tournament: tournamentId,
        home: homeId,
        away: awayId,
        home_cups: Number(row["Home cups"] || 0),
        away_cups: Number(row["Away cups"] || 0),
        status: "ended", // imported matches are historical results
        lid: i+1 // 1-based match id preserving sheet order
      };
      const match = await matchesService.createMatch(data);
      console.log(`✅ [${i + 1}] Created match ID: ${match.id}`);
      if (!tournamentMatchesMap[tournamentId]) tournamentMatchesMap[tournamentId] = [];
      tournamentMatchesMap[tournamentId].push(match.id);
    } catch (err) {
      console.error(`❌ [${i + 1}] Failed to create match: ${err.message}`);
    }
  }
  // Update each tournament with the created match IDs
  for (const [tournamentId, matchIds] of Object.entries(tournamentMatchesMap)) {
    try {
      await pb.collection("tournaments").update(tournamentId, { "matches+": matchIds });
      console.log(`✅ Updated tournament ${tournamentId} with ${matchIds.length} matches`);
    } catch (err) {
      console.error(`❌ Failed to update tournament ${tournamentId} with matches: ${err.message}`);
    }
  }
}
// --- Main Import ---
/**
 * Entry point: authenticate against PocketBase as an admin, read the Excel
 * workbook, and import sheets in dependency order
 * (tournament(s) -> teams -> matches), carrying the spreadsheet→PB ID maps
 * between stages.
 *
 * NOTE(review): sheet names are matched case-insensitively but must equal
 * one of "tournament", "tournaments", "teams", "matches" exactly; any other
 * sheet is never processed (the inner `continue` skips it silently, so the
 * default branch below is effectively unreachable).
 */
export async function importExcel() {
  const pb = new PocketBase(POCKETBASE_URL);
  // Admin auth is required for the relation updates performed by handlers.
  await pb.admins.authWithPassword(ADMIN_EMAIL, ADMIN_PASSWORD);
  const teamsService = createTeamsService(pb);
  const playersService = createPlayersService(pb);
  const tournamentsService = createTournamentsService(pb);
  const matchesService = createMatchesService(pb);
  const workbook = xlsx.readFile(EXCEL_FILE_PATH);
  let teamIdMap = {};
  let tournamentIdMap = {};
  // Process sheets in correct order: Tournaments -> Teams -> Matches
  const sheetOrder = ["tournament", "tournaments", "teams", "matches"];
  const processedSheets = new Set();
  for (const sheetNamePattern of sheetOrder) {
    for (const sheetName of workbook.SheetNames) {
      if (processedSheets.has(sheetName)) continue;
      if (sheetName.toLowerCase() !== sheetNamePattern) continue;
      const worksheet = workbook.Sheets[sheetName];
      const rows = xlsx.utils.sheet_to_json(worksheet);
      console.log(`\n📘 Processing sheet: ${sheetName}`);
      switch (sheetName.toLowerCase()) {
        case "teams":
          teamIdMap = await handleTeamsSheet(rows, teamsService, playersService, pb, tournamentIdMap);
          break;
        case "tournament":
        case "tournaments":
          tournamentIdMap = await handleTournamentSheet(rows, tournamentsService, teamIdMap, pb);
          break;
        case "matches":
          await handleMatchesSheet(rows, matchesService, teamIdMap, tournamentIdMap, pb);
          break;
        default:
          console.log(`⚠️ No handler found for sheet '${sheetName}', skipping.`);
      }
      processedSheets.add(sheetName);
    }
  }
  console.log("\n🎉 All sheets imported successfully!");
}
// --- Run ---
importExcel().catch(console.error);

View File

@@ -6,7 +6,10 @@ import react from '@vitejs/plugin-react';
export default defineConfig({
server: {
port: 3000,
allowedHosts: ["dev.flexxon.app"]
allowedHosts: ["dev.flexxon.app", "flexxon.app"]
},
ssr: {
noExternal: true,
},
plugins: [
tsConfigPaths({