This commit is contained in:
Matthias Hochmeister
2026-02-23 17:08:58 +01:00
commit f09748f4a1
97 changed files with 17729 additions and 0 deletions

69
backend/.dockerignore Normal file
View File

@@ -0,0 +1,69 @@
# Dependencies
node_modules
npm-debug.log
yarn-error.log
package-lock.json
yarn.lock
# Build output
dist
build
*.tsbuildinfo
# Environment variables
.env
.env.local
.env.*.local
*.env
# IDE and editor files
.vscode
.idea
*.swp
*.swo
*~
.DS_Store
# Logs
logs
*.log
npm-debug.log*
pids
*.pid
*.seed
*.pid.lock
# Testing
coverage
.nyc_output
test-results
# Temporary files
tmp
temp
*.tmp
# Git
.git
.gitignore
.gitattributes
# Documentation
README.md
CHANGELOG.md
docs
# CI/CD
.github
.gitlab-ci.yml
.travis.yml
Jenkinsfile
# Docker
Dockerfile
.dockerignore
docker-compose*.yml
# Misc
.cache
.parcel-cache

14
backend/.gitignore vendored Normal file
View File

@@ -0,0 +1,14 @@
node_modules/
dist/
logs/
*.log
.env
.env.development
.env.production
.DS_Store
coverage/
*.swp
*.swo
*~
.vscode/
.idea/

69
backend/Dockerfile Normal file
View File

@@ -0,0 +1,69 @@
# ===========================
# Build Stage
# ===========================
FROM node:20-alpine AS builder

# Set working directory
WORKDIR /app

# Install build dependencies for native npm modules
RUN apk add --no-cache python3 make g++

# Copy package files for dependency installation
COPY package*.json ./

# Install all dependencies (including devDependencies for building)
RUN npm ci

# Copy TypeScript configuration and source code
COPY tsconfig.json ./
COPY src ./src

# Build TypeScript to JavaScript
RUN npm run build

# Prune dev dependencies (--omit=dev replaces the deprecated --production flag
# in the npm version shipped with node:20)
RUN npm prune --omit=dev

# ===========================
# Production Stage
# ===========================
FROM node:20-alpine AS production

# Install wget for health checks
RUN apk add --no-cache wget

# Create non-root user for security
RUN addgroup -g 1001 -S nodejs && \
    adduser -S nodejs -u 1001

# Set working directory
WORKDIR /app

# Copy package files
COPY package*.json ./

# Copy production node_modules from builder
COPY --from=builder /app/node_modules ./node_modules

# Copy compiled JavaScript from builder
COPY --from=builder /app/dist ./dist

# Copy database migrations (the runtime migration runner reads .sql files from
# dist/database/migrations; tsc does not copy non-TS assets)
COPY --from=builder /app/src/database/migrations ./dist/database/migrations

# Change ownership to non-root user
RUN chown -R nodejs:nodejs /app

# Switch to non-root user
USER nodejs

# Expose application port
EXPOSE 3000

# Health check
HEALTHCHECK --interval=30s --timeout=10s --retries=3 --start-period=40s \
  CMD wget --quiet --tries=1 --spider http://localhost:3000/health || exit 1

# Start the application
CMD ["node", "dist/server.js"]

94
backend/README.md Normal file
View File

@@ -0,0 +1,94 @@
# Feuerwehr Dashboard Backend
## Description
Backend API for the Feuerwehr Dashboard application built with Node.js, Express, and TypeScript.
## Tech Stack
- Node.js
- Express
- TypeScript
- PostgreSQL
- Winston (Logging)
- JWT (Authentication)
- Helmet (Security)
- Zod (Validation)
## Prerequisites
- Node.js (v18 or higher)
- PostgreSQL (v14 or higher)
- npm or yarn
## Installation
```bash
npm install
```
## Configuration
Create a `.env.development` file in the root directory:
```env
NODE_ENV=development
PORT=3000
DB_HOST=localhost
DB_PORT=5432
DB_NAME=feuerwehr_dev
DB_USER=dev_user
DB_PASSWORD=dev_password
JWT_SECRET=your-secret-key-change-in-production
JWT_EXPIRES_IN=24h
CORS_ORIGIN=http://localhost:3001
RATE_LIMIT_WINDOW_MS=900000
RATE_LIMIT_MAX=100
LOG_LEVEL=info
```
## Development
```bash
# Run in development mode with hot reload
npm run dev
# Build TypeScript to JavaScript
npm run build
# Run production build
npm start
```
## Project Structure
```
backend/
├── src/
│ ├── config/ # Configuration files
│ ├── controllers/ # Route controllers
│ ├── database/ # Database migrations
│ ├── middleware/ # Express middleware
│ ├── models/ # Data models
│ ├── routes/ # API routes
│ ├── services/ # Business logic
│ ├── types/ # TypeScript types
│ ├── utils/ # Utility functions
│ ├── app.ts # Express app setup
│ └── server.ts # Server entry point
├── dist/ # Compiled JavaScript
├── logs/ # Application logs
└── package.json
```
## API Endpoints
### Health Check
- `GET /health` - Server health status
## Scripts
- `npm run dev` - Start development server with hot reload
- `npm run build` - Build TypeScript to JavaScript
- `npm start` - Run production server
- `npm test` - Run tests (not yet implemented)
## License
ISC

9
backend/nodemon.json Normal file
View File

@@ -0,0 +1,9 @@
{
"watch": ["src"],
"ext": "ts,json",
"ignore": ["src/**/*.spec.ts", "src/**/*.test.ts"],
"exec": "ts-node src/server.ts",
"env": {
"NODE_ENV": "development"
}
}

2151
backend/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

38
backend/package.json Normal file
View File

@@ -0,0 +1,38 @@
{
"name": "backend",
"version": "1.0.0",
"description": "Feuerwehr Dashboard Backend API",
"main": "dist/server.js",
"scripts": {
"dev": "nodemon",
"build": "tsc",
"start": "node dist/server.js",
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "",
"license": "ISC",
"type": "commonjs",
"dependencies": {
"axios": "^1.13.5",
"cors": "^2.8.6",
"dotenv": "^17.3.1",
"express": "^5.2.1",
"express-rate-limit": "^8.2.1",
"helmet": "^8.1.0",
"jsonwebtoken": "^9.0.3",
"pg": "^8.18.0",
"winston": "^3.19.0",
"zod": "^4.3.6"
},
"devDependencies": {
"@types/cors": "^2.8.19",
"@types/express": "^5.0.6",
"@types/jsonwebtoken": "^9.0.10",
"@types/node": "^25.3.0",
"@types/pg": "^8.16.0",
"nodemon": "^3.1.14",
"ts-node": "^10.9.2",
"typescript": "^5.9.3"
}
}

68
backend/src/app.ts Normal file
View File

@@ -0,0 +1,68 @@
import express, { Application, Request, Response } from 'express';
import cors from 'cors';
import helmet from 'helmet';
import rateLimit from 'express-rate-limit';
import environment from './config/environment';
import logger from './utils/logger';
import { errorHandler, notFoundHandler } from './middleware/error.middleware';
// FIX: route imports were previously buried in the middle of the file; ES
// module imports are hoisted anyway, so moving them here changes nothing at
// runtime while keeping a single conventional import block.
import authRoutes from './routes/auth.routes';
import userRoutes from './routes/user.routes';

const app: Application = express();

// Security middleware (standard hardening headers)
app.use(helmet());

// CORS configuration
app.use(cors({
  origin: environment.cors.origin,
  credentials: true,
}));

// Rate limiting — mounted on /api only, so /health stays unthrottled for probes
const limiter = rateLimit({
  windowMs: environment.rateLimit.windowMs,
  max: environment.rateLimit.max,
  message: 'Too many requests from this IP, please try again later.',
  standardHeaders: true,
  legacyHeaders: false,
});
app.use('/api', limiter);

// Body parsing middleware
app.use(express.json({ limit: '10mb' }));
app.use(express.urlencoded({ extended: true, limit: '10mb' }));

// Request logging middleware
app.use((req: Request, _res: Response, next) => {
  logger.info('Incoming request', {
    method: req.method,
    path: req.path,
    ip: req.ip,
  });
  next();
});

// Health check endpoint
app.get('/health', (_req: Request, res: Response) => {
  res.status(200).json({
    status: 'ok',
    timestamp: new Date().toISOString(),
    uptime: process.uptime(),
    environment: environment.nodeEnv,
  });
});

// API routes
app.use('/api/auth', authRoutes);
app.use('/api/user', userRoutes);

// 404 handler
app.use(notFoundHandler);

// Error handling middleware (must be last)
app.use(errorHandler);

export default app;

View File

@@ -0,0 +1,25 @@
import environment from './environment';

/**
 * OIDC configuration for the Authentik identity provider.
 * All endpoint URLs are derived from the issuer, which is expected to end
 * with a trailing slash.
 */
interface AuthentikConfig {
  issuer: string;
  clientId: string;
  clientSecret: string;
  redirectUri: string;
  tokenEndpoint: string;
  userInfoEndpoint: string;
  authorizeEndpoint: string;
  logoutEndpoint: string;
}

const { issuer, clientId, clientSecret, redirectUri } = environment.authentik;

// Build an endpoint URL relative to the issuer base.
const endpoint = (suffix: string): string => `${issuer}${suffix}/`;

const authentikConfig: AuthentikConfig = {
  issuer,
  clientId,
  clientSecret,
  redirectUri,
  tokenEndpoint: endpoint('token'),
  userInfoEndpoint: endpoint('userinfo'),
  authorizeEndpoint: endpoint('authorize'),
  logoutEndpoint: endpoint('logout'),
};

export default authentikConfig;

View File

@@ -0,0 +1,145 @@
import { Pool, PoolConfig } from 'pg';
import * as fs from 'fs';
import * as path from 'path';
import environment from './environment';
import logger from '../utils/logger';

// Connection-pool settings; all credentials come from the environment config.
const poolConfig: PoolConfig = {
  host: environment.database.host,
  port: environment.database.port,
  database: environment.database.name,
  user: environment.database.user,
  password: environment.database.password,
  max: 20, // Maximum number of clients in the pool
  idleTimeoutMillis: 30000, // Close idle clients after 30 seconds
  connectionTimeoutMillis: 2000, // Return an error if connection takes longer than 2 seconds
};

// Single shared pool for the whole process.
const pool = new Pool(poolConfig);

// Handle pool errors: without this listener an error on an idle client would
// be an unhandled 'error' event and crash the process.
pool.on('error', (err) => {
  logger.error('Unexpected error on idle database client', err);
});
/**
 * Probe database connectivity by checking out a client and running a
 * trivial query.
 * @returns true when the probe query succeeded, false on any failure.
 */
export const testConnection = async (): Promise<boolean> => {
  try {
    const client = await pool.connect();
    const probe = await client.query('SELECT NOW()');
    logger.info('Database connection successful', { timestamp: probe.rows[0].now });
    client.release();
  } catch (error) {
    logger.error('Failed to connect to database', { error });
    return false;
  }
  return true;
};
/**
 * Drain and close every client in the pool.
 * Called during graceful shutdown; failures are logged, never thrown.
 */
export const closePool = async (): Promise<void> => {
  try {
    await pool.end();
  } catch (error) {
    logger.error('Error closing database pool', { error });
    return;
  }
  logger.info('Database pool closed');
};
/**
 * Check if a table exists in the database.
 * Looks the name up in information_schema for the 'public' schema only.
 * @param tableName - Unqualified table name to look for.
 * @returns true when the table exists; false when absent or on query error.
 */
export const tableExists = async (tableName: string): Promise<boolean> => {
  try {
    const { rows } = await pool.query(
      `SELECT EXISTS (
        SELECT FROM information_schema.tables
        WHERE table_schema = 'public'
        AND table_name = $1
      )`,
      [tableName]
    );
    return rows[0].exists;
  } catch (error) {
    logger.error(`Failed to check if table ${tableName} exists`, { error });
    return false;
  }
};
/**
 * Run database migrations from SQL files.
 *
 * Files in ../database/migrations are executed in lexicographic order and
 * recorded in a `migrations` tracking table so each file runs exactly once.
 *
 * BUG FIX: each migration now runs on a single checked-out client.
 * node-postgres does not guarantee that consecutive pool.query() calls use
 * the same connection, so the previous pool.query('BEGIN') / 'COMMIT'
 * sequence did not actually wrap the migration SQL in one transaction.
 *
 * @throws Rethrows the first migration error after rolling back.
 */
export const runMigrations = async (): Promise<void> => {
  const migrationsDir = path.join(__dirname, '../database/migrations');
  try {
    // Check if migrations directory exists
    if (!fs.existsSync(migrationsDir)) {
      logger.warn('Migrations directory not found', { path: migrationsDir });
      return;
    }
    // Read all migration files, sorted so they run in numeric-prefix order
    const files = fs.readdirSync(migrationsDir)
      .filter(file => file.endsWith('.sql'))
      .sort();
    if (files.length === 0) {
      logger.info('No migration files found');
      return;
    }
    logger.info(`Found ${files.length} migration file(s)`);
    // Create migrations tracking table if it doesn't exist
    await pool.query(`
      CREATE TABLE IF NOT EXISTS migrations (
        id SERIAL PRIMARY KEY,
        filename VARCHAR(255) UNIQUE NOT NULL,
        executed_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
      )
    `);
    // Run each migration
    for (const file of files) {
      // Skip migrations already recorded in the tracking table
      const result = await pool.query(
        'SELECT filename FROM migrations WHERE filename = $1',
        [file]
      );
      if (result.rows.length > 0) {
        logger.info(`Migration ${file} already executed, skipping`);
        continue;
      }
      // Read and execute migration
      const filePath = path.join(migrationsDir, file);
      const sql = fs.readFileSync(filePath, 'utf8');
      logger.info(`Running migration: ${file}`);
      // A transaction must run on one dedicated client, not through the pool.
      const client = await pool.connect();
      try {
        await client.query('BEGIN');
        await client.query(sql);
        await client.query(
          'INSERT INTO migrations (filename) VALUES ($1)',
          [file]
        );
        await client.query('COMMIT');
        logger.info(`Migration ${file} completed successfully`);
      } catch (error) {
        await client.query('ROLLBACK');
        logger.error(`Migration ${file} failed`, { error });
        throw error;
      } finally {
        client.release();
      }
    }
    logger.info('All migrations completed successfully');
  } catch (error) {
    logger.error('Failed to run migrations', { error });
    throw error;
  }
};

export default pool;

View File

@@ -0,0 +1,69 @@
import dotenv from 'dotenv';
import path from 'path';

// Load environment-specific .env file
const envFile =
  process.env.NODE_ENV === 'production' ? '.env.production' : '.env.development';
dotenv.config({ path: path.resolve(__dirname, '../../', envFile) });

/** Typed view over every environment-driven setting the backend reads. */
interface EnvironmentConfig {
  nodeEnv: string;
  port: number;
  database: {
    host: string;
    port: number;
    name: string;
    user: string;
    password: string;
  };
  jwt: {
    secret: string;
    expiresIn: string | number;
  };
  cors: {
    origin: string;
  };
  rateLimit: {
    windowMs: number;
    max: number;
  };
  authentik: {
    issuer: string;
    clientId: string;
    clientSecret: string;
    redirectUri: string;
  };
}

// Parse a base-10 integer env value, falling back to a default string.
const toInt = (value: string | undefined, fallback: string): number =>
  parseInt(value || fallback, 10);

const environment: EnvironmentConfig = {
  nodeEnv: process.env.NODE_ENV || 'development',
  port: toInt(process.env.PORT, '3000'),
  database: {
    host: process.env.DB_HOST || 'localhost',
    port: toInt(process.env.DB_PORT, '5432'),
    name: process.env.DB_NAME || 'feuerwehr_dev',
    user: process.env.DB_USER || 'dev_user',
    password: process.env.DB_PASSWORD || 'dev_password',
  },
  jwt: {
    secret: process.env.JWT_SECRET || 'your-secret-key-change-in-production',
    expiresIn: process.env.JWT_EXPIRES_IN || '24h',
  },
  cors: {
    origin: process.env.CORS_ORIGIN || 'http://localhost:3001',
  },
  rateLimit: {
    windowMs: toInt(process.env.RATE_LIMIT_WINDOW_MS, '900000'), // 15 minutes
    max: toInt(process.env.RATE_LIMIT_MAX, '100'),
  },
  authentik: {
    issuer: process.env.AUTHENTIK_ISSUER || 'https://authentik.yourdomain.com/application/o/your-app/',
    clientId: process.env.AUTHENTIK_CLIENT_ID || 'your_client_id_here',
    clientSecret: process.env.AUTHENTIK_CLIENT_SECRET || 'your_client_secret_here',
    redirectUri: process.env.AUTHENTIK_REDIRECT_URI || 'http://localhost:5173/auth/callback',
  },
};

export default environment;

View File

@@ -0,0 +1,246 @@
import { Request, Response } from 'express';
import authentikService from '../services/authentik.service';
import tokenService from '../services/token.service';
import userService from '../services/user.service';
import logger from '../utils/logger';
import { AuthRequest } from '../types/auth.types';

/**
 * HTTP handlers for the OAuth/OIDC flow against Authentik:
 * code-for-token exchange, logout auditing, and refresh-token handling.
 */
class AuthController {
  /**
   * Handle OAuth callback
   * POST /api/auth/callback
   *
   * Exchanges the authorization code for Authentik tokens, provisions (or
   * updates) the local user record, and issues this app's own JWT pair.
   * Responds 400 on a missing code, 403 for inactive accounts, 500 otherwise.
   */
  async handleCallback(req: Request, res: Response): Promise<void> {
    try {
      const { code } = req.body as AuthRequest;
      // Validate code
      if (!code) {
        res.status(400).json({
          success: false,
          message: 'Authorization code is required',
        });
        return;
      }
      logger.info('Processing OAuth callback', { hasCode: !!code });
      // Step 1: Exchange code for tokens
      const tokens = await authentikService.exchangeCodeForTokens(code);
      // Step 2: Get user info from Authentik
      const userInfo = await authentikService.getUserInfo(tokens.access_token);
      // Step 3: Verify ID token if present
      // NOTE(review): a verification failure is only logged, not rejected —
      // confirm this leniency is intended.
      if (tokens.id_token) {
        try {
          authentikService.verifyIdToken(tokens.id_token);
        } catch (error) {
          logger.warn('ID token verification failed', { error });
        }
      }
      // Step 4: Find or create user in database, keyed by the OIDC subject
      let user = await userService.findByAuthentikSub(userInfo.sub);
      if (!user) {
        // User doesn't exist, create new user
        logger.info('Creating new user from Authentik', {
          sub: userInfo.sub,
          email: userInfo.email,
        });
        user = await userService.createUser({
          email: userInfo.email,
          authentik_sub: userInfo.sub,
          preferred_username: userInfo.preferred_username,
          given_name: userInfo.given_name,
          family_name: userInfo.family_name,
          name: userInfo.name,
          profile_picture_url: userInfo.picture,
        });
      } else {
        // User exists, update last login
        logger.info('Existing user logging in', {
          userId: user.id,
          email: user.email,
        });
        await userService.updateLastLogin(user.id);
      }
      // Check if user is active (deactivated accounts must not receive tokens)
      if (!user.is_active) {
        logger.warn('Inactive user attempted login', { userId: user.id });
        res.status(403).json({
          success: false,
          message: 'User account is inactive',
        });
        return;
      }
      // Step 5: Generate internal JWT token
      const accessToken = tokenService.generateToken({
        userId: user.id,
        email: user.email,
        authentikSub: user.authentik_sub,
      });
      // Generate refresh token
      const refreshToken = tokenService.generateRefreshToken({
        userId: user.id,
        email: user.email,
      });
      logger.info('User authenticated successfully', {
        userId: user.id,
        email: user.email,
      });
      // Step 6: Return tokens and user info (camelCase for API consumers)
      res.status(200).json({
        success: true,
        message: 'Authentication successful',
        data: {
          accessToken,
          refreshToken,
          user: {
            id: user.id,
            email: user.email,
            name: user.name,
            preferredUsername: user.preferred_username,
            givenName: user.given_name,
            familyName: user.family_name,
            profilePictureUrl: user.profile_picture_url,
            isActive: user.is_active,
          },
        },
      });
    } catch (error) {
      logger.error('OAuth callback error', { error });
      const message =
        error instanceof Error ? error.message : 'Authentication failed';
      res.status(500).json({
        success: false,
        message,
      });
    }
  }

  /**
   * Handle logout
   * POST /api/auth/logout
   *
   * Stateless JWT logout: the client discards its tokens; this endpoint only
   * records an audit log entry when the caller was authenticated.
   */
  async handleLogout(req: Request, res: Response): Promise<void> {
    try {
      // In a stateless JWT setup, logout is handled client-side by removing the token
      // However, we can log the event for audit purposes
      if (req.user) {
        logger.info('User logged out', {
          userId: req.user.id,
          email: req.user.email,
        });
      }
      res.status(200).json({
        success: true,
        message: 'Logout successful',
      });
    } catch (error) {
      logger.error('Logout error', { error });
      res.status(500).json({
        success: false,
        message: 'Logout failed',
      });
    }
  }

  /**
   * Handle token refresh
   * POST /api/auth/refresh
   *
   * Verifies the supplied refresh token, re-checks that the user still exists
   * and is active, then issues a fresh access token. Responds 400 when the
   * token is missing, 401 when it is invalid or the user is gone, 403 when
   * the account is inactive.
   */
  async handleRefresh(req: Request, res: Response): Promise<void> {
    try {
      const { refreshToken } = req.body;
      if (!refreshToken) {
        res.status(400).json({
          success: false,
          message: 'Refresh token is required',
        });
        return;
      }
      // Verify refresh token
      let decoded;
      try {
        decoded = tokenService.verifyRefreshToken(refreshToken);
      } catch (error) {
        const message = error instanceof Error ? error.message : 'Invalid refresh token';
        res.status(401).json({
          success: false,
          message,
        });
        return;
      }
      // Get user from database (token may outlive the account)
      const user = await userService.findById(decoded.userId);
      if (!user) {
        logger.warn('Refresh token valid but user not found', {
          userId: decoded.userId,
        });
        res.status(401).json({
          success: false,
          message: 'User not found',
        });
        return;
      }
      if (!user.is_active) {
        logger.warn('Inactive user attempted token refresh', {
          userId: user.id,
        });
        res.status(403).json({
          success: false,
          message: 'User account is inactive',
        });
        return;
      }
      // Generate new access token (refresh token itself is not rotated here)
      const accessToken = tokenService.generateToken({
        userId: user.id,
        email: user.email,
        authentikSub: user.authentik_sub,
      });
      logger.info('Token refreshed successfully', {
        userId: user.id,
        email: user.email,
      });
      res.status(200).json({
        success: true,
        message: 'Token refreshed successfully',
        data: {
          accessToken,
        },
      });
    } catch (error) {
      logger.error('Token refresh error', { error });
      res.status(500).json({
        success: false,
        message: 'Token refresh failed',
      });
    }
  }
}

export default new AuthController();

View File

@@ -0,0 +1,63 @@
import { Request, Response } from 'express';
import userService from '../services/user.service';
import logger from '../utils/logger';

/**
 * HTTP handlers for user-centric endpoints.
 */
class UserController {
  /**
   * Get current user
   * GET /api/user/me
   *
   * Requires the auth middleware to have populated req.user; returns the full
   * profile from the database as camelCase JSON.
   */
  async getCurrentUser(req: Request, res: Response): Promise<void> {
    try {
      const authenticated = req.user;
      if (!authenticated) {
        res.status(401).json({
          success: false,
          message: 'Not authenticated',
        });
        return;
      }
      // The token may reference a user that has since been deleted.
      const user = await userService.findById(authenticated.id);
      if (!user) {
        logger.warn('Authenticated user not found in database', {
          userId: authenticated.id,
        });
        res.status(404).json({
          success: false,
          message: 'User not found',
        });
        return;
      }
      logger.debug('Fetched current user', { userId: user.id });
      const profile = {
        id: user.id,
        email: user.email,
        name: user.name,
        preferredUsername: user.preferred_username,
        givenName: user.given_name,
        familyName: user.family_name,
        profilePictureUrl: user.profile_picture_url,
        isActive: user.is_active,
        lastLoginAt: user.last_login_at,
        createdAt: user.created_at,
      };
      res.status(200).json({ success: true, data: profile });
    } catch (error) {
      logger.error('Get current user error', { error });
      res.status(500).json({
        success: false,
        message: 'Failed to fetch user information',
      });
    }
  }
}

export default new UserController();

View File

@@ -0,0 +1,37 @@
-- Migration 001: users table for Authentik-backed accounts.
-- Rollback: DROP TRIGGER update_users_updated_at ON users;
--           DROP FUNCTION update_updated_at_column();
--           DROP TABLE users;

-- uuid_generate_v4() is provided by the uuid-ossp extension.
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";

-- IF NOT EXISTS guards make this file safe to re-run, matching the
-- idempotency guideline documented for this migrations directory.
CREATE TABLE IF NOT EXISTS users (
  id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
  authentik_sub VARCHAR(255) UNIQUE NOT NULL,
  email VARCHAR(255) UNIQUE NOT NULL,
  name VARCHAR(255),
  preferred_username VARCHAR(255),
  given_name VARCHAR(255),
  family_name VARCHAR(255),
  profile_picture_url TEXT,
  refresh_token TEXT,
  refresh_token_expires_at TIMESTAMP WITH TIME ZONE,
  last_login_at TIMESTAMP WITH TIME ZONE,
  created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
  updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
  preferences JSONB DEFAULT '{}',
  is_active BOOLEAN DEFAULT TRUE
);

-- Lookup indexes: OIDC subject, email, and activity tracking.
CREATE INDEX IF NOT EXISTS idx_users_authentik_sub ON users(authentik_sub);
CREATE INDEX IF NOT EXISTS idx_users_email ON users(email);
CREATE INDEX IF NOT EXISTS idx_users_last_login ON users(last_login_at);

-- Keep updated_at current on every row update.
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
  NEW.updated_at = NOW();
  RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- CREATE TRIGGER has no IF NOT EXISTS clause; drop first so re-runs succeed.
DROP TRIGGER IF EXISTS update_users_updated_at ON users;
CREATE TRIGGER update_users_updated_at BEFORE UPDATE ON users
  FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

View File

@@ -0,0 +1,223 @@
# Database Migrations
This directory contains SQL migration files for the Feuerwehr Dashboard database schema.
## Overview
Migrations are automatically executed when the application starts. Each migration file is tracked in the `migrations` table to ensure it only runs once.
## Migration Files
Migration files follow the naming convention: `{number}_{description}.sql`
Example:
- `001_create_users_table.sql`
- `002_add_roles_table.sql`
The numeric prefix determines the execution order. Always use sequential numbering.
## How Migrations Work
### Automatic Execution (Docker)
When running the application with Docker, migrations are automatically executed during startup:
1. Application starts
2. Database connection is established
3. `runMigrations()` function is called
4. Migration tracking table (`migrations`) is created if it doesn't exist
5. Each `.sql` file in this directory is checked:
- If already executed (recorded in `migrations` table): **skipped**
- If new: **executed within a transaction**
6. Successfully executed migrations are recorded in the tracking table
### Migration Tracking
The system creates a `migrations` table to track which migrations have been executed:
```sql
CREATE TABLE migrations (
id SERIAL PRIMARY KEY,
filename VARCHAR(255) UNIQUE NOT NULL,
executed_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);
```
This ensures each migration runs exactly once, even across multiple deployments.
## Manual Migration Execution
If you need to run migrations manually (development or troubleshooting):
### Using psql (PostgreSQL CLI)
```bash
# Connect to the database
docker exec -it feuerwehr-postgres psql -U feuerwehr_user -d feuerwehr_db
# Run a specific migration
\i /path/to/migration/001_create_users_table.sql
# Or if inside the container
\i /docker-entrypoint-initdb.d/migrations/001_create_users_table.sql
```
### Using npm/node script
Create a migration runner script:
```bash
# From backend directory
npm run migrate
```
You can add this to `package.json`:
```json
{
"scripts": {
"migrate": "ts-node src/scripts/migrate.ts"
}
}
```
## Creating New Migrations
1. **Determine the next migration number**
- Check existing files in this directory
- Use the next sequential number (e.g., if `001_` exists, create `002_`)
2. **Create a new `.sql` file**
```bash
touch src/database/migrations/002_add_new_feature.sql
```
3. **Write your SQL schema changes**
```sql
-- Always include IF NOT EXISTS / IF EXISTS guards for safety
CREATE TABLE IF NOT EXISTS my_table (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
name VARCHAR(255) NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);
-- Add indexes
CREATE INDEX IF NOT EXISTS idx_my_table_name ON my_table(name);
```
4. **Restart the application**
- Docker will automatically detect and run the new migration
- Or call `runMigrations()` programmatically
## Best Practices
### 1. Make Migrations Idempotent
Always use `IF EXISTS` or `IF NOT EXISTS` clauses:
```sql
CREATE TABLE IF NOT EXISTS users (...);
CREATE INDEX IF NOT EXISTS idx_users_email ON users(email);
ALTER TABLE users ADD COLUMN IF NOT EXISTS new_column VARCHAR(255);
```
### 2. Use Transactions
Each migration runs within a transaction automatically. If any part fails, the entire migration is rolled back.
### 3. Never Modify Existing Migrations
Once a migration has been deployed to production:
- **Never modify it**
- Create a new migration for changes
- This ensures consistency across all environments
### 4. Test Migrations Locally
Before deploying:
```bash
# Start fresh database
docker-compose down -v
docker-compose up -d postgres
# Run migrations
npm run dev
# or
npm run migrate
```
### 5. Include Rollback Instructions
Document how to revert changes in comments:
```sql
-- Migration: Add user preferences column
-- Rollback: ALTER TABLE users DROP COLUMN preferences;
ALTER TABLE users ADD COLUMN preferences JSONB DEFAULT '{}';
```
## Troubleshooting
### Migration Failed
If a migration fails:
1. **Check the logs** for error details
2. **Fix the SQL** in the migration file
3. **Remove the failed entry** from the tracking table:
```sql
DELETE FROM migrations WHERE filename = '002_failed_migration.sql';
```
4. **Restart the application** or re-run migrations
### Reset All Migrations
For development only (destroys all data):
```bash
# Drop and recreate database
docker-compose down -v
docker-compose up -d postgres
# Migrations will run automatically on next start
docker-compose up backend
```
### Check Migration Status
```sql
-- See which migrations have been executed
SELECT * FROM migrations ORDER BY executed_at DESC;
-- Check if a specific table exists
SELECT EXISTS (
SELECT FROM information_schema.tables
WHERE table_schema = 'public'
AND table_name = 'users'
);
```
## Current Migrations
### 001_create_users_table.sql
Creates the main `users` table for storing authenticated user data:
- **UUID primary key** with automatic generation
- **Authentik integration** fields (sub, email, profile data)
- **Token management** (refresh_token, expiration)
- **Audit fields** (last_login, created_at, updated_at)
- **User preferences** as JSONB
- **Active status** flag
- **Indexes** for performance:
- `authentik_sub` (unique identifier from OIDC)
- `email` (for lookups)
- `last_login_at` (for activity tracking)
- **Automatic timestamp updates** via trigger
## References
- [PostgreSQL Documentation](https://www.postgresql.org/docs/)
- [pg (node-postgres) Library](https://node-postgres.com/)
- Application database config: `src/config/database.ts`

View File

@@ -0,0 +1,155 @@
import { Request, Response, NextFunction } from 'express';
import tokenService from '../services/token.service';
import userService from '../services/user.service';
import logger from '../utils/logger';
import { JwtPayload } from '../types/auth.types';

// Extend Express Request type to include user.
// Populated by the authenticate / optionalAuth middleware defined below.
declare global {
  namespace Express {
    interface Request {
      user?: {
        id: string; // UUID
        email: string;
        authentikSub: string;
      };
    }
  }
}
/**
 * Authentication middleware.
 * Validates the Bearer JWT from the Authorization header, loads the matching
 * user, rejects missing/inactive accounts, and attaches
 * { id, email, authentikSub } to req.user before calling next().
 */
export const authenticate = async (
  req: Request,
  res: Response,
  next: NextFunction
): Promise<void> => {
  try {
    const header = req.headers.authorization;
    if (!header) {
      res.status(401).json({
        success: false,
        message: 'No authorization token provided',
      });
      return;
    }
    // Expect exactly "Bearer <token>".
    const segments = header.split(' ');
    if (segments.length !== 2 || segments[0] !== 'Bearer') {
      res.status(401).json({
        success: false,
        message: 'Invalid authorization header format. Use: Bearer <token>',
      });
      return;
    }
    // Verify the token signature and expiry.
    let payload: JwtPayload;
    try {
      payload = tokenService.verifyToken(segments[1]);
    } catch (error) {
      res.status(401).json({
        success: false,
        message: error instanceof Error ? error.message : 'Invalid token',
      });
      return;
    }
    // The token may outlive the account — re-check existence and status.
    const user = await userService.findById(payload.userId);
    if (!user) {
      logger.warn('Token valid but user not found', { userId: payload.userId });
      res.status(401).json({
        success: false,
        message: 'User not found',
      });
      return;
    }
    if (!user.is_active) {
      logger.warn('User account is inactive', { userId: payload.userId });
      res.status(403).json({
        success: false,
        message: 'User account is inactive',
      });
      return;
    }
    // Attach the authenticated identity for downstream handlers.
    req.user = {
      id: payload.userId,
      email: payload.email,
      authentikSub: payload.authentikSub,
    };
    logger.debug('User authenticated successfully', {
      userId: payload.userId,
      email: payload.email,
    });
    next();
  } catch (error) {
    logger.error('Authentication middleware error', { error });
    res.status(500).json({
      success: false,
      message: 'Internal server error during authentication',
    });
  }
};
/**
 * Optional authentication middleware.
 * Attaches req.user when a valid Bearer token for an active user is
 * presented, but never rejects the request; anonymous callers proceed
 * without a user.
 */
export const optionalAuth = async (
  req: Request,
  _res: Response,
  next: NextFunction
): Promise<void> => {
  try {
    const segments = (req.headers.authorization ?? '').split(' ');
    // Only attempt verification for a well-formed "Bearer <token>" header.
    if (segments.length === 2 && segments[0] === 'Bearer') {
      try {
        const payload = tokenService.verifyToken(segments[1]);
        const user = await userService.findById(payload.userId);
        if (user && user.is_active) {
          req.user = {
            id: payload.userId,
            email: payload.email,
            authentikSub: payload.authentikSub,
          };
        }
      } catch (error) {
        // Invalid token - continue without user
        logger.debug('Optional auth: Invalid token', { error });
      }
    }
    next();
  } catch (error) {
    logger.error('Optional authentication middleware error', { error });
    next();
  }
};

View File

@@ -0,0 +1,66 @@
import { Request, Response, NextFunction } from 'express';
import logger from '../utils/logger';
/**
 * Operational error carrying an HTTP status code.
 * Thrown by application code for expected failure modes; the global error
 * handler serializes it into a JSON error response with that status.
 */
export class AppError extends Error {
  statusCode: number;
  isOperational: boolean;

  constructor(message: string, statusCode: number = 500) {
    super(message);
    // Flag as an expected, handled failure (vs. a programmer error).
    this.isOperational = true;
    this.statusCode = statusCode;
    Error.captureStackTrace(this, this.constructor);
  }
}
/**
 * Global Express error handler (registered last in the middleware chain).
 * AppError instances are reported with their own status code; anything else
 * becomes a 500, with the real message hidden in production.
 */
export const errorHandler = (
  err: Error | AppError,
  req: Request,
  res: Response,
  _next: NextFunction
): void => {
  if (err instanceof AppError) {
    logger.error('Application Error', {
      message: err.message,
      statusCode: err.statusCode,
      stack: err.stack,
      path: req.path,
      method: req.method,
    });
    res.status(err.statusCode).json({ status: 'error', message: err.message });
    return;
  }
  // Anything that is not an AppError is an unexpected failure.
  logger.error('Unexpected Error', {
    message: err.message,
    stack: err.stack,
    path: req.path,
    method: req.method,
  });
  const exposed =
    process.env.NODE_ENV === 'production' ? 'Internal server error' : err.message;
  res.status(500).json({ status: 'error', message: exposed });
};
/**
 * 404 handler for any route no other handler matched.
 */
export const notFoundHandler = (req: Request, res: Response): void => {
  const message = `Route ${req.originalUrl} not found`;
  res.status(404).json({ status: 'error', message });
};
/**
 * Wrap an async route handler so rejected promises are forwarded to next()
 * instead of being swallowed.
 */
export const asyncHandler = (fn: Function) =>
  (req: Request, res: Response, next: NextFunction) => {
    Promise.resolve(fn(req, res, next)).catch(next);
  };

View File

@@ -0,0 +1,37 @@
/**
 * Database representation of a user row.
 * Field names are snake_case to mirror the `users` table columns.
 */
export interface User {
  id: string; // UUID
  email: string;
  authentik_sub: string; // OIDC subject claim from Authentik (unique)
  name?: string;
  preferred_username?: string;
  given_name?: string;
  family_name?: string;
  profile_picture_url?: string;
  refresh_token?: string;
  refresh_token_expires_at?: Date;
  is_active: boolean; // inactive accounts are rejected by the auth layer
  last_login_at?: Date;
  created_at: Date;
  updated_at: Date; // maintained by a database trigger
  preferences?: any; // JSONB
}

/**
 * Payload accepted when provisioning a user from Authentik userinfo.
 */
export interface CreateUserData {
  email: string;
  authentik_sub: string;
  name?: string;
  preferred_username?: string;
  given_name?: string;
  family_name?: string;
  profile_picture_url?: string;
}

/**
 * Partial update payload for an existing user; all fields optional.
 */
export interface UpdateUserData {
  name?: string;
  preferred_username?: string;
  given_name?: string;
  family_name?: string;
  profile_picture_url?: string;
  is_active?: boolean;
  preferences?: any;
}

View File

@@ -0,0 +1,28 @@
import { Router } from 'express';
import authController from '../controllers/auth.controller';
import { optionalAuth } from '../middleware/auth.middleware';

// Authentication routes; mounted at /api/auth by the Express app.
const router = Router();

/**
 * @route   POST /api/auth/callback
 * @desc    Handle OAuth callback from Authentik
 * @access  Public
 */
router.post('/callback', authController.handleCallback);

/**
 * @route   POST /api/auth/logout
 * @desc    Logout user
 * @access  Public (optional auth for logging purposes)
 */
router.post('/logout', optionalAuth, authController.handleLogout);

/**
 * @route   POST /api/auth/refresh
 * @desc    Refresh access token
 * @access  Public
 */
router.post('/refresh', authController.handleRefresh);

export default router;

View File

@@ -0,0 +1,14 @@
import { Router } from 'express';
import userController from '../controllers/user.controller';
import { authenticate } from '../middleware/auth.middleware';

// User routes. Presumably mounted under /api/user — confirm against app setup.
const router = Router();

/**
 * @route   GET /api/user/me
 * @desc    Get current authenticated user
 * @access  Private (requires a valid JWT via the authenticate middleware)
 */
router.get('/me', authenticate, userController.getCurrentUser);

export default router;

73
backend/src/server.ts Normal file
View File

@@ -0,0 +1,73 @@
import app from './app';
import environment from './config/environment';
import logger from './utils/logger';
import { testConnection, closePool } from './config/database';
/**
 * Boot sequence: check database connectivity, start the HTTP listener,
 * and install graceful-shutdown handlers for SIGTERM/SIGINT.
 *
 * A failed DB check is logged but does not abort startup, so health
 * endpoints stay reachable while the database recovers.
 */
const startServer = async (): Promise<void> => {
  try {
    // Verify database connectivity up front so operators see the status at boot.
    logger.info('Testing database connection...');
    const dbConnected = await testConnection();
    if (!dbConnected) {
      logger.warn('Database connection failed - server will start but database operations may fail');
    }

    // Start the HTTP server.
    const server = app.listen(environment.port, () => {
      logger.info('Server started successfully', {
        port: environment.port,
        environment: environment.nodeEnv,
        database: dbConnected ? 'connected' : 'disconnected',
      });
    });

    // Graceful shutdown: stop accepting connections, then release the DB pool.
    // The guard prevents a second signal from starting a duplicate shutdown
    // (double close / double pool teardown).
    let shuttingDown = false;
    const gracefulShutdown = async (signal: string) => {
      if (shuttingDown) {
        return;
      }
      shuttingDown = true;
      logger.info(`${signal} received. Starting graceful shutdown...`);
      server.close(async () => {
        logger.info('HTTP server closed');
        // Close database connection
        await closePool();
        logger.info('Graceful shutdown completed');
        process.exit(0);
      });
      // Force shutdown after 10 seconds. unref() so this timer alone
      // cannot keep the event loop alive once everything else has closed.
      const forceExit = setTimeout(() => {
        logger.error('Forced shutdown after timeout');
        process.exit(1);
      }, 10000);
      forceExit.unref();
    };

    process.on('SIGTERM', () => gracefulShutdown('SIGTERM'));
    process.on('SIGINT', () => gracefulShutdown('SIGINT'));
  } catch (error) {
    logger.error('Failed to start server', { error });
    process.exit(1);
  }
};
// Log unhandled promise rejections. The rejection reason can be ANY thrown
// value (string, undefined, plain object), not necessarily an Error, so
// normalize it before reading .message/.stack — the previous `reason: Error`
// typing would crash or log `undefined` for non-Error rejections.
process.on('unhandledRejection', (reason: unknown, _promise: Promise<any>) => {
  const error = reason instanceof Error ? reason : new Error(String(reason));
  logger.error('Unhandled Promise Rejection', {
    reason: error.message,
    stack: error.stack,
  });
});

// Uncaught synchronous exceptions leave the process in an undefined state:
// log and exit non-zero so the supervisor (Docker, systemd, ...) restarts us.
process.on('uncaughtException', (error: Error) => {
  logger.error('Uncaught Exception', {
    message: error.message,
    stack: error.stack,
  });
  process.exit(1);
});

// Boot the HTTP server (errors are handled inside startServer).
startServer();

View File

@@ -0,0 +1,158 @@
import axios, { AxiosError } from 'axios';
import authentikConfig from '../config/authentik';
import logger from '../utils/logger';
import { TokenResponse, UserInfo } from '../types/auth.types';
/**
 * Thin client for Authentik's OAuth2/OIDC endpoints: authorization-code
 * exchange, userinfo lookup, and refresh-token grant. Exported as a
 * singleton (see the default export below).
 */
class AuthentikService {
  /**
   * Exchange authorization code for access and ID tokens.
   *
   * Posts an `authorization_code` grant to the token endpoint using
   * form-encoded client credentials.
   *
   * @param code - one-time authorization code from the redirect callback
   * @returns the token response body from Authentik
   * @throws rethrows the underlying axios error after logging details
   */
  async exchangeCodeForTokens(code: string): Promise<TokenResponse> {
    try {
      const params = new URLSearchParams({
        grant_type: 'authorization_code',
        code,
        redirect_uri: authentikConfig.redirectUri,
        client_id: authentikConfig.clientId,
        client_secret: authentikConfig.clientSecret,
      });
      const response = await axios.post<TokenResponse>(
        authentikConfig.tokenEndpoint,
        params.toString(),
        {
          headers: {
            'Content-Type': 'application/x-www-form-urlencoded',
          },
        }
      );
      logger.info('Successfully exchanged code for tokens');
      return response.data;
    } catch (error) {
      this.handleError(error, 'Failed to exchange code for tokens');
      throw error;
    }
  }

  /**
   * Fetch user information from Authentik using access token.
   *
   * @param accessToken - bearer token from a previous token exchange
   * @returns the OIDC userinfo claims (sub, email, name, ...)
   * @throws rethrows the underlying axios error after logging details
   */
  async getUserInfo(accessToken: string): Promise<UserInfo> {
    try {
      const response = await axios.get<UserInfo>(
        authentikConfig.userInfoEndpoint,
        {
          headers: {
            Authorization: `Bearer ${accessToken}`,
          },
        }
      );
      logger.info('Successfully fetched user info', {
        sub: response.data.sub,
        email: response.data.email,
      });
      return response.data;
    } catch (error) {
      this.handleError(error, 'Failed to fetch user info');
      throw error;
    }
  }

  /**
   * Verify and decode ID token (basic validation).
   *
   * SECURITY NOTE(review): this only base64url-decodes the payload and
   * checks claim presence/expiry — the JWT *signature is NOT verified*,
   * and the issuer check below is a substring match that merely logs a
   * warning rather than rejecting. For production, verify the signature
   * against Authentik's JWKS with a proper JWT library (jose or
   * jsonwebtoken), as the original author's note already suggests.
   *
   * @param idToken - raw compact-serialized JWT (header.payload.signature)
   * @returns the decoded payload claims
   * @throws Error('Invalid ID token') on any structural/expiry failure
   */
  verifyIdToken(idToken: string): any {
    try {
      // Split the token into parts (header.payload.signature)
      const parts = idToken.split('.');
      if (parts.length !== 3) {
        throw new Error('Invalid ID token format');
      }
      // Decode the payload (Base64URL)
      const payload = JSON.parse(
        Buffer.from(parts[1], 'base64url').toString('utf-8')
      );
      // Basic validation: both claims are required downstream
      if (!payload.sub || !payload.email) {
        throw new Error('Invalid ID token payload');
      }
      // Check expiration (`exp` is seconds since epoch; Date.now() is ms)
      if (payload.exp && payload.exp * 1000 < Date.now()) {
        throw new Error('ID token has expired');
      }
      // Check issuer — warn-only, does not reject (see SECURITY NOTE above)
      if (payload.iss && !payload.iss.includes(authentikConfig.issuer)) {
        logger.warn('ID token issuer mismatch', {
          expected: authentikConfig.issuer,
          received: payload.iss,
        });
      }
      logger.info('ID token verified successfully', {
        sub: payload.sub,
        email: payload.email,
      });
      return payload;
    } catch (error) {
      logger.error('Failed to verify ID token', { error });
      throw new Error('Invalid ID token');
    }
  }

  /**
   * Refresh access token using refresh token.
   *
   * @param refreshToken - refresh token from a prior token response
   * @returns a fresh token response from Authentik
   * @throws rethrows the underlying axios error after logging details
   */
  async refreshAccessToken(refreshToken: string): Promise<TokenResponse> {
    try {
      const params = new URLSearchParams({
        grant_type: 'refresh_token',
        refresh_token: refreshToken,
        client_id: authentikConfig.clientId,
        client_secret: authentikConfig.clientSecret,
      });
      const response = await axios.post<TokenResponse>(
        authentikConfig.tokenEndpoint,
        params.toString(),
        {
          headers: {
            'Content-Type': 'application/x-www-form-urlencoded',
          },
        }
      );
      logger.info('Successfully refreshed access token');
      return response.data;
    } catch (error) {
      this.handleError(error, 'Failed to refresh access token');
      throw error;
    }
  }

  /**
   * Handle axios errors with detailed logging (status, body, message);
   * non-axios errors are logged as-is. Never throws — callers rethrow.
   */
  private handleError(error: unknown, message: string): void {
    if (axios.isAxiosError(error)) {
      const axiosError = error as AxiosError;
      logger.error(message, {
        status: axiosError.response?.status,
        statusText: axiosError.response?.statusText,
        data: axiosError.response?.data,
        message: axiosError.message,
      });
    } else {
      logger.error(message, { error });
    }
  }
}

export default new AuthentikService();

View File

@@ -0,0 +1,122 @@
import jwt from 'jsonwebtoken';
import environment from '../config/environment';
import logger from '../utils/logger';
import { JwtPayload, RefreshTokenPayload } from '../types/auth.types';
/**
 * Issues and verifies the application's own JWTs (distinct from the
 * Authentik tokens).
 *
 * Access and refresh tokens are signed with the SAME secret, so each
 * token now carries a `type` claim ('access' | 'refresh') and the verify
 * methods enforce it. Without this, a refresh token would have passed
 * `verifyToken` (and an access token `verifyRefreshToken`), making the
 * two token kinds interchangeable.
 */
class TokenService {
  /**
   * Generate a JWT access token.
   *
   * @param payload - identity claims to embed (userId, email, authentikSub)
   * @returns signed compact JWT
   * @throws Error('Token generation failed') on signing errors
   */
  generateToken(payload: JwtPayload): string {
    try {
      const token = jwt.sign(
        {
          userId: payload.userId,
          email: payload.email,
          authentikSub: payload.authentikSub,
          type: 'access', // marks this as an access token (checked in verifyToken)
        },
        environment.jwt.secret,
        {
          expiresIn: environment.jwt.expiresIn as any,
        }
      );
      logger.info('Generated JWT token', { userId: payload.userId });
      return token;
    } catch (error) {
      logger.error('Failed to generate JWT token', { error });
      throw new Error('Token generation failed');
    }
  }

  /**
   * Verify and decode a JWT access token.
   *
   * Rejects expired/malformed tokens and any token that is not marked
   * as an access token (e.g. a refresh token presented as access).
   *
   * @throws Error('Token expired') | Error('Invalid token') |
   *         Error('Token verification failed')
   */
  verifyToken(token: string): JwtPayload {
    let decoded: JwtPayload & { type?: string };
    try {
      decoded = jwt.verify(token, environment.jwt.secret) as JwtPayload & {
        type?: string;
      };
    } catch (error) {
      if (error instanceof jwt.TokenExpiredError) {
        logger.warn('JWT token expired');
        throw new Error('Token expired');
      } else if (error instanceof jwt.JsonWebTokenError) {
        logger.warn('Invalid JWT token', { error: error.message });
        throw new Error('Invalid token');
      } else {
        logger.error('Failed to verify JWT token', { error });
        throw new Error('Token verification failed');
      }
    }
    // Both token kinds share a secret, so the claim is the only distinction.
    if (decoded.type !== 'access') {
      logger.warn('Token is not an access token');
      throw new Error('Invalid token');
    }
    logger.debug('JWT token verified', { userId: decoded.userId });
    return decoded;
  }

  /**
   * Generate a refresh token (longer lived than access tokens).
   *
   * @throws Error('Refresh token generation failed') on signing errors
   */
  generateRefreshToken(payload: RefreshTokenPayload): string {
    try {
      const token = jwt.sign(
        {
          userId: payload.userId,
          email: payload.email,
          type: 'refresh', // marks this as a refresh token (checked in verifyRefreshToken)
        },
        environment.jwt.secret,
        {
          expiresIn: '7d', // Refresh tokens valid for 7 days
        }
      );
      logger.info('Generated refresh token', { userId: payload.userId });
      return token;
    } catch (error) {
      logger.error('Failed to generate refresh token', { error });
      throw new Error('Refresh token generation failed');
    }
  }

  /**
   * Verify a refresh token; rejects any token not marked `type: 'refresh'`
   * (including valid access tokens).
   *
   * @throws Error('Refresh token expired') | Error('Invalid refresh token') |
   *         Error('Refresh token verification failed')
   */
  verifyRefreshToken(token: string): RefreshTokenPayload {
    let decoded: RefreshTokenPayload & { type?: string };
    try {
      decoded = jwt.verify(
        token,
        environment.jwt.secret
      ) as RefreshTokenPayload & { type?: string };
    } catch (error) {
      if (error instanceof jwt.TokenExpiredError) {
        logger.warn('Refresh token expired');
        throw new Error('Refresh token expired');
      } else if (error instanceof jwt.JsonWebTokenError) {
        logger.warn('Invalid refresh token', { error: error.message });
        throw new Error('Invalid refresh token');
      } else {
        logger.error('Failed to verify refresh token', { error });
        throw new Error('Refresh token verification failed');
      }
    }
    if (decoded.type !== 'refresh') {
      logger.warn('Token is not a refresh token');
      throw new Error('Invalid refresh token');
    }
    logger.debug('Refresh token verified', { userId: decoded.userId });
    return decoded;
  }

  /**
   * Decode a token WITHOUT verification — debugging only; never trust the
   * result for authorization decisions.
   */
  decodeToken(token: string): JwtPayload | null {
    try {
      const decoded = jwt.decode(token) as JwtPayload;
      return decoded;
    } catch (error) {
      logger.error('Failed to decode token', { error });
      return null;
    }
  }
}

export default new TokenService();

View File

@@ -0,0 +1,275 @@
import pool from '../config/database';
import logger from '../utils/logger';
import { User, CreateUserData, UpdateUserData } from '../models/user.model';
/**
 * Data-access layer for the `users` table (node-postgres).
 * All queries are parameterized; exported as a singleton.
 */
class UserService {
  // Column list shared by every SELECT / RETURNING clause. Previously this
  // was duplicated in four queries, which risked the lists drifting apart.
  private static readonly USER_COLUMNS = `id, email, authentik_sub, name, preferred_username, given_name,
               family_name, profile_picture_url, refresh_token, refresh_token_expires_at,
               is_active, last_login_at, created_at, updated_at, preferences`;

  /**
   * Find user by Authentik sub (OIDC subject identifier).
   *
   * @returns the user row, or null when no row matches
   * @throws Error('Database query failed') on query errors
   */
  async findByAuthentikSub(sub: string): Promise<User | null> {
    try {
      const query = `
        SELECT ${UserService.USER_COLUMNS}
        FROM users
        WHERE authentik_sub = $1
      `;
      const result = await pool.query(query, [sub]);
      if (result.rows.length === 0) {
        logger.debug('User not found by Authentik sub', { sub });
        return null;
      }
      logger.debug('User found by Authentik sub', { sub, userId: result.rows[0].id });
      return result.rows[0] as User;
    } catch (error) {
      logger.error('Error finding user by Authentik sub', { error, sub });
      throw new Error('Database query failed');
    }
  }

  /**
   * Find user by email.
   *
   * @returns the user row, or null when no row matches
   * @throws Error('Database query failed') on query errors
   */
  async findByEmail(email: string): Promise<User | null> {
    try {
      const query = `
        SELECT ${UserService.USER_COLUMNS}
        FROM users
        WHERE email = $1
      `;
      const result = await pool.query(query, [email]);
      if (result.rows.length === 0) {
        logger.debug('User not found by email', { email });
        return null;
      }
      logger.debug('User found by email', { email, userId: result.rows[0].id });
      return result.rows[0] as User;
    } catch (error) {
      logger.error('Error finding user by email', { error, email });
      throw new Error('Database query failed');
    }
  }

  /**
   * Find user by primary-key UUID.
   *
   * @returns the user row, or null when no row matches
   * @throws Error('Database query failed') on query errors
   */
  async findById(id: string): Promise<User | null> {
    try {
      const query = `
        SELECT ${UserService.USER_COLUMNS}
        FROM users
        WHERE id = $1
      `;
      const result = await pool.query(query, [id]);
      if (result.rows.length === 0) {
        logger.debug('User not found by ID', { id });
        return null;
      }
      logger.debug('User found by ID', { id });
      return result.rows[0] as User;
    } catch (error) {
      logger.error('Error finding user by ID', { error, id });
      throw new Error('Database query failed');
    }
  }

  /**
   * Create a new user (always inserted as active).
   *
   * @throws Error('Failed to create user') on insert errors
   *         (including unique-constraint violations)
   */
  async createUser(userData: CreateUserData): Promise<User> {
    try {
      const query = `
        INSERT INTO users (
          email,
          authentik_sub,
          name,
          preferred_username,
          given_name,
          family_name,
          profile_picture_url,
          is_active
        )
        VALUES ($1, $2, $3, $4, $5, $6, $7, true)
        RETURNING ${UserService.USER_COLUMNS}
      `;
      const values = [
        userData.email,
        userData.authentik_sub,
        userData.name || null,
        userData.preferred_username || null,
        userData.given_name || null,
        userData.family_name || null,
        userData.profile_picture_url || null,
      ];
      const result = await pool.query(query, values);
      const user = result.rows[0] as User;
      logger.info('User created successfully', {
        userId: user.id,
        email: user.email,
      });
      return user;
    } catch (error) {
      logger.error('Error creating user', { error, email: userData.email });
      throw new Error('Failed to create user');
    }
  }

  /**
   * Update user information. Only fields present in `data` are written;
   * `updated_at` is always refreshed.
   *
   * @throws Error('No fields to update') when `data` is empty
   * @throws Error('User not found') when the id matches no row
   * @throws Error('Failed to update user') on database errors
   */
  async updateUser(id: string, data: UpdateUserData): Promise<User> {
    try {
      // Build the SET clause dynamically from the provided fields.
      const updateFields: string[] = [];
      const values: any[] = [];
      let paramCount = 1;
      if (data.name !== undefined) {
        updateFields.push(`name = $${paramCount++}`);
        values.push(data.name);
      }
      if (data.preferred_username !== undefined) {
        updateFields.push(`preferred_username = $${paramCount++}`);
        values.push(data.preferred_username);
      }
      if (data.given_name !== undefined) {
        updateFields.push(`given_name = $${paramCount++}`);
        values.push(data.given_name);
      }
      if (data.family_name !== undefined) {
        updateFields.push(`family_name = $${paramCount++}`);
        values.push(data.family_name);
      }
      if (data.profile_picture_url !== undefined) {
        updateFields.push(`profile_picture_url = $${paramCount++}`);
        values.push(data.profile_picture_url);
      }
      if (data.is_active !== undefined) {
        updateFields.push(`is_active = $${paramCount++}`);
        values.push(data.is_active);
      }
      if (data.preferences !== undefined) {
        updateFields.push(`preferences = $${paramCount++}`);
        values.push(JSON.stringify(data.preferences));
      }
      if (updateFields.length === 0) {
        throw new Error('No fields to update');
      }
      updateFields.push(`updated_at = CURRENT_TIMESTAMP`);
      values.push(id);
      const query = `
        UPDATE users
        SET ${updateFields.join(', ')}
        WHERE id = $${paramCount}
        RETURNING ${UserService.USER_COLUMNS}
      `;
      const result = await pool.query(query, values);
      if (result.rows.length === 0) {
        throw new Error('User not found');
      }
      const user = result.rows[0] as User;
      logger.info('User updated successfully', { userId: user.id });
      return user;
    } catch (error) {
      // Preserve deliberate domain errors instead of masking them as a
      // generic database failure (previously callers could not distinguish
      // "nothing to update" / "no such user" from a real DB error).
      if (
        error instanceof Error &&
        (error.message === 'No fields to update' ||
          error.message === 'User not found')
      ) {
        throw error;
      }
      logger.error('Error updating user', { error, userId: id });
      throw new Error('Failed to update user');
    }
  }

  /**
   * Update last-login timestamp. Best effort: failures are logged but
   * deliberately not thrown, since this must not block a login.
   */
  async updateLastLogin(id: string): Promise<void> {
    try {
      const query = `
        UPDATE users
        SET last_login_at = CURRENT_TIMESTAMP
        WHERE id = $1
      `;
      await pool.query(query, [id]);
      logger.debug('Updated last login timestamp', { userId: id });
    } catch (error) {
      logger.error('Error updating last login', { error, userId: id });
      // Don't throw - this is not critical
    }
  }

  /**
   * Store (or clear, by passing nulls) the user's refresh token.
   *
   * @throws Error('Failed to update refresh token') on query errors
   */
  async updateRefreshToken(
    id: string,
    refreshToken: string | null,
    expiresAt: Date | null
  ): Promise<void> {
    try {
      const query = `
        UPDATE users
        SET refresh_token = $1,
            refresh_token_expires_at = $2
        WHERE id = $3
      `;
      await pool.query(query, [refreshToken, expiresAt, id]);
      logger.debug('Updated refresh token', { userId: id });
    } catch (error) {
      logger.error('Error updating refresh token', { error, userId: id });
      throw new Error('Failed to update refresh token');
    }
  }

  /**
   * Check whether the user exists and is active.
   * Returns false for missing users AND on query errors (fail closed).
   */
  async isUserActive(id: string): Promise<boolean> {
    try {
      const query = `
        SELECT is_active
        FROM users
        WHERE id = $1
      `;
      const result = await pool.query(query, [id]);
      if (result.rows.length === 0) {
        return false;
      }
      return result.rows[0].is_active;
    } catch (error) {
      logger.error('Error checking user active status', { error, userId: id });
      return false;
    }
  }
}

export default new UserService();

View File

@@ -0,0 +1,50 @@
/** Token response body from Authentik's OAuth2 token endpoint. */
export interface TokenResponse {
  access_token: string;
  token_type: string; // typically "Bearer" — TODO confirm against Authentik
  expires_in: number; // access-token lifetime in seconds
  refresh_token?: string;
  id_token?: string;
}

/** OIDC userinfo claims returned by Authentik's userinfo endpoint. */
export interface UserInfo {
  sub: string; // stable subject identifier
  email: string;
  email_verified?: boolean;
  name?: string;
  preferred_username?: string;
  given_name?: string;
  family_name?: string;
  picture?: string;
  groups?: string[];
}

/** Body of the OAuth callback request (authorization-code flow). */
export interface AuthRequest {
  code: string;
  state?: string; // opaque CSRF-protection value, echoed by the provider
}

/** Claims embedded in this application's own access JWTs. */
export interface JwtPayload {
  userId: string; // UUID
  email: string;
  authentikSub: string;
  iat?: number; // issued-at, set by jwt.sign
  exp?: number; // expiry, set by jwt.sign
}

/** Claims embedded in this application's refresh JWTs. */
export interface RefreshTokenPayload {
  userId: string; // UUID
  email: string;
  iat?: number;
  exp?: number;
}

/** User identity attached to an authenticated request (camelCase view). */
export interface AuthenticatedUser {
  id: string; // UUID
  email: string;
  authentikSub: string;
  name?: string;
  username?: string;
  firstName?: string;
  lastName?: string;
  isActive: boolean;
}

View File

@@ -0,0 +1,92 @@
/**
 * User entity matching database schema.
 * NOTE(review): another user.model in this commit declares optional fields
 * instead of `| null` — the two definitions should be unified.
 */
export interface User {
  id: string;
  authentik_sub: string; // OIDC subject identifier from Authentik
  email: string;
  name: string | null;
  preferred_username: string | null;
  given_name: string | null;
  family_name: string | null;
  profile_picture_url: string | null;
  refresh_token: string | null; // sensitive — stripped by toUserResponse
  refresh_token_expires_at: Date | null;
  last_login_at: Date | null;
  created_at: Date;
  updated_at: Date;
  preferences: Record<string, any>; // JSONB, free-form
  is_active: boolean;
}

/**
 * DTO for creating a new user.
 * Timestamps and the active flag are assigned by the persistence layer.
 */
export interface CreateUserDTO {
  authentik_sub: string;
  email: string;
  name?: string;
  preferred_username?: string;
  given_name?: string;
  family_name?: string;
  profile_picture_url?: string;
  preferences?: Record<string, any>;
}

/**
 * DTO for updating an existing user.
 * Only present fields are written; identity fields are immutable.
 */
export interface UpdateUserDTO {
  name?: string;
  preferred_username?: string;
  given_name?: string;
  family_name?: string;
  profile_picture_url?: string;
  refresh_token?: string | null; // null clears the stored token
  refresh_token_expires_at?: Date | null;
  last_login_at?: Date;
  preferences?: Record<string, any>;
  is_active?: boolean;
}

/**
 * User response without sensitive fields
 * (authentik_sub, refresh_token, refresh_token_expires_at).
 * Used for API responses.
 */
export interface UserResponse {
  id: string;
  email: string;
  name: string | null;
  preferred_username: string | null;
  given_name: string | null;
  family_name: string | null;
  profile_picture_url: string | null;
  last_login_at: Date | null;
  created_at: Date;
  updated_at: Date;
  preferences: Record<string, any>;
  is_active: boolean;
}

/**
 * Convert User to UserResponse by removing sensitive fields.
 * Deliberately an explicit whitelist (not a rest-spread) so that any
 * unexpected extra properties on the row can never leak to clients.
 */
export function toUserResponse(user: User): UserResponse {
  return {
    id: user.id,
    email: user.email,
    name: user.name,
    preferred_username: user.preferred_username,
    given_name: user.given_name,
    family_name: user.family_name,
    profile_picture_url: user.profile_picture_url,
    last_login_at: user.last_login_at,
    created_at: user.created_at,
    updated_at: user.updated_at,
    preferences: user.preferences,
    is_active: user.is_active,
  };
}

View File

@@ -0,0 +1,58 @@
import winston from 'winston';
import path from 'path';

// Log directory resolved relative to the compiled module
// (e.g. dist/utils -> <project>/logs).
const logDir = path.join(__dirname, '../../logs');

// Rotation settings shared by all file transports — previously the raw
// magic numbers (5242880, 5) were duplicated per transport.
const MAX_LOG_FILE_SIZE = 5 * 1024 * 1024; // 5MB
const MAX_LOG_FILES = 5;

// Define log format: structured JSON with timestamps and error stacks.
const logFormat = winston.format.combine(
  winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
  winston.format.errors({ stack: true }),
  winston.format.splat(),
  winston.format.json()
);

// Console format for development: colorized one-liners with pretty metadata.
const consoleFormat = winston.format.combine(
  winston.format.colorize(),
  winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
  winston.format.printf(({ timestamp, level, message, ...meta }) => {
    let metaString = '';
    if (Object.keys(meta).length > 0) {
      metaString = JSON.stringify(meta, null, 2);
    }
    return `${timestamp} [${level}]: ${message} ${metaString}`;
  })
);

// Create the logger
const logger = winston.createLogger({
  level: process.env.LOG_LEVEL || 'info',
  format: logFormat,
  defaultMeta: { service: 'feuerwehr-dashboard-api' },
  transports: [
    // Write all logs with importance level of 'error' or less to error.log
    new winston.transports.File({
      filename: path.join(logDir, 'error.log'),
      level: 'error',
      maxsize: MAX_LOG_FILE_SIZE,
      maxFiles: MAX_LOG_FILES,
    }),
    // Write all logs to combined.log
    new winston.transports.File({
      filename: path.join(logDir, 'combined.log'),
      maxsize: MAX_LOG_FILE_SIZE,
      maxFiles: MAX_LOG_FILES,
    }),
  ],
});

// If not in production, log to the console as well
if (process.env.NODE_ENV !== 'production') {
  logger.add(
    new winston.transports.Console({
      format: consoleFormat,
    })
  );
}

export default logger;

30
backend/tsconfig.json Normal file
View File

@@ -0,0 +1,30 @@
{
"compilerOptions": {
"target": "ES2020",
"module": "commonjs",
"lib": ["ES2020"],
"outDir": "./dist",
"rootDir": "./src",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"resolveJsonModule": true,
"moduleResolution": "node",
"sourceMap": true,
"declaration": true,
"declarationMap": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"noImplicitReturns": true,
"noFallthroughCasesInSwitch": true,
"strictNullChecks": true,
"strictFunctionTypes": true,
"strictBindCallApply": true,
"strictPropertyInitialization": true,
"noImplicitThis": true,
"alwaysStrict": true
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist"]
}