initial
This commit is contained in:
25
backend/src/config/authentik.ts
Normal file
25
backend/src/config/authentik.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
import environment from './environment';
|
||||
|
||||
interface AuthentikConfig {
|
||||
issuer: string;
|
||||
clientId: string;
|
||||
clientSecret: string;
|
||||
redirectUri: string;
|
||||
tokenEndpoint: string;
|
||||
userInfoEndpoint: string;
|
||||
authorizeEndpoint: string;
|
||||
logoutEndpoint: string;
|
||||
}
|
||||
|
||||
const authentikConfig: AuthentikConfig = {
|
||||
issuer: environment.authentik.issuer,
|
||||
clientId: environment.authentik.clientId,
|
||||
clientSecret: environment.authentik.clientSecret,
|
||||
redirectUri: environment.authentik.redirectUri,
|
||||
tokenEndpoint: `${environment.authentik.issuer}token/`,
|
||||
userInfoEndpoint: `${environment.authentik.issuer}userinfo/`,
|
||||
authorizeEndpoint: `${environment.authentik.issuer}authorize/`,
|
||||
logoutEndpoint: `${environment.authentik.issuer}logout/`,
|
||||
};
|
||||
|
||||
export default authentikConfig;
|
||||
145
backend/src/config/database.ts
Normal file
145
backend/src/config/database.ts
Normal file
@@ -0,0 +1,145 @@
|
||||
import { Pool, PoolConfig } from 'pg';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import environment from './environment';
|
||||
import logger from '../utils/logger';
|
||||
|
||||
const poolConfig: PoolConfig = {
|
||||
host: environment.database.host,
|
||||
port: environment.database.port,
|
||||
database: environment.database.name,
|
||||
user: environment.database.user,
|
||||
password: environment.database.password,
|
||||
max: 20, // Maximum number of clients in the pool
|
||||
idleTimeoutMillis: 30000, // Close idle clients after 30 seconds
|
||||
connectionTimeoutMillis: 2000, // Return an error if connection takes longer than 2 seconds
|
||||
};
|
||||
|
||||
const pool = new Pool(poolConfig);
|
||||
|
||||
// Handle pool errors
|
||||
pool.on('error', (err) => {
|
||||
logger.error('Unexpected error on idle database client', err);
|
||||
});
|
||||
|
||||
// Test database connection
|
||||
export const testConnection = async (): Promise<boolean> => {
|
||||
try {
|
||||
const client = await pool.connect();
|
||||
const result = await client.query('SELECT NOW()');
|
||||
logger.info('Database connection successful', { timestamp: result.rows[0].now });
|
||||
client.release();
|
||||
return true;
|
||||
} catch (error) {
|
||||
logger.error('Failed to connect to database', { error });
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
// Graceful shutdown
|
||||
export const closePool = async (): Promise<void> => {
|
||||
try {
|
||||
await pool.end();
|
||||
logger.info('Database pool closed');
|
||||
} catch (error) {
|
||||
logger.error('Error closing database pool', { error });
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Check if a table exists in the database
|
||||
*/
|
||||
export const tableExists = async (tableName: string): Promise<boolean> => {
|
||||
try {
|
||||
const result = await pool.query(
|
||||
`SELECT EXISTS (
|
||||
SELECT FROM information_schema.tables
|
||||
WHERE table_schema = 'public'
|
||||
AND table_name = $1
|
||||
)`,
|
||||
[tableName]
|
||||
);
|
||||
return result.rows[0].exists;
|
||||
} catch (error) {
|
||||
logger.error(`Failed to check if table ${tableName} exists`, { error });
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Run database migrations from SQL files
|
||||
*/
|
||||
export const runMigrations = async (): Promise<void> => {
|
||||
const migrationsDir = path.join(__dirname, '../database/migrations');
|
||||
|
||||
try {
|
||||
// Check if migrations directory exists
|
||||
if (!fs.existsSync(migrationsDir)) {
|
||||
logger.warn('Migrations directory not found', { path: migrationsDir });
|
||||
return;
|
||||
}
|
||||
|
||||
// Read all migration files
|
||||
const files = fs.readdirSync(migrationsDir)
|
||||
.filter(file => file.endsWith('.sql'))
|
||||
.sort(); // Sort to ensure migrations run in order
|
||||
|
||||
if (files.length === 0) {
|
||||
logger.info('No migration files found');
|
||||
return;
|
||||
}
|
||||
|
||||
logger.info(`Found ${files.length} migration file(s)`);
|
||||
|
||||
// Create migrations tracking table if it doesn't exist
|
||||
await pool.query(`
|
||||
CREATE TABLE IF NOT EXISTS migrations (
|
||||
id SERIAL PRIMARY KEY,
|
||||
filename VARCHAR(255) UNIQUE NOT NULL,
|
||||
executed_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
||||
)
|
||||
`);
|
||||
|
||||
// Run each migration
|
||||
for (const file of files) {
|
||||
// Check if migration has already been run
|
||||
const result = await pool.query(
|
||||
'SELECT filename FROM migrations WHERE filename = $1',
|
||||
[file]
|
||||
);
|
||||
|
||||
if (result.rows.length > 0) {
|
||||
logger.info(`Migration ${file} already executed, skipping`);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Read and execute migration
|
||||
const filePath = path.join(migrationsDir, file);
|
||||
const sql = fs.readFileSync(filePath, 'utf8');
|
||||
|
||||
logger.info(`Running migration: ${file}`);
|
||||
|
||||
await pool.query('BEGIN');
|
||||
try {
|
||||
await pool.query(sql);
|
||||
await pool.query(
|
||||
'INSERT INTO migrations (filename) VALUES ($1)',
|
||||
[file]
|
||||
);
|
||||
await pool.query('COMMIT');
|
||||
logger.info(`Migration ${file} completed successfully`);
|
||||
} catch (error) {
|
||||
await pool.query('ROLLBACK');
|
||||
logger.error(`Migration ${file} failed`, { error });
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
logger.info('All migrations completed successfully');
|
||||
} catch (error) {
|
||||
logger.error('Failed to run migrations', { error });
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
export default pool;
|
||||
69
backend/src/config/environment.ts
Normal file
69
backend/src/config/environment.ts
Normal file
@@ -0,0 +1,69 @@
|
||||
import dotenv from 'dotenv';
|
||||
import path from 'path';
|
||||
|
||||
// Load environment-specific .env file
|
||||
const envFile = process.env.NODE_ENV === 'production'
|
||||
? '.env.production'
|
||||
: '.env.development';
|
||||
|
||||
dotenv.config({ path: path.resolve(__dirname, '../../', envFile) });
|
||||
|
||||
interface EnvironmentConfig {
|
||||
nodeEnv: string;
|
||||
port: number;
|
||||
database: {
|
||||
host: string;
|
||||
port: number;
|
||||
name: string;
|
||||
user: string;
|
||||
password: string;
|
||||
};
|
||||
jwt: {
|
||||
secret: string;
|
||||
expiresIn: string | number;
|
||||
};
|
||||
cors: {
|
||||
origin: string;
|
||||
};
|
||||
rateLimit: {
|
||||
windowMs: number;
|
||||
max: number;
|
||||
};
|
||||
authentik: {
|
||||
issuer: string;
|
||||
clientId: string;
|
||||
clientSecret: string;
|
||||
redirectUri: string;
|
||||
};
|
||||
}
|
||||
|
||||
const environment: EnvironmentConfig = {
|
||||
nodeEnv: process.env.NODE_ENV || 'development',
|
||||
port: parseInt(process.env.PORT || '3000', 10),
|
||||
database: {
|
||||
host: process.env.DB_HOST || 'localhost',
|
||||
port: parseInt(process.env.DB_PORT || '5432', 10),
|
||||
name: process.env.DB_NAME || 'feuerwehr_dev',
|
||||
user: process.env.DB_USER || 'dev_user',
|
||||
password: process.env.DB_PASSWORD || 'dev_password',
|
||||
},
|
||||
jwt: {
|
||||
secret: process.env.JWT_SECRET || 'your-secret-key-change-in-production',
|
||||
expiresIn: process.env.JWT_EXPIRES_IN || '24h',
|
||||
},
|
||||
cors: {
|
||||
origin: process.env.CORS_ORIGIN || 'http://localhost:3001',
|
||||
},
|
||||
rateLimit: {
|
||||
windowMs: parseInt(process.env.RATE_LIMIT_WINDOW_MS || '900000', 10), // 15 minutes
|
||||
max: parseInt(process.env.RATE_LIMIT_MAX || '100', 10),
|
||||
},
|
||||
authentik: {
|
||||
issuer: process.env.AUTHENTIK_ISSUER || 'https://authentik.yourdomain.com/application/o/your-app/',
|
||||
clientId: process.env.AUTHENTIK_CLIENT_ID || 'your_client_id_here',
|
||||
clientSecret: process.env.AUTHENTIK_CLIENT_SECRET || 'your_client_secret_here',
|
||||
redirectUri: process.env.AUTHENTIK_REDIRECT_URI || 'http://localhost:5173/auth/callback',
|
||||
},
|
||||
};
|
||||
|
||||
export default environment;
|
||||
Reference in New Issue
Block a user