const fs = require('fs');
const path = require('path');
const { createLogger, format, transports } = require('winston');
const crypto = require('crypto');
const DailyRotateFile = require('winston-daily-rotate-file');

const LOG_DIR = process.env.LOG_DIR || path.join(__dirname, '..', 'logs');
const NODE_ENV = process.env.NODE_ENV || 'development';
const LOG_LEVEL = process.env.LOG_LEVEL || 'info';
const SERVICE_NAME = 'central-server';

// Ensure the log directory exists before any file transport tries to write.
try {
  if (!fs.existsSync(LOG_DIR)) {
    fs.mkdirSync(LOG_DIR, { recursive: true });
  }
} catch (e) {
  // Non-fatal (e.g. permission issues): file transports may fail later,
  // but console logging still works.
  console.error('Failed to ensure log directory', e);
}

// One JSON object per line, to be FluentBit/OpenSearch-friendly.
const jsonFormat = format.combine(
  format.timestamp(),
  format.errors({ stack: true }),
  format.splat(),
  format.json()
);

// Rewrites info.timestamp to `DD/MM/YYYY HH:mm:ss` for human-readable
// console output (the JSON format keeps the ISO timestamp instead).
const customTimestampFormat = format((info) => {
  const date = new Date(info.timestamp || Date.now());
  const pad = (n) => (n < 10 ? '0' + n : n);
  const day = pad(date.getDate());
  const month = pad(date.getMonth() + 1); // getMonth() is 0-indexed
  const year = date.getFullYear();
  const hours = pad(date.getHours());
  const minutes = pad(date.getMinutes());
  const seconds = pad(date.getSeconds());
  info.timestamp = `${day}/${month}/${year} ${hours}:${minutes}:${seconds}`;
  return info;
});

const loggerTransports = [
  new transports.Console({
    level: LOG_LEVEL,
    format: format.combine(
      format.timestamp(),
      customTimestampFormat(),
      format.colorize({ all: NODE_ENV === 'development' }),
      // Human-friendly single line in dev; structured JSON in prod.
      NODE_ENV === 'development'
        ? format.printf(({ timestamp, level, message, ...meta }) => {
            const metaStr = Object.keys(meta).length ? JSON.stringify(meta) : '';
            return `${timestamp} ${level}: ${message} ${metaStr}`;
          })
        : jsonFormat
    )
  })
];

// Daily-rotating file transports only in production.
if (NODE_ENV === 'production') {
  loggerTransports.push(
    new DailyRotateFile({
      filename: path.join(LOG_DIR, 'error-%DATE%.log'),
      datePattern: 'YYYY-MM-DD',
      level: 'error',
      format: jsonFormat,
      maxSize: '5m',
      maxFiles: '7d',
      zippedArchive: true
    }),
    new DailyRotateFile({
      filename: path.join(LOG_DIR, 'combined-%DATE%.log'),
      datePattern: 'YYYY-MM-DD',
      level: LOG_LEVEL,
      format: jsonFormat,
      maxSize: '10m',
      maxFiles: '7d',
      zippedArchive: true
    })
  );
}

const logger = createLogger({
  level: LOG_LEVEL,
  defaultMeta: { service: SERVICE_NAME, env: NODE_ENV },
  transports: loggerTransports,
  exitOnError: false
});

// Capture console.* and route to winston so third-party libs are logged.
// Originals are kept so internal failures can still be reported without
// recursing back through the patched console.
const _origConsole = {
  debug: console.debug,
  info: console.info,
  warn: console.warn,
  error: console.error,
  log: console.log
};
['debug', 'info', 'warn', 'error', 'log'].forEach((level) => {
  console[level] = (...args) => {
    try {
      const msg = args
        .map((a) => {
          if (typeof a === 'string') return a;
          if (a instanceof Error) return a.stack || a.message;
          try {
            return JSON.stringify(a);
          } catch (e) {
            // Circular structures etc. fall back to String().
            return String(a);
          }
        })
        .join(' ');
      // winston has no 'log' level; map console.log -> info.
      const winLevel = level === 'log' ? 'info' : level;
      if (logger[winLevel]) logger[winLevel](msg);
      else logger.info(msg);
    } catch (e) {
      // If winston itself fails, fall back to the real console.
      _origConsole[level](...args);
    }
  };
});

/**
 * Change the log level at runtime on the logger and every transport.
 * @param {string} level - winston level name (e.g. 'debug', 'info').
 * @returns {boolean} true on success, false if the update threw.
 */
function setLogLevel(level) {
  try {
    logger.level = level;
    logger.transports.forEach((t) => {
      if (typeof t.level !== 'undefined') t.level = level;
    });
    logger.info('logLevel:changed', { level });
    return true;
  } catch (e) {
    _origConsole.error('Failed to set log level', e);
    return false;
  }
}

/**
 * Express middleware: attaches a request id (propagated from `x-request-id`
 * or freshly generated), logs request start, and logs completion with
 * duration. Aborted connections are logged as warnings.
 * @param {import('http').IncomingMessage} req
 * @param {import('http').ServerResponse} res
 * @param {Function} next
 */
function requestLogger(req, res, next) {
  const requestId = req.headers['x-request-id'] || crypto.randomBytes(8).toString('hex');
  req.id = requestId;
  const start = process.hrtime();
  const elapsedMs = () => {
    const [s, ns] = process.hrtime(start);
    return Math.round(s * 1e3 + ns / 1e6);
  };

  logger.info('request:start', {
    requestId,
    method: req.method,
    url: req.originalUrl || req.url,
    ip: req.ip,
    headers: {
      referer: req.get('referer') || '',
      origin: req.get('origin') || '',
      // Never log the credential itself, only its presence.
      authorization: req.get('authorization') ? 'present' : 'absent'
    }
  });

  let finished = false;
  res.on('finish', () => {
    finished = true;
    logger.info('request:finish', {
      requestId,
      method: req.method,
      url: req.originalUrl || req.url,
      status: res.statusCode,
      durationMs: elapsedMs(),
      ip: req.ip,
      user: req.user ? { id: req.user.id, email: req.user.email } : undefined
    });
  });

  // On modern Node 'close' also fires after 'finish' for every normal
  // response; only warn when the connection was aborted before the
  // response completed, otherwise every request would be double-logged.
  res.on('close', () => {
    if (finished) return;
    logger.warn('request:closed', {
      requestId,
      method: req.method,
      url: req.originalUrl || req.url,
      status: res.statusCode,
      durationMs: elapsedMs(),
      ip: req.ip
    });
  });

  next();
}

/**
 * Change the level of a subset of transports, matched by constructor name
 * (e.g. 'console', 'file', 'dailyrotatefile'), transport name, or 'all'.
 * @param {string} transportIdentifier - case-insensitive match target.
 * @param {string} level - winston level name.
 * @returns {boolean} true if at least one transport matched and was updated.
 */
function setTransportLevel(transportIdentifier, level) {
  try {
    let matched = false;
    const id = (transportIdentifier || 'all').toString().toLowerCase();
    logger.transports.forEach((t) => {
      const ctorName =
        t.constructor && t.constructor.name ? t.constructor.name.toLowerCase() : '';
      const tName = (t.name || '').toString().toLowerCase();
      // Match "all", the transport class name, or the transport's own name;
      // 'file' and 'console' match via the class-name substring check.
      if (id === 'all' || ctorName.includes(id) || tName.includes(id)) {
        if (typeof t.level !== 'undefined') {
          t.level = level;
          matched = true;
        }
      }
    });
    if (matched) {
      logger.info('logLevel:transport_changed', { transport: transportIdentifier, level });
      return true;
    } else {
      _origConsole.warn('setTransportLevel: no transport matched', transportIdentifier);
      return false;
    }
  } catch (e) {
    _origConsole.error('Failed to set transport level', e);
    return false;
  }
}

/**
 * Snapshot of the current logger level and each transport's level.
 * @returns {{ loggerLevel: string, transports: Array<{name: string, level: string|null}> }|null}
 *   null if inspection threw.
 */
function getLoggerLevels() {
  try {
    return {
      loggerLevel: logger.level,
      transports: logger.transports.map((t) => ({
        name: (t.constructor && t.constructor.name) || t.name || 'unknown',
        level: typeof t.level !== 'undefined' ? t.level : null
      }))
    };
  } catch (e) {
    _origConsole.error('Failed to read logger levels', e);
    return null;
  }
}

module.exports = { logger, requestLogger, setLogLevel, setTransportLevel, getLoggerLevels };