Initial commit: Telegram Management System
Some checks failed
Deploy / deploy (push) Has been cancelled
Full-stack web application for Telegram management
- Frontend: Vue 3 + Vben Admin
- Backend: NestJS
- Features: User management, group broadcast, statistics

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
53    marketing-agent/services/api-gateway/Dockerfile    Normal file
@@ -0,0 +1,53 @@
# Build stage
FROM node:18-alpine AS builder

WORKDIR /app

# Copy package files
COPY package*.json ./

# Install all dependencies (including dev) for building
RUN npm ci

# Copy source code
COPY . .

# Production stage
FROM node:18-alpine

WORKDIR /app

# Install dumb-init for proper signal handling
RUN apk add --no-cache dumb-init

# Create non-root user
RUN addgroup -g 1001 -S nodejs && \
    adduser -S nodejs -u 1001

# Copy package files and install production dependencies only
COPY package*.json ./
RUN npm ci --only=production && \
    npm cache clean --force

# Copy application code
COPY --chown=nodejs:nodejs . .

# Create necessary directories with proper permissions
RUN mkdir -p logs uploads && \
    chown -R nodejs:nodejs logs uploads

# Switch to non-root user
USER nodejs

# Expose port
EXPOSE 3000

# Health check
HEALTHCHECK --interval=30s --timeout=3s --start-period=40s --retries=3 \
    CMD node healthcheck.js || exit 1

# Use dumb-init to handle signals properly
ENTRYPOINT ["dumb-init", "--"]

# Start application
CMD ["node", "src/app.js"]
28    marketing-agent/services/api-gateway/healthcheck.js    Normal file
@@ -0,0 +1,28 @@
// ESM import: package.json declares "type": "module", so require() is not available here
import http from 'http';

const options = {
  hostname: 'localhost',
  port: 3000,
  path: '/health',
  method: 'GET',
  timeout: 2000
};

const req = http.request(options, (res) => {
  if (res.statusCode === 200) {
    process.exit(0);
  } else {
    process.exit(1);
  }
});

req.on('error', () => {
  process.exit(1);
});

req.on('timeout', () => {
  req.abort();
  process.exit(1);
});

req.end();
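A note on the probe above: since the base image is node:18-alpine, the same check could also be written with the built-in global fetch instead of http.request. A minimal sketch, not part of the commit, assuming the gateway keeps serving /health on port 3000:

// healthcheck-fetch.js — illustrative alternative only, not in this diff.
// Exits 0 when /health answers 200 within 2 seconds, 1 otherwise.
const controller = new AbortController();
const timer = setTimeout(() => controller.abort(), 2000);

try {
  const res = await fetch('http://localhost:3000/health', { signal: controller.signal });
  clearTimeout(timer);
  process.exit(res.status === 200 ? 0 : 1);
} catch {
  process.exit(1);
}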
56    marketing-agent/services/api-gateway/package.json    Normal file
@@ -0,0 +1,56 @@
{
  "name": "api-gateway",
  "version": "1.0.0",
  "description": "API Gateway for Marketing Agent System",
  "main": "src/app.js",
  "type": "module",
  "scripts": {
    "start": "node src/app.js",
    "dev": "nodemon src/app.js",
    "test": "jest"
  },
  "dependencies": {
    "express": "^4.18.2",
    "http-proxy-middleware": "^2.0.6",
    "helmet": "^7.0.0",
    "cors": "^2.8.5",
    "morgan": "^1.10.0",
    "express-rate-limit": "^6.7.0",
    "rate-limit-redis": "^4.2.0",
    "redis": "^4.6.5",
    "ioredis": "^5.3.1",
    "jsonwebtoken": "^9.0.0",
    "joi": "^17.9.1",
    "winston": "^3.8.2",
    "winston-daily-rotate-file": "^4.7.1",
    "prom-client": "^14.2.0",
    "axios": "^1.4.0",
    "uuid": "^9.0.0",
    "dotenv": "^16.0.3",
    "swagger-ui-express": "^4.6.2",
    "swagger-jsdoc": "^6.2.8",
    "node-cache": "^5.1.2",
    "opossum": "^8.1.3",
    "express-request-id": "^3.0.0",
    "mongoose": "^7.4.0",
    "bcryptjs": "^2.4.3",
    "express-validator": "^7.0.1",
    "isomorphic-dompurify": "^2.3.0",
    "hpp": "^0.2.3",
    "express-mongo-sanitize": "^2.2.0",
    "express-session": "^1.17.3",
    "connect-redis": "^7.1.0",
    "speakeasy": "^2.0.0",
    "qrcode": "^1.5.3",
    "archiver": "^6.0.1",
    "node-cron": "^3.0.2",
    "json2csv": "^6.0.0",
    "csv-parser": "^3.0.0",
    "exceljs": "^4.4.0",
    "multer": "^1.4.5-lts.1"
  },
  "devDependencies": {
    "nodemon": "^2.0.22",
    "jest": "^29.5.0"
  }
}
101    marketing-agent/services/api-gateway/scripts/setup-security.js    Normal file
@@ -0,0 +1,101 @@
import mongoose from 'mongoose';
import bcrypt from 'bcryptjs';
import { config } from '../src/config/index.js';
import { User } from '../src/models/User.js';
import { Role } from '../src/models/Role.js';
import { logger } from '../src/utils/logger.js';

async function setupSecurity() {
  try {
    // Connect to MongoDB
    await mongoose.connect(config.mongodb.uri);
    logger.info('Connected to MongoDB');

    // Create default roles
    logger.info('Creating default roles...');
    await Role.createDefaultRoles();
    logger.info('Default roles created');

    // Check if admin user exists
    const adminExists = await User.findOne({ username: 'admin' });

    if (!adminExists) {
      // Create admin user
      const adminPassword = process.env.ADMIN_PASSWORD || 'Admin@123456';

      const adminUser = new User({
        username: 'admin',
        email: 'admin@marketing-agent.com',
        password: adminPassword,
        role: 'admin',
        isActive: true,
        permissions: [{
          resource: '*',
          actions: ['create', 'read', 'update', 'delete', 'execute']
        }]
      });

      await adminUser.save();
      logger.info('Admin user created');
      logger.info('Username: admin');
      logger.info('Password: ' + adminPassword);
      logger.info('Please change the password after first login');
    } else {
      logger.info('Admin user already exists');
    }

    // Create sample users for testing
    const sampleUsers = [
      {
        username: 'manager',
        email: 'manager@marketing-agent.com',
        password: 'Manager@123',
        role: 'manager'
      },
      {
        username: 'operator',
        email: 'operator@marketing-agent.com',
        password: 'Operator@123',
        role: 'operator'
      },
      {
        username: 'viewer',
        email: 'viewer@marketing-agent.com',
        password: 'Viewer@123',
        role: 'viewer'
      }
    ];

    for (const userData of sampleUsers) {
      const exists = await User.findOne({ username: userData.username });
      if (!exists) {
        const user = new User(userData);
        await user.save();
        logger.info(`${userData.username} user created`);
      }
    }

    // Create security indices
    logger.info('Creating security indices...');

    // Index for API key lookups
    await mongoose.connection.collection('users').createIndex({ 'apiKeys.key': 1 });

    // Index for login rate limiting
    await mongoose.connection.collection('users').createIndex({
      username: 1,
      'metadata.lastLoginAttempt': -1
    });

    logger.info('Security setup completed successfully');

  } catch (error) {
    logger.error('Security setup failed:', error);
    process.exit(1);
  } finally {
    await mongoose.disconnect();
  }
}

// Run the setup
setupSecurity();
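The admin document above grants resource '*' with every action; the actual matching rules live in the User model, which is not part of this excerpt. As a rough illustration only, the assumed wildcard semantics could look like the following hypothetical helper (not the model's real method):

// Illustrative sketch of the assumed permission check; the real logic
// belongs to User.hasPermission() in src/models/User.js (not shown in this diff).
function hasPermission(permissions, resource, action) {
  return permissions.some(p =>
    (p.resource === '*' || p.resource === resource) &&
    p.actions.includes(action)
  );
}

// Example: hasPermission(adminUser.permissions, 'campaigns', 'delete') === true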
213    marketing-agent/services/api-gateway/src/app.js    Normal file
@@ -0,0 +1,213 @@
import express from 'express';
import cors from 'cors';
import helmet from 'helmet';
import morgan from 'morgan';
import swaggerUi from 'swagger-ui-express';
import { v4 as uuidv4 } from 'uuid';
import { config } from './config/index.js';
import { logger, logRequest } from './utils/logger.js';
import { swaggerSpec } from './config/swagger.js';
import { globalRateLimiter, strictRateLimiter, dynamicRateLimiter } from './middleware/rateLimiter.js';
import { serviceDiscovery } from './services/serviceDiscovery.js';
import { applySecurityMiddleware, errorLogger } from './middleware/security.js';
import { authenticateApiKey, logApiKeyUsage } from './middleware/apiKey.js';
import { sanitizeBody, preventSqlInjection, preventNoSqlInjection, preventCommandInjection, validateContentType } from './middleware/validation.js';
import { authenticate } from './middleware/auth.js';
import { tenantMiddleware, allowCrossTenant } from './middleware/tenantMiddleware.js';

// Import routes
import authRoutes from './routes/auth.js';
import proxyRoutes from './routes/proxy.js';
import mockRoutes from './routes/mock.js';
import usersRoutes from './routes/users.js';
import monitoringRoutes from './routes/monitoring.js';
import backupRoutes from './routes/backup.js';
import dataExchangeRoutes from './routes/dataExchange.js';
import tenantRoutes from './routes/tenants.js';

const app = express();

// Apply comprehensive security middleware
applySecurityMiddleware(app);

// Content type validation
app.use(validateContentType(['application/json', 'application/x-www-form-urlencoded']));

// Input sanitization and injection prevention
app.use(sanitizeBody);
app.use(preventSqlInjection);
app.use(preventNoSqlInjection);
app.use(preventCommandInjection);

// API key authentication (runs before JWT auth)
app.use(authenticateApiKey);
app.use(logApiKeyUsage);

// Tenant middleware - applies to all routes
app.use(tenantMiddleware);
app.use(allowCrossTenant);

// Logging
app.use(morgan('combined', { stream: logger.stream }));

// Response time tracking and metrics
app.use((req, res, next) => {
  const startTime = Date.now();
  res.on('finish', async () => {
    const duration = Date.now() - startTime;
    logRequest(req, res, duration);

    // Record metrics (dynamic import: require() is unavailable in ES modules)
    const { recordHttpMetrics } = await import('./services/monitoring.js');
    recordHttpMetrics(req, res, duration);
  });
  next();
});

// API Documentation
app.use('/api-docs', swaggerUi.serve, swaggerUi.setup(swaggerSpec, {
  customCss: '.swagger-ui .topbar { display: none }',
  customSiteTitle: 'Telegram Marketing API Docs'
}));

// Health check
app.get('/health', async (req, res) => {
  const health = serviceDiscovery.getAggregatedHealth();
  const status = health.status === 'healthy' ? 200 : 503;

  res.status(status).json({
    status: health.status,
    service: 'api-gateway',
    version: '1.0.0',
    timestamp: new Date().toISOString(),
    uptime: process.uptime(),
    services: health
  });
});

// Metrics endpoint
app.get('/metrics', async (req, res) => {
  try {
    const promClient = await import('prom-client');
    const register = new promClient.Registry();

    // Collect default metrics
    promClient.collectDefaultMetrics({ register });

    // Add custom metrics
    const httpRequestDuration = new promClient.Histogram({
      name: 'api_gateway_http_request_duration_seconds',
      help: 'Duration of HTTP requests in seconds',
      labelNames: ['method', 'route', 'status_code'],
      buckets: [0.1, 0.5, 1, 2, 5]
    });
    register.registerMetric(httpRequestDuration);

    const metrics = await register.metrics();
    res.set('Content-Type', register.contentType);
    res.send(metrics);
  } catch (error) {
    logger.error('Failed to get metrics:', error);
    res.status(500).send('Failed to get metrics');
  }
});

// API documentation JSON endpoint
app.get('/api-docs.json', (req, res) => {
  res.setHeader('Content-Type', 'application/json');
  res.send(swaggerSpec);
});

// Routes with specific rate limiting
app.use('/api/v1/auth', strictRateLimiter, authRoutes);
app.use('/api/v1/tenants', tenantRoutes);
app.use('/api/v1/users', authenticate, usersRoutes);
app.use('/api/v1/monitoring', monitoringRoutes);
app.use('/api/v1/backup', authenticate, backupRoutes);
app.use('/api/v1/data-exchange', authenticate, dataExchangeRoutes);

// Apply dynamic rate limiting based on user tier
app.use('/api/v1', dynamicRateLimiter);

// Proxy routes with global rate limiting
app.use('/api/v1', globalRateLimiter, proxyRoutes);

// 404 handler
app.use((req, res) => {
  res.status(404).json({
    success: false,
    error: 'Not found',
    path: req.path
  });
});

// Error logging middleware
app.use(errorLogger);

// Error handler
app.use((err, req, res, next) => {
  // Don't leak error details in production
  const isDevelopment = config.environment === 'development';
  const status = err.status || err.statusCode || 500;
  const message = isDevelopment ? err.message : 'Internal server error';

  res.status(status).json({
    success: false,
    error: message,
    requestId: req.id,
    ...(isDevelopment && { stack: err.stack })
  });
});

// Start server
const PORT = config.port;
app.listen(PORT, async () => {
  logger.info(`API Gateway running on port ${PORT}`);
  logger.info(`API Documentation available at http://localhost:${PORT}/api-docs`);

  // Initialize monitoring
  const { initializeMonitoring, checkServiceHealth } = await import('./services/monitoring.js');
  initializeMonitoring();

  // Start service health checks
  setInterval(() => {
    checkServiceHealth(config.services);
  }, config.healthCheck.interval);

  // Initialize scheduler
  const { schedulerService } = await import('./services/scheduler.js');
  await schedulerService.initialize();
  logger.info('Scheduler service initialized');

  // Create backup directory if it doesn't exist
  const fs = await import('fs/promises');
  await fs.mkdir('/backups', { recursive: true });
  logger.info('Backup directory ready');
});

// Graceful shutdown
process.on('SIGTERM', async () => {
  logger.info('SIGTERM received, shutting down gracefully');

  try {
    // Close cache connections
    const { cache } = await import('./utils/cache.js');
    await cache.close();

    process.exit(0);
  } catch (error) {
    logger.error('Error during shutdown:', error);
    process.exit(1);
  }
});

process.on('unhandledRejection', (reason, promise) => {
  logger.error('Unhandled Rejection at:', promise, 'reason:', reason);
});

process.on('uncaughtException', (error) => {
  logger.error('Uncaught Exception:', error);
  process.exit(1);
});

export default app;
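The route modules mounted above (./routes/*.js) are added elsewhere in this commit. As a rough sketch of the shape the gateway expects — an Express router exported as default — something like the following would slot into app.use('/api/v1/monitoring', ...). The handler body is illustrative only, not the commit's actual routes/monitoring.js:

// Hypothetical routes module in the style app.js mounts (illustration only).
import { Router } from 'express';
import { serviceDiscovery } from '../services/serviceDiscovery.js';

const router = Router();

// GET /api/v1/monitoring/services – aggregated health of downstream services
router.get('/services', (req, res) => {
  res.json({
    success: true,
    data: serviceDiscovery.getAggregatedHealth()
  });
});

export default router;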
176    marketing-agent/services/api-gateway/src/config/index.js    Normal file
@@ -0,0 +1,176 @@
import dotenv from 'dotenv';
import { securityConfig } from './security.js';

dotenv.config();

export const config = {
  port: process.env.PORT || 3000,

  // Service URLs
  services: {
    orchestrator: {
      url: process.env.ORCHESTRATOR_URL || 'http://orchestrator:3001',
      timeout: 30000
    },
    claudeAgent: {
      url: process.env.CLAUDE_AGENT_URL || 'http://claude-agent:3002',
      timeout: 60000 // Longer timeout for AI operations
    },
    gramjsAdapter: {
      url: process.env.GRAMJS_ADAPTER_URL || 'http://gramjs-adapter:3003',
      timeout: 30000
    },
    safetyGuard: {
      url: process.env.SAFETY_GUARD_URL || 'http://safety-guard:3004',
      timeout: 10000
    },
    analytics: {
      url: process.env.ANALYTICS_URL || 'http://analytics:3005',
      timeout: 20000
    },
    complianceGuard: {
      url: process.env.COMPLIANCE_GUARD_URL || 'http://compliance-guard:3006',
      timeout: 15000
    },
    abTesting: {
      url: process.env.AB_TESTING_URL || 'http://ab-testing:3007',
      timeout: 10000
    },
    workflow: {
      url: process.env.WORKFLOW_URL || 'http://localhost:3008',
      timeout: 30000
    },
    webhook: {
      url: process.env.WEBHOOK_URL || 'http://localhost:3009',
      timeout: 30000
    },
    template: {
      url: process.env.TEMPLATE_URL || 'http://localhost:3010',
      timeout: 30000
    },
    i18n: {
      url: process.env.I18N_URL || 'http://localhost:3011',
      timeout: 30000
    },
    userManagement: {
      url: process.env.USER_MANAGEMENT_URL || 'http://localhost:3012',
      timeout: 30000
    },
    scheduler: {
      url: process.env.SCHEDULER_URL || 'http://localhost:3013',
      timeout: 30000
    },
    telegramSystem: {
      url: process.env.TELEGRAM_SYSTEM_URL || 'http://localhost:8080',
      timeout: 30000
    },
    logging: {
      url: process.env.LOGGING_URL || 'http://localhost:3014',
      timeout: 10000
    },
    billing: {
      url: process.env.BILLING_URL || 'http://localhost:3010',
      timeout: 30000
    }
  },

  // JWT Configuration
  jwt: {
    secret: process.env.JWT_SECRET || 'your-secret-key',
    expiresIn: process.env.JWT_EXPIRES_IN || '24h',
    refreshExpiresIn: process.env.JWT_REFRESH_EXPIRES_IN || '7d'
  },

  // Rate Limiting
  rateLimiting: securityConfig.rateLimiting.global,

  // Redis Configuration
  redis: {
    host: process.env.REDIS_HOST || 'redis',
    port: process.env.REDIS_PORT || 6379,
    password: process.env.REDIS_PASSWORD || '',
    ttl: 3600 // 1 hour cache TTL
  },

  // MongoDB Configuration
  mongodb: {
    uri: process.env.MONGODB_URI || 'mongodb://mongodb:27017/marketing_agent'
  },

  // CORS Configuration
  cors: {
    ...securityConfig.cors,
    origin: process.env.CORS_ORIGINS ?
      process.env.CORS_ORIGINS.split(',') :
      securityConfig.cors.allowedOrigins
  },

  // Logging
  logging: {
    level: process.env.LOG_LEVEL || 'info',
    format: process.env.LOG_FORMAT || 'json'
  },

  // Circuit Breaker Configuration
  circuitBreaker: {
    timeout: 10000, // 10 seconds
    errorThreshold: 50, // 50% error rate
    resetTimeout: 30000 // 30 seconds
  },

  // API Documentation
  swagger: {
    definition: {
      openapi: '3.0.0',
      info: {
        title: 'Marketing Agent API Gateway',
        version: '1.0.0',
        description: 'Unified API Gateway for Telegram Marketing Agent System'
      },
      servers: [
        {
          url: process.env.API_BASE_URL || 'http://localhost:3000',
          description: 'Development server'
        }
      ],
      components: {
        securitySchemes: {
          bearerAuth: {
            type: 'http',
            scheme: 'bearer',
            bearerFormat: 'JWT'
          },
          apiKey: {
            type: 'apiKey',
            in: 'header',
            name: 'X-API-Key'
          }
        }
      }
    },
    apis: ['./src/routes/*.js']
  },

  // Health Check
  healthCheck: {
    interval: 30000, // 30 seconds
    timeout: 5000,
    unhealthyThreshold: 3,
    healthyThreshold: 2
  },

  // Request Configuration
  request: {
    maxBodySize: '10mb',
    timeout: 30000 // Default 30 seconds
  },

  // Security Configuration
  security: securityConfig,

  // Environment
  environment: process.env.NODE_ENV || 'development',

  // Trust proxy setting
  trustProxy: securityConfig.ipFiltering.trustProxy
};
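The per-service url/timeout pairs and the circuitBreaker block are consumed by the proxy layer, which is not shown in this excerpt. A minimal sketch of how they could be wired together with axios and opossum (both listed in package.json); the call sites below are assumptions, not the commit's own proxy code:

// Illustrative only: forwarding a call to the analytics service behind a
// circuit breaker built from config.circuitBreaker (not the commit's proxy code).
import axios from 'axios';
import CircuitBreaker from 'opossum';
import { config } from './index.js';

const { url, timeout } = config.services.analytics;

const callAnalytics = (path) => axios.get(`${url}${path}`, { timeout });

const breaker = new CircuitBreaker(callAnalytics, {
  timeout: config.circuitBreaker.timeout,                          // fail the call after 10s
  errorThresholdPercentage: config.circuitBreaker.errorThreshold,  // open at 50% errors
  resetTimeout: config.circuitBreaker.resetTimeout                 // half-open again after 30s
});

// breaker.fire('/reports/daily').then(r => console.log(r.data));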
276    marketing-agent/services/api-gateway/src/config/monitoring.js    Normal file
@@ -0,0 +1,276 @@
export const monitoringConfig = {
  // Alert thresholds
  alerts: {
    // HTTP errors
    errorRate: {
      threshold: 0.05, // 5% error rate
      window: 300000, // 5 minutes
      severity: 'critical'
    },
    responseTime: {
      p95: {
        threshold: 1000, // 1 second
        window: 60000, // 1 minute
        severity: 'warning'
      },
      p99: {
        threshold: 2000, // 2 seconds
        window: 60000, // 1 minute
        severity: 'critical'
      }
    },

    // System resources
    memory: {
      usage: {
        threshold: 0.9, // 90% usage
        window: 60000, // 1 minute
        severity: 'critical'
      },
      growth: {
        threshold: 0.1, // 10% growth per hour
        window: 3600000, // 1 hour
        severity: 'warning'
      }
    },
    cpu: {
      usage: {
        threshold: 0.8, // 80% usage
        window: 300000, // 5 minutes
        severity: 'warning'
      },
      sustained: {
        threshold: 0.9, // 90% usage
        window: 600000, // 10 minutes
        severity: 'critical'
      }
    },

    // Queue health
    queue: {
      backlog: {
        threshold: 1000, // 1000 items
        window: 600000, // 10 minutes
        severity: 'warning'
      },
      deadLetter: {
        threshold: 100, // 100 failed items
        window: 3600000, // 1 hour
        severity: 'critical'
      },
      processingTime: {
        threshold: 30000, // 30 seconds
        window: 300000, // 5 minutes
        severity: 'warning'
      }
    },

    // Authentication
    auth: {
      failures: {
        threshold: 10, // 10 failures
        window: 300000, // 5 minutes
        severity: 'warning'
      },
      bruteForce: {
        threshold: 50, // 50 attempts
        window: 3600000, // 1 hour
        severity: 'critical'
      }
    },

    // Rate limiting
    rateLimit: {
      violations: {
        threshold: 100, // 100 violations
        window: 300000, // 5 minutes
        severity: 'warning'
      }
    },

    // Service health
    service: {
      down: {
        threshold: 3, // 3 consecutive failures
        window: 180000, // 3 minutes
        severity: 'critical'
      },
      degraded: {
        threshold: 5, // 5 errors
        window: 300000, // 5 minutes
        severity: 'warning'
      }
    },

    // Business metrics
    business: {
      campaignFailure: {
        threshold: 0.1, // 10% failure rate
        window: 3600000, // 1 hour
        severity: 'critical'
      },
      messageDeliveryFailure: {
        threshold: 0.05, // 5% failure rate
        window: 1800000, // 30 minutes
        severity: 'warning'
      }
    }
  },

  // Notification channels
  notifications: {
    email: {
      enabled: process.env.ALERT_EMAIL_ENABLED === 'true',
      smtp: {
        host: process.env.SMTP_HOST,
        port: process.env.SMTP_PORT || 587,
        secure: process.env.SMTP_SECURE === 'true',
        auth: {
          user: process.env.SMTP_USER,
          pass: process.env.SMTP_PASS
        }
      },
      recipients: {
        critical: process.env.ALERT_EMAIL_CRITICAL?.split(',') || [],
        warning: process.env.ALERT_EMAIL_WARNING?.split(',') || [],
        info: process.env.ALERT_EMAIL_INFO?.split(',') || []
      }
    },

    slack: {
      enabled: process.env.ALERT_SLACK_ENABLED === 'true',
      webhook: process.env.SLACK_WEBHOOK_URL,
      channels: {
        critical: process.env.SLACK_CHANNEL_CRITICAL || '#alerts-critical',
        warning: process.env.SLACK_CHANNEL_WARNING || '#alerts-warning',
        info: process.env.SLACK_CHANNEL_INFO || '#alerts-info'
      }
    },

    webhook: {
      enabled: process.env.ALERT_WEBHOOK_ENABLED === 'true',
      urls: {
        critical: process.env.WEBHOOK_URL_CRITICAL,
        warning: process.env.WEBHOOK_URL_WARNING,
        info: process.env.WEBHOOK_URL_INFO
      }
    },

    telegram: {
      enabled: process.env.ALERT_TELEGRAM_ENABLED === 'true',
      botToken: process.env.TELEGRAM_BOT_TOKEN,
      chats: {
        critical: process.env.TELEGRAM_CHAT_CRITICAL,
        warning: process.env.TELEGRAM_CHAT_WARNING,
        info: process.env.TELEGRAM_CHAT_INFO
      }
    }
  },

  // Metrics collection
  metrics: {
    // Prometheus configuration
    prometheus: {
      enabled: true,
      port: process.env.METRICS_PORT || 9090,
      path: '/metrics',
      defaultLabels: {
        service: 'marketing-agent',
        environment: process.env.NODE_ENV || 'development'
      }
    },

    // StatsD configuration (optional)
    statsd: {
      enabled: process.env.STATSD_ENABLED === 'true',
      host: process.env.STATSD_HOST || 'localhost',
      port: process.env.STATSD_PORT || 8125,
      prefix: 'marketing_agent.'
    },

    // Custom metrics export
    export: {
      interval: 60000, // Export every minute
      retention: 86400000, // Keep for 24 hours
      aggregation: {
        percentiles: [0.5, 0.75, 0.9, 0.95, 0.99],
        intervals: ['1m', '5m', '15m', '1h', '24h']
      }
    }
  },

  // Logging configuration
  logging: {
    // Log aggregation
    aggregation: {
      enabled: true,
      maxSize: 10000, // Max logs in memory
      flushInterval: 5000, // Flush every 5 seconds
      retention: 604800000 // Keep for 7 days
    },

    // Log levels for different components
    levels: {
      http: process.env.LOG_LEVEL_HTTP || 'info',
      business: process.env.LOG_LEVEL_BUSINESS || 'info',
      system: process.env.LOG_LEVEL_SYSTEM || 'warn',
      security: process.env.LOG_LEVEL_SECURITY || 'info'
    },

    // Sensitive data filtering
    filters: {
      patterns: [
        /password/i,
        /token/i,
        /secret/i,
        /api[_-]?key/i,
        /authorization/i
      ],
      replacement: '[REDACTED]'
    }
  },

  // Dashboard configuration
  dashboard: {
    refreshInterval: 5000, // 5 seconds
    maxDataPoints: 100,
    widgets: [
      'system_health',
      'http_metrics',
      'business_metrics',
      'queue_status',
      'error_rate',
      'active_campaigns',
      'message_throughput',
      'authentication_stats',
      'rate_limit_stats',
      'alerts'
    ]
  },

  // Health check configuration
  healthCheck: {
    interval: 30000, // 30 seconds
    timeout: 5000, // 5 seconds
    endpoints: {
      '/health': {
        basic: true,
        detailed: false
      },
      '/health/detailed': {
        basic: false,
        detailed: true,
        requireAuth: true
      }
    },
    checks: [
      'database',
      'cache',
      'queue',
      'external_services',
      'disk_space',
      'memory',
      'cpu'
    ]
  }
};
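These thresholds are plain data; the evaluation loop lives in the monitoring service, which this excerpt does not include. A rough sketch of how one threshold/window pair, alerts.errorRate, might be checked against a rolling set of request outcomes — a hypothetical helper, not the service's real implementation:

// Illustrative sliding-window check driven by monitoringConfig.alerts.errorRate.
import { monitoringConfig } from './monitoring.js';

const samples = []; // { timestamp, failed } entries pushed per request by the caller

export function checkErrorRate(now = Date.now()) {
  const { threshold, window, severity } = monitoringConfig.alerts.errorRate;
  const recent = samples.filter(s => now - s.timestamp <= window);
  if (recent.length === 0) return null;

  const rate = recent.filter(s => s.failed).length / recent.length;
  return rate > threshold ? { severity, rate } : null; // non-null means "raise an alert"
}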
164    marketing-agent/services/api-gateway/src/config/security.js    Normal file
@@ -0,0 +1,164 @@
export const securityConfig = {
  // Rate limiting configurations
  rateLimiting: {
    global: {
      windowMs: 15 * 60 * 1000, // 15 minutes
      max: 100, // 100 requests per window
      message: 'Too many requests from this IP',
      standardHeaders: true,
      legacyHeaders: false
    },
    strict: {
      windowMs: 15 * 60 * 1000, // 15 minutes
      max: 10, // 10 requests per window
      message: 'Too many requests to sensitive endpoint'
    },
    endpoints: {
      '/api/v1/auth/login': { windowMs: 15 * 60 * 1000, max: 5 },
      '/api/v1/auth/register': { windowMs: 60 * 60 * 1000, max: 3 },
      '/api/v1/campaigns': { windowMs: 60 * 1000, max: 30 },
      '/api/v1/messages/send': { windowMs: 60 * 1000, max: 10 },
      '/api/v1/analytics': { windowMs: 60 * 1000, max: 60 }
    },
    tiers: {
      free: { windowMs: 15 * 60 * 1000, max: 50 },
      basic: { windowMs: 15 * 60 * 1000, max: 200 },
      premium: { windowMs: 15 * 60 * 1000, max: 1000 },
      enterprise: { windowMs: 15 * 60 * 1000, max: 5000 }
    }
  },

  // CORS configuration
  cors: {
    allowedOrigins: [
      'http://localhost:8080',
      'http://localhost:3000',
      'https://app.marketing-agent.com'
    ],
    credentials: true,
    methods: ['GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'OPTIONS'],
    allowedHeaders: ['Content-Type', 'Authorization', 'X-API-Key', 'X-Request-ID', 'X-CSRF-Token'],
    exposedHeaders: ['X-Request-ID', 'X-RateLimit-Limit', 'X-RateLimit-Remaining', 'X-RateLimit-Reset'],
    maxAge: 86400 // 24 hours
  },

  // JWT configuration
  jwt: {
    accessTokenExpiry: '30m',
    refreshTokenExpiry: '7d',
    algorithm: 'HS256',
    issuer: 'marketing-agent',
    audience: 'marketing-agent-api'
  },

  // Password policy
  passwordPolicy: {
    minLength: 8,
    requireUppercase: true,
    requireLowercase: true,
    requireNumbers: true,
    requireSpecial: true,
    maxLoginAttempts: 5,
    lockoutDuration: 2 * 60 * 60 * 1000, // 2 hours
    passwordHistory: 5 // Remember last 5 passwords
  },

  // API key configuration
  apiKey: {
    length: 32, // bytes
    defaultExpiry: 365, // days
    maxKeysPerUser: 10,
    permissions: ['read', 'write', 'delete', 'execute', 'admin']
  },

  // Session configuration
  session: {
    secret: process.env.SESSION_SECRET || 'change-this-secret-in-production',
    name: 'marketing.sid',
    cookie: {
      secure: process.env.NODE_ENV === 'production',
      httpOnly: true,
      maxAge: 24 * 60 * 60 * 1000, // 24 hours
      sameSite: 'strict'
    },
    resave: false,
    saveUninitialized: false
  },

  // Content Security Policy
  csp: {
    defaultSrc: ["'self'"],
    scriptSrc: ["'self'", "'unsafe-inline'"],
    styleSrc: ["'self'", "'unsafe-inline'"],
    imgSrc: ["'self'", "data:", "https:"],
    connectSrc: ["'self'"],
    fontSrc: ["'self'"],
    objectSrc: ["'none'"],
    mediaSrc: ["'self'"],
    frameSrc: ["'none'"],
    upgradeInsecureRequests: process.env.NODE_ENV === 'production' ? [] : null
  },

  // Security headers
  headers: {
    hsts: {
      maxAge: 31536000,
      includeSubDomains: true,
      preload: true
    },
    xssProtection: '1; mode=block',
    contentTypeOptions: 'nosniff',
    frameOptions: 'DENY',
    referrerPolicy: 'strict-origin-when-cross-origin',
    permissionsPolicy: 'geolocation=(), microphone=(), camera=()'
  },

  // IP filtering
  ipFiltering: {
    enabled: false,
    whitelist: [],
    blacklist: [],
    trustProxy: ['loopback', 'linklocal', 'uniquelocal']
  },

  // Request size limits
  requestLimits: {
    json: '10mb',
    urlencoded: '10mb',
    raw: '20mb',
    text: '1mb'
  },

  // Audit logging
  audit: {
    enabled: true,
    events: [
      'login',
      'logout',
      'password_change',
      'permission_change',
      'api_key_created',
      'api_key_revoked',
      'account_locked',
      'suspicious_activity'
    ],
    retention: 90 // days
  },

  // Two-factor authentication
  twoFactor: {
    enabled: true,
    issuer: 'Marketing Agent',
    window: 2, // Time window in 30s intervals
    qrCodeSize: 200
  },

  // Encryption settings
  encryption: {
    algorithm: 'aes-256-gcm',
    keyDerivation: 'pbkdf2',
    iterations: 100000,
    saltLength: 32,
    tagLength: 16
  }
};
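passwordPolicy is likewise only data here; enforcement presumably happens in the User model or the auth routes, which are outside this excerpt. A minimal validator sketch against these exact fields, illustrative rather than the commit's actual validation code:

// Illustrative password check driven by securityConfig.passwordPolicy.
import { securityConfig } from './security.js';

export function validatePassword(password) {
  const p = securityConfig.passwordPolicy;
  const errors = [];

  if (password.length < p.minLength) errors.push(`at least ${p.minLength} characters`);
  if (p.requireUppercase && !/[A-Z]/.test(password)) errors.push('an uppercase letter');
  if (p.requireLowercase && !/[a-z]/.test(password)) errors.push('a lowercase letter');
  if (p.requireNumbers && !/[0-9]/.test(password)) errors.push('a digit');
  if (p.requireSpecial && !/[^A-Za-z0-9]/.test(password)) errors.push('a special character');

  return { valid: errors.length === 0, errors };
}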
340    marketing-agent/services/api-gateway/src/config/swagger.js    Normal file
@@ -0,0 +1,340 @@
import swaggerJsdoc from 'swagger-jsdoc';
import { config } from './index.js';

const options = {
  definition: {
    openapi: '3.0.0',
    info: {
      title: 'Telegram Marketing Agent API',
      version: '1.0.0',
      description: 'Comprehensive API for managing Telegram marketing campaigns, user segmentation, and analytics',
      contact: {
        name: 'API Support',
        email: 'api-support@example.com'
      },
      license: {
        name: 'MIT',
        url: 'https://opensource.org/licenses/MIT'
      }
    },
    servers: [
      {
        url: 'http://localhost:3000/api/v1',
        description: 'Development server'
      },
      {
        url: 'https://api.example.com/v1',
        description: 'Production server'
      }
    ],
    components: {
      securitySchemes: {
        bearerAuth: {
          type: 'http',
          scheme: 'bearer',
          bearerFormat: 'JWT',
          description: 'Enter JWT token'
        },
        apiKey: {
          type: 'apiKey',
          in: 'header',
          name: 'X-API-Key',
          description: 'API Key authentication'
        }
      },
      schemas: {
        Error: {
          type: 'object',
          properties: {
            success: {
              type: 'boolean',
              example: false
            },
            error: {
              type: 'string',
              example: 'Error message'
            },
            code: {
              type: 'string',
              example: 'ERROR_CODE'
            },
            details: {
              type: 'object'
            }
          }
        },
        Pagination: {
          type: 'object',
          properties: {
            page: {
              type: 'integer',
              example: 1
            },
            limit: {
              type: 'integer',
              example: 20
            },
            total: {
              type: 'integer',
              example: 100
            },
            pages: {
              type: 'integer',
              example: 5
            }
          }
        },
        User: {
          type: 'object',
          properties: {
            id: {
              type: 'string',
              example: 'user123'
            },
            username: {
              type: 'string',
              example: 'johndoe'
            },
            email: {
              type: 'string',
              format: 'email',
              example: 'john@example.com'
            },
            role: {
              type: 'string',
              enum: ['admin', 'user', 'manager'],
              example: 'user'
            },
            createdAt: {
              type: 'string',
              format: 'date-time'
            }
          }
        },
        Campaign: {
          type: 'object',
          properties: {
            id: {
              type: 'string',
              example: 'camp123'
            },
            name: {
              type: 'string',
              example: 'Summer Sale Campaign'
            },
            description: {
              type: 'string'
            },
            type: {
              type: 'string',
              enum: ['message', 'invitation', 'data_collection', 'engagement', 'custom']
            },
            status: {
              type: 'string',
              enum: ['draft', 'active', 'paused', 'completed', 'cancelled']
            },
            goals: {
              type: 'object',
              properties: {
                targetAudience: {
                  type: 'integer'
                },
                conversionRate: {
                  type: 'number'
                },
                revenue: {
                  type: 'number'
                }
              }
            },
            statistics: {
              type: 'object',
              properties: {
                messagesSent: {
                  type: 'integer'
                },
                delivered: {
                  type: 'integer'
                },
                conversions: {
                  type: 'integer'
                }
              }
            },
            createdAt: {
              type: 'string',
              format: 'date-time'
            },
            updatedAt: {
              type: 'string',
              format: 'date-time'
            }
          }
        }
      },
      parameters: {
        pageParam: {
          in: 'query',
          name: 'page',
          schema: {
            type: 'integer',
            default: 1
          },
          description: 'Page number'
        },
        limitParam: {
          in: 'query',
          name: 'limit',
          schema: {
            type: 'integer',
            default: 20,
            maximum: 100
          },
          description: 'Items per page'
        },
        sortParam: {
          in: 'query',
          name: 'sort',
          schema: {
            type: 'string'
          },
          description: 'Sort field (prefix with - for descending)'
        },
        searchParam: {
          in: 'query',
          name: 'search',
          schema: {
            type: 'string'
          },
          description: 'Search query'
        }
      },
      responses: {
        UnauthorizedError: {
          description: 'Access token is missing or invalid',
          content: {
            'application/json': {
              schema: {
                $ref: '#/components/schemas/Error'
              },
              example: {
                success: false,
                error: 'Unauthorized',
                code: 'UNAUTHORIZED'
              }
            }
          }
        },
        NotFoundError: {
          description: 'Resource not found',
          content: {
            'application/json': {
              schema: {
                $ref: '#/components/schemas/Error'
              },
              example: {
                success: false,
                error: 'Resource not found',
                code: 'NOT_FOUND'
              }
            }
          }
        },
        ValidationError: {
          description: 'Validation error',
          content: {
            'application/json': {
              schema: {
                $ref: '#/components/schemas/Error'
              },
              example: {
                success: false,
                error: 'Validation failed',
                code: 'VALIDATION_ERROR',
                details: {
                  fields: {
                    email: 'Invalid email format'
                  }
                }
              }
            }
          }
        },
        RateLimitError: {
          description: 'Too many requests',
          content: {
            'application/json': {
              schema: {
                $ref: '#/components/schemas/Error'
              },
              example: {
                success: false,
                error: 'Too many requests',
                code: 'RATE_LIMIT_EXCEEDED',
                details: {
                  retryAfter: 60
                }
              }
            }
          }
        }
      }
    },
    security: [
      {
        bearerAuth: []
      }
    ],
    tags: [
      {
        name: 'Authentication',
        description: 'User authentication and authorization'
      },
      {
        name: 'Campaigns',
        description: 'Campaign management operations'
      },
      {
        name: 'Users',
        description: 'User management and segmentation'
      },
      {
        name: 'Analytics',
        description: 'Analytics and reporting'
      },
      {
        name: 'Templates',
        description: 'Message template management'
      },
      {
        name: 'Scheduled Campaigns',
        description: 'Campaign scheduling operations'
      },
      {
        name: 'A/B Testing',
        description: 'A/B testing and experiments'
      },
      {
        name: 'Workflows',
        description: 'Marketing automation workflows'
      },
      {
        name: 'Webhooks',
        description: 'Webhook configuration and management'
      },
      {
        name: 'Settings',
        description: 'System settings and configuration'
      }
    ]
  },
  apis: [
    './src/routes/*.js',
    './src/routes/auth/*.js',
    './src/routes/campaigns/*.js',
    './src/routes/users/*.js',
    './src/routes/analytics/*.js'
  ]
};

export const swaggerSpec = swaggerJsdoc(options);
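Because apis points at ./src/routes/*.js, swagger-jsdoc only documents what the route files themselves annotate. A hedged example of the @swagger comment style such a route would need; the endpoint shown is hypothetical, not one of the commit's routes, but it references the Campaign schema and NotFoundError response defined above:

// Illustrative JSDoc annotation that swagger-jsdoc would pick up from a route file.
/**
 * @swagger
 * /campaigns/{id}:
 *   get:
 *     tags: [Campaigns]
 *     summary: Get a single campaign
 *     security:
 *       - bearerAuth: []
 *     parameters:
 *       - in: path
 *         name: id
 *         required: true
 *         schema:
 *           type: string
 *     responses:
 *       200:
 *         description: The campaign
 *         content:
 *           application/json:
 *             schema:
 *               $ref: '#/components/schemas/Campaign'
 *       404:
 *         $ref: '#/components/responses/NotFoundError'
 */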
239    marketing-agent/services/api-gateway/src/middleware/apiKey.js    Normal file
@@ -0,0 +1,239 @@
import crypto from 'crypto';
import { cache } from '../utils/cache.js';
import { logger } from '../utils/logger.js';

/**
 * API Key authentication middleware
 */
export async function authenticateApiKey(req, res, next) {
  const apiKey = req.headers['x-api-key'] || req.query.apiKey;

  if (!apiKey) {
    return next(); // Continue to other auth methods
  }

  try {
    // Check if API key exists in cache
    const keyData = await cache.get(`apikey:${apiKey}`);

    if (!keyData) {
      logger.warn('Invalid API key attempt', {
        apiKey: apiKey.substring(0, 8) + '...',
        ip: req.ip
      });

      return res.status(401).json({
        success: false,
        error: 'Invalid API key'
      });
    }

    const parsed = JSON.parse(keyData);

    // Check if key is expired
    if (parsed.expiresAt && new Date(parsed.expiresAt) < new Date()) {
      logger.warn('Expired API key used', {
        keyName: parsed.name,
        userId: parsed.userId
      });

      return res.status(401).json({
        success: false,
        error: 'API key expired'
      });
    }

    // Update last used timestamp
    parsed.lastUsed = new Date();
    await cache.set(`apikey:${apiKey}`, JSON.stringify(parsed), 365 * 24 * 60 * 60);

    // Increment usage counter
    await cache.incr(`apikey:${apiKey}:usage:${new Date().toISOString().split('T')[0]}`);

    // Set API key context
    req.apiKey = {
      key: apiKey.substring(0, 8) + '...',
      name: parsed.name,
      userId: parsed.userId,
      accountId: parsed.accountId,
      permissions: parsed.permissions || ['read']
    };

    // Set user context for compatibility
    req.user = {
      id: parsed.userId,
      accountId: parsed.accountId,
      role: 'api',
      permissions: parsed.permissions
    };

    logger.info('API key authenticated', {
      keyName: parsed.name,
      userId: parsed.userId,
      ip: req.ip
    });

    next();
  } catch (error) {
    logger.error('API key authentication error', error);

    res.status(500).json({
      success: false,
      error: 'Authentication failed'
    });
  }
}

/**
 * Require API key authentication
 */
export function requireApiKey(req, res, next) {
  if (!req.apiKey) {
    return res.status(401).json({
      success: false,
      error: 'API key required'
    });
  }
  next();
}

/**
 * Check API key permissions
 */
export function checkApiKeyPermission(...permissions) {
  return (req, res, next) => {
    if (!req.apiKey) {
      return next(); // Not using API key auth
    }

    const hasAllPermissions = permissions.every(perm =>
      req.apiKey.permissions.includes(perm) ||
      req.apiKey.permissions.includes('all')
    );

    if (!hasAllPermissions) {
      logger.warn('Insufficient API key permissions', {
        required: permissions,
        actual: req.apiKey.permissions,
        keyName: req.apiKey.name
      });

      return res.status(403).json({
        success: false,
        error: 'Insufficient API key permissions',
        required: permissions,
        current: req.apiKey.permissions
      });
    }

    next();
  };
}

/**
 * API key rate limiting
 */
export async function apiKeyRateLimit(options = {}) {
  const {
    windowMs = 60000, // 1 minute
    max = 100,
    keyBased = true // Rate limit per key vs per user
  } = options;

  return async (req, res, next) => {
    if (!req.apiKey) {
      return next(); // Not using API key auth
    }

    const rateLimitKey = keyBased
      ? `ratelimit:apikey:${req.apiKey.key}`
      : `ratelimit:user:${req.apiKey.userId}`;

    try {
      const current = await cache.incr(rateLimitKey);

      if (current === 1) {
        await cache.expire(rateLimitKey, Math.ceil(windowMs / 1000));
      }

      const ttl = await cache.ttl(rateLimitKey);
      const resetTime = Date.now() + (ttl * 1000);

      res.setHeader('X-RateLimit-Limit', max);
      res.setHeader('X-RateLimit-Remaining', Math.max(0, max - current));
      res.setHeader('X-RateLimit-Reset', new Date(resetTime).toISOString());

      if (current > max) {
        logger.warn('API key rate limit exceeded', {
          keyName: req.apiKey.name,
          userId: req.apiKey.userId,
          requests: current
        });

        return res.status(429).json({
          success: false,
          error: 'Rate limit exceeded',
          retryAfter: ttl
        });
      }

      next();
    } catch (error) {
      logger.error('API key rate limit error', error);
      next(); // Allow on error
    }
  };
}

/**
 * Log API key usage
 */
export async function logApiKeyUsage(req, res, next) {
  if (!req.apiKey) {
    return next();
  }

  res.on('finish', async () => {
    try {
      const usage = {
        apiKey: req.apiKey.key,
        keyName: req.apiKey.name,
        userId: req.apiKey.userId,
        method: req.method,
        path: req.path,
        statusCode: res.statusCode,
        ip: req.ip,
        userAgent: req.get('user-agent'),
        timestamp: new Date().toISOString()
      };

      // Store in usage log
      const logKey = `apikey:usage:${new Date().toISOString().split('T')[0]}`;
      await cache.lpush(logKey, JSON.stringify(usage));
      await cache.expire(logKey, 30 * 24 * 60 * 60); // Keep for 30 days

      // Update statistics
      await cache.hincrby(`apikey:stats:${req.apiKey.key}`, 'totalRequests', 1);
      await cache.hincrby(`apikey:stats:${req.apiKey.key}`, `status:${res.statusCode}`, 1);
      await cache.hincrby(`apikey:stats:${req.apiKey.key}`, `method:${req.method}`, 1);
    } catch (error) {
      logger.error('Failed to log API key usage', error);
    }
  });

  next();
}

/**
 * Generate API key
 */
export function generateApiKey() {
  return crypto.randomBytes(32).toString('hex');
}

/**
 * Validate API key format
 */
export function validateApiKeyFormat(apiKey) {
  return /^[a-f0-9]{64}$/.test(apiKey);
}
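authenticateApiKey only trusts records it finds under apikey:<key> in the cache, so something has to seed them. A minimal provisioning sketch using the exported generateApiKey() and the same cache-key layout; this is illustrative, since the commit's actual key-management route is not in this excerpt:

// Illustrative provisioning of an API key record that authenticateApiKey will accept.
import { generateApiKey } from './apiKey.js';
import { cache } from '../utils/cache.js';

export async function issueApiKey({ userId, accountId, name, permissions = ['read'] }) {
  const key = generateApiKey(); // 64 hex chars, matches validateApiKeyFormat()

  const record = {
    name,
    userId,
    accountId,
    permissions,
    createdAt: new Date(),
    expiresAt: new Date(Date.now() + 365 * 24 * 60 * 60 * 1000) // 1 year, per securityConfig.apiKey.defaultExpiry
  };

  // Same cache key and TTL convention as the middleware above.
  await cache.set(`apikey:${key}`, JSON.stringify(record), 365 * 24 * 60 * 60);
  return key; // returned once; only the 8-char prefix is logged afterwards
}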
233    marketing-agent/services/api-gateway/src/middleware/auth.js    Normal file
@@ -0,0 +1,233 @@
import jwt from 'jsonwebtoken';
import { config } from '../config/index.js';
import { logger } from '../utils/logger.js';
import { cache } from '../utils/cache.js';

/**
 * JWT authentication middleware
 */
export const authenticate = async (req, res, next) => {
  try {
    // Extract token from header
    const authHeader = req.headers.authorization;
    if (!authHeader || !authHeader.startsWith('Bearer ')) {
      return res.status(401).json({
        success: false,
        error: 'No token provided'
      });
    }

    const token = authHeader.substring(7);

    // Check token blacklist
    const isBlacklisted = await cache.get(`blacklist:${token}`);
    if (isBlacklisted) {
      return res.status(401).json({
        success: false,
        error: 'Token has been revoked'
      });
    }

    // Verify token
    const decoded = jwt.verify(token, config.jwt.secret);

    // Check token expiration
    if (decoded.exp && decoded.exp < Date.now() / 1000) {
      return res.status(401).json({
        success: false,
        error: 'Token expired'
      });
    }

    // Attach user info to request
    req.user = {
      id: decoded.userId,
      accountId: decoded.accountId,
      role: decoded.role,
      permissions: decoded.permissions || []
    };

    // Add token to request for logout functionality
    req.token = token;

    next();
  } catch (error) {
    logger.error('Authentication error:', error);

    if (error.name === 'JsonWebTokenError') {
      return res.status(401).json({
        success: false,
        error: 'Invalid token'
      });
    }

    if (error.name === 'TokenExpiredError') {
      return res.status(401).json({
        success: false,
        error: 'Token expired'
      });
    }

    return res.status(500).json({
      success: false,
      error: 'Authentication failed'
    });
  }
};

/**
 * API key authentication middleware
 */
export const apiKeyAuth = async (req, res, next) => {
  try {
    const apiKey = req.headers['x-api-key'];

    if (!apiKey) {
      return res.status(401).json({
        success: false,
        error: 'API key required'
      });
    }

    // Check API key in cache first
    const cachedKey = await cache.get(`apikey:${apiKey}`);
    if (cachedKey) {
      req.apiKey = JSON.parse(cachedKey);
      return next();
    }

    // In production, validate against database
    // For now, using a simple validation
    if (!isValidApiKey(apiKey)) {
      return res.status(401).json({
        success: false,
        error: 'Invalid API key'
      });
    }

    // Cache valid API key
    const keyData = {
      key: apiKey,
      permissions: ['read', 'write'],
      rateLimit: 1000
    };

    await cache.set(`apikey:${apiKey}`, JSON.stringify(keyData), 3600);
    req.apiKey = keyData;

    next();
  } catch (error) {
    logger.error('API key authentication error:', error);
    return res.status(500).json({
      success: false,
      error: 'Authentication failed'
    });
  }
};

/**
 * Optional authentication - doesn't fail if no token
 */
export const optionalAuth = async (req, res, next) => {
  const authHeader = req.headers.authorization;

  if (!authHeader || !authHeader.startsWith('Bearer ')) {
    return next();
  }

  try {
    const token = authHeader.substring(7);
    const decoded = jwt.verify(token, config.jwt.secret);

    req.user = {
      id: decoded.userId,
      accountId: decoded.accountId,
      role: decoded.role,
      permissions: decoded.permissions || []
    };
  } catch (error) {
    // Ignore errors for optional auth
    logger.debug('Optional auth failed:', error.message);
  }

  next();
};

/**
 * Role-based access control middleware
 */
export const requireRole = (role) => {
  return (req, res, next) => {
    if (!req.user) {
      return res.status(401).json({
        success: false,
        error: 'Authentication required'
      });
    }

    if (req.user.role !== role && req.user.role !== 'admin') {
      return res.status(403).json({
        success: false,
        error: 'Insufficient permissions'
      });
    }

    next();
  };
};

/**
 * Permission-based access control middleware
 */
export const requirePermission = (permission) => {
  return (req, res, next) => {
    if (!req.user) {
      return res.status(401).json({
        success: false,
        error: 'Authentication required'
      });
    }

    const hasPermission = req.user.role === 'admin' ||
      (req.user.permissions && req.user.permissions.includes(permission));

    if (!hasPermission) {
      return res.status(403).json({
        success: false,
        error: `Permission '${permission}' required`
      });
    }

    next();
  };
};

/**
 * Generate JWT token
 */
export const generateToken = (payload) => {
  return jwt.sign(payload, config.jwt.secret, {
    expiresIn: config.jwt.expiresIn,
    issuer: 'api-gateway',
    audience: 'marketing-agent'
  });
};

/**
 * Generate refresh token
 */
export const generateRefreshToken = (payload) => {
  return jwt.sign(payload, config.jwt.secret, {
    expiresIn: config.jwt.refreshExpiresIn,
    issuer: 'api-gateway',
    audience: 'marketing-agent-refresh'
  });
};

/**
 * Validate API key format
 */
function isValidApiKey(apiKey) {
  // Simple validation - in production, check against database
  return apiKey.length === 32 && /^[a-zA-Z0-9]+$/.test(apiKey);
}
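Taken together, generateToken() and authenticate/requireRole give the usual issue-then-verify flow. A short sketch of how a login handler and a protected route might use them; this is illustrative wiring, not the commit's routes/auth.js, and the payload values are placeholders:

// Illustrative wiring of the helpers above (not the commit's routes/auth.js).
import { Router } from 'express';
import { authenticate, requireRole, generateToken, generateRefreshToken } from '../middleware/auth.js';

const router = Router();

router.post('/login', async (req, res) => {
  // ...credential check against the User model would happen here...
  const payload = { userId: 'user123', accountId: 'acct1', role: 'manager', permissions: ['campaigns:read'] };
  res.json({
    success: true,
    accessToken: generateToken(payload),
    refreshToken: generateRefreshToken(payload)
  });
});

// Only admins (or the named role) get past requireRole.
router.get('/admin/stats', authenticate, requireRole('admin'), (req, res) => {
  res.json({ success: true, user: req.user });
});

export default router;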
@@ -0,0 +1,178 @@
|
||||
export function checkPermission(resource, action) {
|
||||
return async (req, res, next) => {
|
||||
try {
|
||||
if (!req.user) {
|
||||
return res.status(401).json({
|
||||
success: false,
|
||||
error: 'Authentication required'
|
||||
});
|
||||
}
|
||||
|
||||
// Check if user has permission
|
||||
const hasPermission = req.user.hasPermission(resource, action);
|
||||
|
||||
if (!hasPermission) {
|
||||
return res.status(403).json({
|
||||
success: false,
|
||||
error: 'Insufficient permissions',
|
||||
required: { resource, action }
|
||||
});
|
||||
}
|
||||
|
||||
// Add permission context to request
|
||||
req.permission = { resource, action };
|
||||
|
||||
next();
|
||||
} catch (error) {
|
||||
console.error('Permission check error:', error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Permission check failed'
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export function requireRole(...roles) {
|
||||
return (req, res, next) => {
|
||||
if (!req.user) {
|
||||
return res.status(401).json({
|
||||
success: false,
|
||||
error: 'Authentication required'
|
||||
});
|
||||
}
|
||||
|
||||
if (!roles.includes(req.user.role)) {
|
||||
return res.status(403).json({
|
||||
success: false,
|
||||
error: 'Insufficient role privileges',
|
||||
required: roles,
|
||||
current: req.user.role
|
||||
});
|
||||
}
|
||||
|
||||
next();
|
||||
};
|
||||
}
|
||||
|
||||
export function checkResourceOwnership(resourceField = 'userId') {
|
||||
return async (req, res, next) => {
|
||||
try {
|
||||
if (!req.user) {
|
||||
return res.status(401).json({
|
||||
success: false,
|
||||
error: 'Authentication required'
|
||||
});
|
||||
}
|
||||
|
||||
// Admin can access all resources
|
||||
if (req.user.role === 'admin') {
|
||||
return next();
|
||||
}
|
||||
|
||||
// Get resource ID from params or body
|
||||
const resourceUserId = req.params[resourceField] ||
|
||||
req.body[resourceField] ||
|
||||
req.query[resourceField];
|
||||
|
||||
if (!resourceUserId) {
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: 'Resource user ID not provided'
|
||||
});
|
||||
}
|
||||
|
||||
// Check if user owns the resource
|
||||
if (req.user._id.toString() !== resourceUserId) {
|
||||
return res.status(403).json({
|
||||
success: false,
|
||||
error: 'Access denied to this resource'
|
||||
});
|
||||
}
|
||||
|
||||
next();
|
||||
} catch (error) {
|
||||
console.error('Resource ownership check error:', error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Ownership check failed'
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export function checkApiKeyPermission(...permissions) {
|
||||
return (req, res, next) => {
|
||||
if (!req.apiKey) {
|
||||
return next(); // Not using API key auth
|
||||
}
|
||||
|
||||
const hasAllPermissions = permissions.every(perm =>
|
||||
req.apiKey.permissions.includes(perm)
|
||||
);
|
||||
|
||||
if (!hasAllPermissions) {
|
||||
return res.status(403).json({
|
||||
success: false,
|
||||
error: 'API key lacks required permissions',
|
||||
required: permissions,
|
||||
current: req.apiKey.permissions
|
||||
});
|
||||
}
|
||||
|
||||
next();
|
||||
};
|
||||
}
|
||||
|
||||
// Middleware to log permission usage
|
||||
export function logPermissionUsage(req, res, next) {
|
||||
if (req.permission && req.user) {
|
||||
console.log(`Permission used: ${req.user.username} -> ${req.permission.resource}:${req.permission.action}`);
|
||||
|
||||
// You can also emit this to analytics service
|
||||
// eventEmitter.emit('permission.used', {
|
||||
// userId: req.user._id,
|
||||
// resource: req.permission.resource,
|
||||
// action: req.permission.action,
|
||||
// timestamp: new Date()
|
||||
// });
|
||||
}
|
||||
next();
|
||||
}
|
||||
|
||||
// Dynamic permission checking based on request context
|
||||
export function checkDynamicPermission(permissionResolver) {
|
||||
return async (req, res, next) => {
|
||||
try {
|
||||
if (!req.user) {
|
||||
return res.status(401).json({
|
||||
success: false,
|
||||
error: 'Authentication required'
|
||||
});
|
||||
}
|
||||
|
||||
// Resolve permission requirements dynamically
|
||||
const { resource, action, context } = await permissionResolver(req);
|
||||
|
||||
// Check permission with context
|
||||
const hasPermission = req.user.hasPermission(resource, action);
|
||||
|
||||
if (!hasPermission) {
|
||||
return res.status(403).json({
|
||||
success: false,
|
||||
error: 'Insufficient permissions',
|
||||
required: { resource, action, context }
|
||||
});
|
||||
}
|
||||
|
||||
req.permission = { resource, action, context };
|
||||
next();
|
||||
} catch (error) {
|
||||
console.error('Dynamic permission check error:', error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Permission check failed'
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
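These guards assume an earlier authentication step has attached the full user document to req.user (so hasPermission is callable). A usage sketch, assuming this file is exposed as middleware/permissions.js and reusing the authenticate middleware referenced elsewhere in the gateway; the route path is illustrative:

import express from 'express';
import { authenticate } from '../middleware/auth.js';
import { checkPermission, requireRole, logPermissionUsage } from '../middleware/permissions.js';

const router = express.Router();

// Only admins and managers holding campaigns:create may reach the handler.
router.post(
  '/campaigns',
  authenticate,
  requireRole('admin', 'manager'),
  checkPermission('campaigns', 'create'),
  logPermissionUsage,
  (req, res) => res.status(201).json({ success: true })
);

export default router;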
@@ -0,0 +1,183 @@
|
||||
import rateLimit from 'express-rate-limit';
|
||||
import { config } from '../config/index.js';
|
||||
import { logger } from '../utils/logger.js';
|
||||
import Redis from 'ioredis';
|
||||
|
||||
// Create Redis client for rate limiting
|
||||
const redisClient = new Redis({
|
||||
host: config.redis.host,
|
||||
port: config.redis.port,
|
||||
password: config.redis.password
|
||||
});
|
||||
|
||||
/**
|
||||
* Create rate limiter without Redis store for now
|
||||
*/
|
||||
export const createRateLimiter = (options = {}) => {
|
||||
return rateLimit({
|
||||
windowMs: options.windowMs || config.rateLimiting.windowMs,
|
||||
max: options.max || config.rateLimiting.max,
|
||||
message: options.message || config.rateLimiting.message,
|
||||
standardHeaders: config.rateLimiting.standardHeaders,
|
||||
legacyHeaders: config.rateLimiting.legacyHeaders,
|
||||
handler: (req, res) => {
|
||||
logger.warn('Rate limit exceeded', {
|
||||
ip: req.ip,
|
||||
path: req.path,
|
||||
userId: req.user?.id
|
||||
});
|
||||
|
||||
res.status(429).json({
|
||||
success: false,
|
||||
error: 'Too many requests',
|
||||
retryAfter: res.getHeader('Retry-After')
|
||||
});
|
||||
},
|
||||
skip: (req) => {
|
||||
// Skip rate limiting for certain conditions
|
||||
if (options.skip) {
|
||||
return options.skip(req);
|
||||
}
|
||||
return false;
|
||||
},
|
||||
keyGenerator: (req) => {
|
||||
if (options.keyGenerator) {
|
||||
return options.keyGenerator(req);
|
||||
}
|
||||
// Default: use IP + user ID if authenticated
|
||||
return req.user ? `${req.ip}:${req.user.id}` : req.ip;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Global rate limiter
|
||||
*/
|
||||
export const globalRateLimiter = createRateLimiter();
|
||||
|
||||
/**
|
||||
* Strict rate limiter for sensitive endpoints
|
||||
*/
|
||||
export const strictRateLimiter = createRateLimiter({
|
||||
windowMs: 15 * 60 * 1000, // 15 minutes
|
||||
max: 10, // 10 requests per window
|
||||
message: 'Too many requests to sensitive endpoint'
|
||||
});
|
||||
|
||||
/**
|
||||
* API key based rate limiter
|
||||
*/
|
||||
export const apiKeyRateLimiter = createRateLimiter({
|
||||
windowMs: 60 * 1000, // 1 minute
|
||||
max: 100, // 100 requests per minute
|
||||
keyGenerator: (req) => req.apiKey?.key || req.ip,
|
||||
skip: (req) => !req.apiKey // Skip if not using API key auth
|
||||
});
|
||||
|
||||
/**
|
||||
* Dynamic rate limiter based on user tier
|
||||
*/
|
||||
export const dynamicRateLimiter = (req, res, next) => {
|
||||
const tier = req.user?.tier || 'free';
|
||||
|
||||
const limits = {
|
||||
free: { windowMs: 15 * 60 * 1000, max: 50 },
|
||||
basic: { windowMs: 15 * 60 * 1000, max: 200 },
|
||||
premium: { windowMs: 15 * 60 * 1000, max: 1000 },
|
||||
enterprise: { windowMs: 15 * 60 * 1000, max: 5000 }
|
||||
};
|
||||
|
||||
const limiter = createRateLimiter(limits[tier] || limits.free);
|
||||
limiter(req, res, next);
|
||||
};
|
||||
|
||||
/**
|
||||
* Endpoint-specific rate limiter factory
|
||||
*/
|
||||
export const endpointRateLimiter = (endpoint, options = {}) => {
|
||||
const endpointLimits = {
|
||||
'/api/v1/auth/login': { windowMs: 15 * 60 * 1000, max: 5 },
|
||||
'/api/v1/auth/register': { windowMs: 60 * 60 * 1000, max: 3 },
|
||||
'/api/v1/campaigns': { windowMs: 60 * 1000, max: 30 },
|
||||
'/api/v1/messages/send': { windowMs: 60 * 1000, max: 10 },
|
||||
'/api/v1/analytics': { windowMs: 60 * 1000, max: 60 }
|
||||
};
|
||||
|
||||
// Avoid shadowing the imported config object.
const endpointConfig = endpointLimits[endpoint] || {};
return createRateLimiter({ ...endpointConfig, ...options });
|
||||
};
|
||||
|
||||
/**
|
||||
* Rate limit by resource
|
||||
*/
|
||||
export const resourceRateLimiter = (resourceType) => {
|
||||
return createRateLimiter({
|
||||
keyGenerator: (req) => {
|
||||
const resourceId = req.params.id || req.body.resourceId;
|
||||
return `${resourceType}:${resourceId}:${req.ip}`;
|
||||
},
|
||||
windowMs: 60 * 1000,
|
||||
max: 20,
|
||||
message: `Too many requests for this ${resourceType}`
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Sliding window rate limiter
|
||||
*/
|
||||
export class SlidingWindowRateLimiter {
|
||||
constructor(options = {}) {
|
||||
this.windowMs = options.windowMs || 60000; // 1 minute
|
||||
this.max = options.max || 100;
|
||||
this.redis = redisClient;
|
||||
}
|
||||
|
||||
async checkLimit(key) {
|
||||
const now = Date.now();
|
||||
const windowStart = now - this.windowMs;
|
||||
const redisKey = `sliding:${key}`;
|
||||
|
||||
try {
|
||||
// Remove old entries
|
||||
await this.redis.zremrangebyscore(redisKey, '-inf', windowStart);
|
||||
|
||||
// Count current requests
|
||||
const count = await this.redis.zcard(redisKey);
|
||||
|
||||
if (count >= this.max) {
|
||||
return { allowed: false, remaining: 0, resetAt: now + this.windowMs };
|
||||
}
|
||||
|
||||
// Add current request
|
||||
await this.redis.zadd(redisKey, now, `${now}-${Math.random()}`);
|
||||
await this.redis.expire(redisKey, Math.ceil(this.windowMs / 1000));
|
||||
|
||||
return { allowed: true, remaining: this.max - count - 1, resetAt: now + this.windowMs };
|
||||
} catch (error) {
|
||||
logger.error('Sliding window rate limit error:', error);
|
||||
// Allow on error to prevent blocking legitimate requests
|
||||
return { allowed: true, remaining: this.max, resetAt: now + this.windowMs };
|
||||
}
|
||||
}
|
||||
|
||||
middleware() {
|
||||
return async (req, res, next) => {
|
||||
const key = req.user ? `${req.ip}:${req.user.id}` : req.ip;
|
||||
const result = await this.checkLimit(key);
|
||||
|
||||
res.setHeader('X-RateLimit-Limit', this.max);
|
||||
res.setHeader('X-RateLimit-Remaining', result.remaining);
|
||||
res.setHeader('X-RateLimit-Reset', new Date(result.resetAt).toISOString());
|
||||
|
||||
if (!result.allowed) {
|
||||
return res.status(429).json({
|
||||
success: false,
|
||||
error: 'Rate limit exceeded',
|
||||
retryAfter: Math.ceil((result.resetAt - Date.now()) / 1000)
|
||||
});
|
||||
}
|
||||
|
||||
next();
|
||||
};
|
||||
}
|
||||
}
|
||||
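A sketch of how these limiters could be mounted on the gateway app; the route paths are illustrative, and the Redis-backed sliding window is reserved here for the message-send route:

import express from 'express';
import {
  globalRateLimiter,
  strictRateLimiter,
  dynamicRateLimiter,
  SlidingWindowRateLimiter
} from './middleware/rateLimiter.js';

const app = express();

// Baseline limit for every request, then a tighter limit on auth endpoints.
app.use(globalRateLimiter);
app.use('/api/v1/auth', strictRateLimiter);

// Tier-aware limiting for the campaign API.
app.use('/api/v1/campaigns', dynamicRateLimiter);

// Redis-backed sliding window for a particularly sensitive route.
const sendLimiter = new SlidingWindowRateLimiter({ windowMs: 60 * 1000, max: 10 });
app.use('/api/v1/messages/send', sendLimiter.middleware());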
304
marketing-agent/services/api-gateway/src/middleware/security.js
Normal file
@@ -0,0 +1,304 @@
|
||||
import express from 'express';
import helmet from 'helmet';
|
||||
import cors from 'cors';
|
||||
import hpp from 'hpp';
|
||||
import mongoSanitize from 'express-mongo-sanitize';
|
||||
import { logger } from '../utils/logger.js';
|
||||
import { config } from '../config/index.js';
|
||||
import crypto from 'crypto';
|
||||
|
||||
/**
|
||||
* Configure CORS
|
||||
*/
|
||||
export const corsOptions = {
|
||||
origin: function (origin, callback) {
|
||||
const allowedOrigins = config.cors?.allowedOrigins || [
|
||||
'http://localhost:8080',
|
||||
'http://localhost:3000',
|
||||
'https://app.marketing-agent.com'
|
||||
];
|
||||
|
||||
// Allow requests with no origin (like mobile apps or Postman)
|
||||
if (!origin) return callback(null, true);
|
||||
|
||||
if (allowedOrigins.indexOf('*') !== -1 || allowedOrigins.indexOf(origin) !== -1) {
|
||||
callback(null, true);
|
||||
} else {
|
||||
logger.warn('CORS blocked request', { origin });
|
||||
callback(new Error('Not allowed by CORS'));
|
||||
}
|
||||
},
|
||||
credentials: true,
|
||||
methods: ['GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'OPTIONS'],
|
||||
allowedHeaders: ['Content-Type', 'Authorization', 'X-API-Key', 'X-Request-ID'],
|
||||
exposedHeaders: ['X-Request-ID', 'X-RateLimit-Limit', 'X-RateLimit-Remaining', 'X-RateLimit-Reset'],
|
||||
maxAge: 86400 // 24 hours
|
||||
};
|
||||
|
||||
/**
|
||||
* Configure Helmet security headers
|
||||
*/
|
||||
export const helmetConfig = helmet({
|
||||
contentSecurityPolicy: {
|
||||
directives: {
|
||||
defaultSrc: ["'self'"],
|
||||
scriptSrc: ["'self'", "'unsafe-inline'"],
|
||||
styleSrc: ["'self'", "'unsafe-inline'"],
|
||||
imgSrc: ["'self'", "data:", "https:"],
|
||||
connectSrc: ["'self'"],
|
||||
fontSrc: ["'self'"],
|
||||
objectSrc: ["'none'"],
|
||||
mediaSrc: ["'self'"],
|
||||
frameSrc: ["'none'"],
|
||||
upgradeInsecureRequests: config.environment === 'production' ? [] : null
|
||||
}
|
||||
},
|
||||
hsts: {
|
||||
maxAge: 31536000,
|
||||
includeSubDomains: true,
|
||||
preload: true
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Request ID middleware
|
||||
*/
|
||||
export function requestId(req, res, next) {
|
||||
const id = req.get('X-Request-ID') || crypto.randomUUID();
|
||||
req.id = id;
|
||||
res.setHeader('X-Request-ID', id);
|
||||
next();
|
||||
}
|
||||
|
||||
/**
|
||||
* IP whitelist/blacklist middleware
|
||||
*/
|
||||
export function ipFilter(options = {}) {
|
||||
const whitelist = options.whitelist || [];
|
||||
const blacklist = options.blacklist || [];
|
||||
|
||||
return (req, res, next) => {
|
||||
const clientIp = req.ip || req.connection.remoteAddress;
|
||||
|
||||
// Check blacklist first
|
||||
if (blacklist.length > 0 && blacklist.includes(clientIp)) {
|
||||
logger.warn('Blacklisted IP attempted access', { ip: clientIp });
|
||||
return res.status(403).json({
|
||||
success: false,
|
||||
error: 'Access denied'
|
||||
});
|
||||
}
|
||||
|
||||
// Check whitelist if configured
|
||||
if (whitelist.length > 0 && !whitelist.includes(clientIp)) {
|
||||
logger.warn('Non-whitelisted IP attempted access', { ip: clientIp });
|
||||
return res.status(403).json({
|
||||
success: false,
|
||||
error: 'Access denied'
|
||||
});
|
||||
}
|
||||
|
||||
next();
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Security headers middleware
|
||||
*/
|
||||
export function securityHeaders(req, res, next) {
|
||||
// Additional security headers
|
||||
res.setHeader('X-Frame-Options', 'DENY');
|
||||
res.setHeader('X-Content-Type-Options', 'nosniff');
|
||||
res.setHeader('X-XSS-Protection', '1; mode=block');
|
||||
res.setHeader('Referrer-Policy', 'strict-origin-when-cross-origin');
|
||||
res.setHeader('Permissions-Policy', 'geolocation=(), microphone=(), camera=()');
|
||||
|
||||
// Remove potentially sensitive headers
|
||||
res.removeHeader('X-Powered-By');
|
||||
res.removeHeader('Server');
|
||||
|
||||
next();
|
||||
}
|
||||
|
||||
/**
|
||||
* API version check middleware
|
||||
*/
|
||||
export function apiVersionCheck(supportedVersions = ['v1']) {
|
||||
return (req, res, next) => {
|
||||
const version = req.headers['api-version'] || req.query.apiVersion;
|
||||
|
||||
if (version && !supportedVersions.includes(version)) {
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: 'Unsupported API version',
|
||||
supportedVersions
|
||||
});
|
||||
}
|
||||
|
||||
req.apiVersion = version || supportedVersions[0];
|
||||
next();
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Prevent parameter pollution
|
||||
*/
|
||||
export const preventParamPollution = hpp({
|
||||
whitelist: ['sort', 'fields', 'filter', 'page', 'limit']
|
||||
});
|
||||
|
||||
/**
|
||||
* MongoDB injection prevention
|
||||
*/
|
||||
export const preventMongoInjection = mongoSanitize({
|
||||
replaceWith: '_',
|
||||
onSanitize: ({ req, key }) => {
|
||||
logger.warn('MongoDB injection attempt prevented', {
|
||||
ip: req.ip,
|
||||
path: req.path,
|
||||
key
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Request logging middleware
|
||||
*/
|
||||
export function requestLogger(req, res, next) {
|
||||
const start = Date.now();
|
||||
|
||||
res.on('finish', () => {
|
||||
const duration = Date.now() - start;
|
||||
|
||||
logger.info('Request processed', {
|
||||
requestId: req.id,
|
||||
method: req.method,
|
||||
path: req.path,
|
||||
statusCode: res.statusCode,
|
||||
duration,
|
||||
ip: req.ip,
|
||||
userAgent: req.get('user-agent'),
|
||||
userId: req.user?.id
|
||||
});
|
||||
});
|
||||
|
||||
next();
|
||||
}
|
||||
|
||||
/**
|
||||
* Error logging middleware
|
||||
*/
|
||||
export function errorLogger(err, req, res, next) {
|
||||
logger.error('Request error', {
|
||||
requestId: req.id,
|
||||
error: err.message,
|
||||
stack: err.stack,
|
||||
method: req.method,
|
||||
path: req.path,
|
||||
ip: req.ip,
|
||||
userId: req.user?.id
|
||||
});
|
||||
|
||||
next(err);
|
||||
}
|
||||
|
||||
/**
|
||||
* Security audit middleware
|
||||
*/
|
||||
export function securityAudit(eventType) {
|
||||
return (req, res, next) => {
|
||||
logger.info('Security event', {
|
||||
eventType,
|
||||
requestId: req.id,
|
||||
userId: req.user?.id,
|
||||
ip: req.ip,
|
||||
path: req.path,
|
||||
method: req.method,
|
||||
timestamp: new Date().toISOString()
|
||||
});
|
||||
|
||||
next();
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Trusted proxy configuration
|
||||
*/
|
||||
export function configureTrustedProxies(app) {
|
||||
// Trust proxies for accurate IP detection
|
||||
app.set('trust proxy', config.trustProxy || ['loopback', 'linklocal', 'uniquelocal']);
|
||||
}
|
||||
|
||||
/**
|
||||
* Session fixation prevention
|
||||
*/
|
||||
export function preventSessionFixation(req, res, next) {
|
||||
if (req.session && req.user) {
|
||||
// Regenerate session ID on login
|
||||
if (req.path.includes('/login') && res.statusCode === 200) {
|
||||
req.session.regenerate((err) => {
|
||||
if (err) {
|
||||
logger.error('Session regeneration failed', { error: err });
|
||||
}
|
||||
next();
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
next();
|
||||
}
|
||||
|
||||
/**
|
||||
* CSRF protection for state-changing operations
|
||||
*/
|
||||
export function csrfProtection(options = {}) {
|
||||
const excludePaths = options.exclude || ['/api/v1/auth/login', '/api/v1/auth/register'];
|
||||
|
||||
return (req, res, next) => {
|
||||
// Skip for excluded paths
|
||||
if (excludePaths.some(path => req.path.includes(path))) {
|
||||
return next();
|
||||
}
|
||||
|
||||
// Skip for safe methods
|
||||
if (['GET', 'HEAD', 'OPTIONS'].includes(req.method)) {
|
||||
return next();
|
||||
}
|
||||
|
||||
const token = req.headers['x-csrf-token'] || req.body._csrf;
|
||||
const sessionToken = req.session?.csrfToken;
|
||||
|
||||
if (!token || !sessionToken || token !== sessionToken) {
|
||||
return res.status(403).json({
|
||||
success: false,
|
||||
error: 'Invalid CSRF token'
|
||||
});
|
||||
}
|
||||
|
||||
next();
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply all security middleware
|
||||
*/
|
||||
export function applySecurityMiddleware(app) {
|
||||
// Basic security
|
||||
app.use(helmetConfig);
|
||||
app.use(cors(corsOptions));
|
||||
app.use(requestId);
|
||||
app.use(securityHeaders);
|
||||
|
||||
// Request processing
|
||||
app.use(express.json({ limit: '10mb' }));
|
||||
app.use(express.urlencoded({ extended: true, limit: '10mb' }));
|
||||
|
||||
// Injection prevention
|
||||
app.use(preventMongoInjection);
|
||||
app.use(preventParamPollution);
|
||||
|
||||
// Logging
|
||||
app.use(requestLogger);
|
||||
|
||||
// Configure trusted proxies
|
||||
configureTrustedProxies(app);
|
||||
}
|
||||
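applySecurityMiddleware is intended to run once during bootstrap, before routes are mounted and before the error handlers. A minimal sketch of that wiring, assuming an app.js roughly like the one this gateway starts from (route mounting is elided and the IP value is illustrative):

import express from 'express';
import {
  applySecurityMiddleware,
  ipFilter,
  csrfProtection,
  errorLogger
} from './middleware/security.js';

const app = express();

// Helmet, CORS, request IDs, body parsing, sanitization and request logging.
applySecurityMiddleware(app);

// Optional hardening for specific deployments.
app.use('/admin', ipFilter({ whitelist: ['10.0.0.5'] }));
app.use(csrfProtection());

// ... mount routers here ...

// Error logging belongs after the routes so it sees their failures.
app.use(errorLogger);

app.listen(3000);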
@@ -0,0 +1,263 @@
|
||||
import jwt from 'jsonwebtoken';
import Tenant from '../models/Tenant.js';
import { User } from '../models/User.js';
import { logger } from '../utils/logger.js';
|
||||
|
||||
/**
|
||||
* Tenant isolation middleware
|
||||
* Extracts tenant context from request and validates access
|
||||
*/
|
||||
const tenantMiddleware = async (req, res, next) => {
|
||||
try {
|
||||
let tenantId = null;
|
||||
let tenantSlug = null;
|
||||
|
||||
// 1. Check subdomain (e.g., acme.app.com)
|
||||
const host = req.get('host') || '';
const subdomain = host.split('.')[0];
|
||||
if (subdomain && subdomain !== 'app' && subdomain !== 'www') {
|
||||
tenantSlug = subdomain;
|
||||
}
|
||||
|
||||
// 2. Check custom domain
|
||||
if (!tenantSlug) {
|
||||
const tenant = await Tenant.findByDomain(host);
|
||||
if (tenant) {
|
||||
tenantId = tenant._id;
|
||||
tenantSlug = tenant.slug;
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Check header (for API access)
|
||||
if (!tenantId && req.headers['x-tenant-id']) {
|
||||
tenantId = req.headers['x-tenant-id'];
|
||||
}
|
||||
|
||||
// 4. Check URL parameter (for multi-tenant admin)
|
||||
if (!tenantId && req.query.tenant) {
|
||||
tenantSlug = req.query.tenant;
|
||||
}
|
||||
|
||||
// 5. Get from authenticated user's tenant
|
||||
if (!tenantId && req.user && req.user.tenantId) {
|
||||
tenantId = req.user.tenantId;
|
||||
}
|
||||
|
||||
// Load tenant by slug if we have it
|
||||
if (tenantSlug && !tenantId) {
|
||||
const tenant = await Tenant.findBySlug(tenantSlug);
|
||||
if (tenant) {
|
||||
tenantId = tenant._id;
|
||||
}
|
||||
}
|
||||
|
||||
// Load tenant by ID if we have it
|
||||
let tenant = null;
|
||||
if (tenantId) {
|
||||
tenant = await Tenant.findById(tenantId);
|
||||
if (!tenant || tenant.status === 'inactive') {
|
||||
return res.status(403).json({
|
||||
error: 'Tenant not found or inactive'
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// For protected routes, tenant is required
|
||||
if (req.requireTenant && !tenant) {
|
||||
return res.status(400).json({
|
||||
error: 'Tenant context required'
|
||||
});
|
||||
}
|
||||
|
||||
// Check tenant status and limits
|
||||
if (tenant) {
|
||||
// Check if tenant is suspended
|
||||
if (tenant.status === 'suspended') {
|
||||
return res.status(403).json({
|
||||
error: 'Tenant account is suspended',
|
||||
reason: tenant.metadata?.suspensionReason
|
||||
});
|
||||
}
|
||||
|
||||
// Check if trial has expired
|
||||
if (tenant.status === 'trial' && !tenant.isTrialActive) {
|
||||
return res.status(403).json({
|
||||
error: 'Trial period has expired',
|
||||
upgradeUrl: `/billing/upgrade?tenant=${tenant.slug}`
|
||||
});
|
||||
}
|
||||
|
||||
// Set tenant context
|
||||
req.tenant = tenant;
|
||||
req.tenantId = tenant._id.toString();
|
||||
|
||||
// Add tenant filter to all database queries
|
||||
if (req.method === 'GET') {
|
||||
req.query.tenantId = req.tenantId;
|
||||
} else if (req.body) {
|
||||
req.body.tenantId = req.tenantId;
|
||||
}
|
||||
|
||||
// Update last active timestamp
|
||||
tenant.lastActiveAt = new Date();
|
||||
await tenant.save();
|
||||
}
|
||||
|
||||
next();
|
||||
} catch (error) {
|
||||
logger.error('Tenant middleware error:', error);
|
||||
res.status(500).json({ error: 'Internal server error' });
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Require tenant middleware
|
||||
* Use this for routes that must have tenant context
|
||||
*/
|
||||
const requireTenant = (req, res, next) => {
|
||||
req.requireTenant = true;
|
||||
next();
|
||||
};
|
||||
|
||||
/**
|
||||
* Check tenant resource limits
|
||||
*/
|
||||
const checkTenantLimits = (resource) => {
|
||||
return async (req, res, next) => {
|
||||
try {
|
||||
if (!req.tenant) {
|
||||
return next();
|
||||
}
|
||||
|
||||
const tenant = req.tenant;
|
||||
|
||||
// Check if tenant can perform this action
|
||||
if (!tenant.checkLimit(resource)) {
|
||||
return res.status(429).json({
|
||||
error: 'Resource limit exceeded',
|
||||
resource,
|
||||
current: tenant.usage[resource],
|
||||
limit: tenant.limits[resource],
|
||||
upgradeUrl: `/billing/upgrade?tenant=${tenant.slug}`
|
||||
});
|
||||
}
|
||||
|
||||
// For write operations, increment usage after success
|
||||
if (req.method !== 'GET') {
|
||||
res.on('finish', async () => {
|
||||
if (res.statusCode >= 200 && res.statusCode < 300) {
|
||||
try {
|
||||
await tenant.incrementUsage(resource);
|
||||
} catch (error) {
|
||||
logger.error('Failed to increment tenant usage:', error);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
next();
|
||||
} catch (error) {
|
||||
logger.error('Tenant limit check error:', error);
|
||||
res.status(500).json({ error: 'Internal server error' });
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Check tenant feature access
|
||||
*/
|
||||
const requireFeature = (feature) => {
|
||||
return (req, res, next) => {
|
||||
if (!req.tenant) {
|
||||
return next();
|
||||
}
|
||||
|
||||
if (!req.tenant.features[feature]) {
|
||||
return res.status(403).json({
|
||||
error: 'Feature not available in your plan',
|
||||
feature,
|
||||
upgradeUrl: `/billing/upgrade?tenant=${req.tenant.slug}`
|
||||
});
|
||||
}
|
||||
|
||||
next();
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Tenant admin middleware
|
||||
* Check if user is tenant owner or admin
|
||||
*/
|
||||
const requireTenantAdmin = async (req, res, next) => {
|
||||
try {
|
||||
if (!req.tenant || !req.user) {
|
||||
return res.status(403).json({
|
||||
error: 'Access denied'
|
||||
});
|
||||
}
|
||||
|
||||
// Check if user is tenant owner
|
||||
if (req.tenant.owner.userId.toString() === req.user.id) {
|
||||
req.isTenantOwner = true;
|
||||
return next();
|
||||
}
|
||||
|
||||
// Check if user has admin role for this tenant
|
||||
const user = await User.findById(req.user.id);
|
||||
if (user && user.tenantId.toString() === req.tenantId && user.role === 'admin') {
|
||||
req.isTenantAdmin = true;
|
||||
return next();
|
||||
}
|
||||
|
||||
res.status(403).json({
|
||||
error: 'Tenant admin access required'
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Tenant admin check error:', error);
|
||||
res.status(500).json({ error: 'Internal server error' });
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Cross-tenant access middleware
|
||||
* For super admin to access any tenant
|
||||
*/
|
||||
const allowCrossTenant = async (req, res, next) => {
|
||||
try {
|
||||
if (!req.user || req.user.role !== 'superadmin') {
|
||||
return next();
|
||||
}
|
||||
|
||||
// Super admin can override tenant context
|
||||
if (req.headers['x-override-tenant-id']) {
|
||||
const overrideTenantId = req.headers['x-override-tenant-id'];
|
||||
const tenant = await Tenant.findById(overrideTenantId);
|
||||
|
||||
if (tenant) {
|
||||
req.tenant = tenant;
|
||||
req.tenantId = tenant._id.toString();
|
||||
req.isCrossTenantAccess = true;
|
||||
|
||||
logger.info('Cross-tenant access', {
|
||||
superAdminId: req.user.id,
|
||||
targetTenantId: overrideTenantId,
|
||||
action: req.method + ' ' + req.path
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
next();
|
||||
} catch (error) {
|
||||
logger.error('Cross-tenant access error:', error);
|
||||
next();
|
||||
}
|
||||
};
|
||||
|
||||
export {
  tenantMiddleware,
  requireTenant,
  checkTenantLimits,
  requireFeature,
  requireTenantAdmin,
  allowCrossTenant
};
|
||||
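Note that requireTenant only sets a flag that tenantMiddleware later reads, so it has to run before tenantMiddleware in the chain. A usage sketch, assuming this file is exposed as middleware/tenant.js and an authenticate middleware populates req.user; the paths are illustrative and the feature names come from the schema above:

import express from 'express';
import { authenticate } from '../middleware/auth.js';
import {
  tenantMiddleware,
  requireTenant,
  checkTenantLimits,
  requireFeature,
  requireTenantAdmin
} from '../middleware/tenant.js';

const router = express.Router();
router.use(authenticate);

// Creating a campaign needs tenant context, counts against the campaigns limit,
// and is only available on plans with the automation feature enabled.
router.post(
  '/campaigns',
  requireTenant,
  tenantMiddleware,
  checkTenantLimits('campaigns'),
  requireFeature('automation'),
  (req, res) => res.status(201).json({ success: true })
);

// Tenant settings may only be changed by the tenant owner or a tenant admin.
router.put('/settings', tenantMiddleware, requireTenantAdmin, (req, res) => res.json({ success: true }));

export default router;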
@@ -0,0 +1,321 @@
|
||||
import { validationResult } from 'express-validator';
|
||||
import DOMPurify from 'isomorphic-dompurify';
|
||||
import { logger } from '../utils/logger.js';
|
||||
|
||||
/**
|
||||
* Validate request using express-validator
|
||||
*/
|
||||
export function validateRequest(validations) {
|
||||
return async (req, res, next) => {
|
||||
// Run all validations
|
||||
for (let validation of validations) {
|
||||
const result = await validation.run(req);
|
||||
if (!result.isEmpty()) break;
|
||||
}
|
||||
|
||||
const errors = validationResult(req);
|
||||
if (!errors.isEmpty()) {
|
||||
logger.warn('Validation error', {
|
||||
errors: errors.array(),
|
||||
path: req.path,
|
||||
method: req.method,
|
||||
ip: req.ip
|
||||
});
|
||||
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: 'Validation failed',
|
||||
details: errors.array().map(err => ({
|
||||
field: err.param,
|
||||
message: err.msg,
|
||||
value: err.value
|
||||
}))
|
||||
});
|
||||
}
|
||||
|
||||
next();
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Sanitize HTML input to prevent XSS
|
||||
*/
|
||||
export function sanitizeHtml(input) {
|
||||
if (typeof input !== 'string') return input;
|
||||
return DOMPurify.sanitize(input, {
|
||||
ALLOWED_TAGS: [],
|
||||
ALLOWED_ATTR: []
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Sanitize object recursively
|
||||
*/
|
||||
export function sanitizeObject(obj) {
|
||||
if (typeof obj !== 'object' || obj === null) {
|
||||
return typeof obj === 'string' ? sanitizeHtml(obj) : obj;
|
||||
}
|
||||
|
||||
if (Array.isArray(obj)) {
|
||||
return obj.map(item => sanitizeObject(item));
|
||||
}
|
||||
|
||||
const sanitized = {};
|
||||
for (const [key, value] of Object.entries(obj)) {
|
||||
sanitized[key] = sanitizeObject(value);
|
||||
}
|
||||
return sanitized;
|
||||
}
|
||||
|
||||
/**
|
||||
* Middleware to sanitize request body
|
||||
*/
|
||||
export function sanitizeBody(req, res, next) {
|
||||
if (req.body) {
|
||||
req.body = sanitizeObject(req.body);
|
||||
}
|
||||
next();
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate JSON Schema
|
||||
*/
|
||||
export function validateSchema(schema) {
|
||||
return (req, res, next) => {
|
||||
const { error, value } = schema.validate(req.body, {
|
||||
abortEarly: false,
|
||||
stripUnknown: true
|
||||
});
|
||||
|
||||
if (error) {
|
||||
const details = error.details.map(detail => ({
|
||||
field: detail.path.join('.'),
|
||||
message: detail.message
|
||||
}));
|
||||
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: 'Schema validation failed',
|
||||
details
|
||||
});
|
||||
}
|
||||
|
||||
req.body = value;
|
||||
next();
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate content type
|
||||
*/
|
||||
export function validateContentType(allowedTypes = ['application/json']) {
|
||||
return (req, res, next) => {
|
||||
const contentType = req.get('content-type');
|
||||
|
||||
if (!contentType || !allowedTypes.some(type => contentType.includes(type))) {
|
||||
return res.status(415).json({
|
||||
success: false,
|
||||
error: 'Unsupported media type',
|
||||
expected: allowedTypes
|
||||
});
|
||||
}
|
||||
|
||||
next();
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate request size
|
||||
*/
|
||||
export function validateRequestSize(maxSize = '1mb') {
|
||||
return (req, res, next) => {
|
||||
const contentLength = parseInt(req.get('content-length') || '0');
|
||||
const maxBytes = parseSize(maxSize);
|
||||
|
||||
if (contentLength > maxBytes) {
|
||||
return res.status(413).json({
|
||||
success: false,
|
||||
error: 'Request entity too large',
|
||||
maxSize
|
||||
});
|
||||
}
|
||||
|
||||
next();
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* SQL injection prevention
|
||||
*/
|
||||
export function preventSqlInjection(req, res, next) {
|
||||
const sqlPatterns = [
|
||||
/(\b(union|select|insert|update|delete|drop|create|alter|exec|execute)\b)/gi,
|
||||
/(--|\/\*|\*\/|xp_|sp_)/gi,
|
||||
/(\bor\b\s*\d+\s*=\s*\d+|\band\b\s*\d+\s*=\s*\d+)/gi
|
||||
];
|
||||
|
||||
const checkValue = (value) => {
|
||||
if (typeof value === 'string') {
|
||||
for (const pattern of sqlPatterns) {
|
||||
if (pattern.test(value)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
const checkObject = (obj) => {
|
||||
if (typeof obj !== 'object' || obj === null) {
|
||||
return checkValue(obj);
|
||||
}
|
||||
|
||||
for (const value of Object.values(obj)) {
|
||||
if (Array.isArray(value)) {
|
||||
if (value.some(item => checkObject(item))) return true;
|
||||
} else if (checkObject(value)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
if (checkObject(req.body) || checkObject(req.query) || checkObject(req.params)) {
|
||||
logger.warn('Potential SQL injection attempt', {
|
||||
ip: req.ip,
|
||||
path: req.path,
|
||||
method: req.method
|
||||
});
|
||||
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: 'Invalid input detected'
|
||||
});
|
||||
}
|
||||
|
||||
next();
|
||||
}
|
||||
|
||||
/**
|
||||
* NoSQL injection prevention
|
||||
*/
|
||||
export function preventNoSqlInjection(req, res, next) {
|
||||
const checkObject = (obj) => {
|
||||
if (typeof obj !== 'object' || obj === null) return false;
|
||||
|
||||
for (const [key, value] of Object.entries(obj)) {
|
||||
// Check for MongoDB operators
|
||||
if (key.startsWith('$')) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (typeof value === 'object') {
|
||||
if (checkObject(value)) return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
if (checkObject(req.body) || checkObject(req.query)) {
|
||||
logger.warn('Potential NoSQL injection attempt', {
|
||||
ip: req.ip,
|
||||
path: req.path,
|
||||
method: req.method
|
||||
});
|
||||
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: 'Invalid input detected'
|
||||
});
|
||||
}
|
||||
|
||||
next();
|
||||
}
|
||||
|
||||
/**
|
||||
* Command injection prevention
|
||||
*/
|
||||
export function preventCommandInjection(req, res, next) {
|
||||
const cmdPatterns = [
|
||||
/[;&|`$()]/g,
|
||||
/\b(rm|curl|wget|bash|sh|python|node|npm)\b/gi
|
||||
];
|
||||
|
||||
const checkValue = (value) => {
|
||||
if (typeof value === 'string') {
|
||||
for (const pattern of cmdPatterns) {
|
||||
if (pattern.test(value)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
const checkObject = (obj) => {
|
||||
if (typeof obj !== 'object' || obj === null) {
|
||||
return checkValue(obj);
|
||||
}
|
||||
|
||||
for (const value of Object.values(obj)) {
|
||||
if (Array.isArray(value)) {
|
||||
if (value.some(item => checkObject(item))) return true;
|
||||
} else if (checkObject(value)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
if (checkObject(req.body) || checkObject(req.query)) {
|
||||
logger.warn('Potential command injection attempt', {
|
||||
ip: req.ip,
|
||||
path: req.path,
|
||||
method: req.method
|
||||
});
|
||||
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: 'Invalid input detected'
|
||||
});
|
||||
}
|
||||
|
||||
next();
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate allowed fields
|
||||
*/
|
||||
export function allowedFields(fields) {
|
||||
return (req, res, next) => {
|
||||
const bodyKeys = Object.keys(req.body || {});
|
||||
const invalidFields = bodyKeys.filter(key => !fields.includes(key));
|
||||
|
||||
if (invalidFields.length > 0) {
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: 'Invalid fields in request',
|
||||
invalidFields,
|
||||
allowedFields: fields
|
||||
});
|
||||
}
|
||||
|
||||
next();
|
||||
};
|
||||
}
|
||||
|
||||
// Helper function to parse size string
|
||||
function parseSize(size) {
|
||||
const units = {
|
||||
b: 1,
|
||||
kb: 1024,
|
||||
mb: 1024 * 1024,
|
||||
gb: 1024 * 1024 * 1024
|
||||
};
|
||||
|
||||
const match = size.toLowerCase().match(/^(\d+(?:\.\d+)?)\s*(b|kb|mb|gb)?$/);
|
||||
if (!match) return 1024 * 1024; // Default 1MB
|
||||
|
||||
const [, num, unit = 'b'] = match;
|
||||
return parseFloat(num) * units[unit];
|
||||
}
|
||||
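A sketch of combining these validators on a single route, assuming Joi (already listed in the gateway's dependencies) provides the schema and this file is exposed as middleware/validation.js; the field names are illustrative:

import express from 'express';
import Joi from 'joi';
import {
  sanitizeBody,
  validateSchema,
  validateContentType,
  allowedFields,
  preventNoSqlInjection
} from '../middleware/validation.js';

const router = express.Router();

const campaignSchema = Joi.object({
  name: Joi.string().min(3).max(100).required(),
  message: Joi.string().max(4096).required(),
  scheduledAt: Joi.date().iso()
});

router.post(
  '/campaigns',
  validateContentType(['application/json']),
  preventNoSqlInjection,
  sanitizeBody,
  allowedFields(['name', 'message', 'scheduledAt']),
  validateSchema(campaignSchema),
  (req, res) => res.status(201).json({ success: true, data: req.body })
);

export default router;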
189
marketing-agent/services/api-gateway/src/models/Role.js
Normal file
@@ -0,0 +1,189 @@
|
||||
import mongoose from 'mongoose';
|
||||
|
||||
const roleSchema = new mongoose.Schema({
|
||||
name: {
|
||||
type: String,
|
||||
required: true,
|
||||
unique: true,
|
||||
trim: true
|
||||
},
|
||||
displayName: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
description: String,
|
||||
permissions: [{
|
||||
resource: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
actions: [{
|
||||
type: String,
|
||||
enum: ['create', 'read', 'update', 'delete', 'execute']
|
||||
}],
|
||||
conditions: {
|
||||
type: Map,
|
||||
of: mongoose.Schema.Types.Mixed
|
||||
}
|
||||
}],
|
||||
isSystem: {
|
||||
type: Boolean,
|
||||
default: false
|
||||
},
|
||||
priority: {
|
||||
type: Number,
|
||||
default: 0
|
||||
},
|
||||
metadata: {
|
||||
createdBy: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: 'User'
|
||||
},
|
||||
updatedBy: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: 'User'
|
||||
}
|
||||
}
|
||||
}, {
|
||||
timestamps: true
|
||||
});
|
||||
|
||||
// Indexes
|
||||
roleSchema.index({ name: 1 });
|
||||
roleSchema.index({ isSystem: 1 });
|
||||
|
||||
// Pre-save validation
|
||||
roleSchema.pre('save', function(next) {
|
||||
if (this.isSystem && this.isModified('permissions')) {
|
||||
return next(new Error('Cannot modify permissions for system roles'));
|
||||
}
|
||||
next();
|
||||
});
|
||||
|
||||
// Method to check if role has permission
|
||||
roleSchema.methods.hasPermission = function(resource, action, context = {}) {
|
||||
const permission = this.permissions.find(p => p.resource === resource);
|
||||
|
||||
if (!permission || !permission.actions.includes(action)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check conditions if any
|
||||
if (permission.conditions && permission.conditions.size > 0) {
|
||||
for (const [key, value] of permission.conditions) {
|
||||
if (context[key] !== value) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
};
|
||||
|
||||
// Static method to create default roles
|
||||
roleSchema.statics.createDefaultRoles = async function() {
|
||||
const defaultRoles = [
|
||||
{
|
||||
name: 'admin',
|
||||
displayName: 'Administrator',
|
||||
description: 'Full system access',
|
||||
isSystem: true,
|
||||
priority: 100,
|
||||
permissions: [
|
||||
{
|
||||
resource: '*',
|
||||
actions: ['create', 'read', 'update', 'delete', 'execute']
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'manager',
|
||||
displayName: 'Campaign Manager',
|
||||
description: 'Manage campaigns and view analytics',
|
||||
isSystem: true,
|
||||
priority: 50,
|
||||
permissions: [
|
||||
{
|
||||
resource: 'campaigns',
|
||||
actions: ['create', 'read', 'update', 'delete', 'execute']
|
||||
},
|
||||
{
|
||||
resource: 'accounts',
|
||||
actions: ['create', 'read', 'update']
|
||||
},
|
||||
{
|
||||
resource: 'messages',
|
||||
actions: ['create', 'read', 'update', 'delete']
|
||||
},
|
||||
{
|
||||
resource: 'analytics',
|
||||
actions: ['read']
|
||||
},
|
||||
{
|
||||
resource: 'compliance',
|
||||
actions: ['read', 'update']
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'operator',
|
||||
displayName: 'Campaign Operator',
|
||||
description: 'Execute campaigns and send messages',
|
||||
isSystem: true,
|
||||
priority: 30,
|
||||
permissions: [
|
||||
{
|
||||
resource: 'campaigns',
|
||||
actions: ['read', 'execute']
|
||||
},
|
||||
{
|
||||
resource: 'accounts',
|
||||
actions: ['read']
|
||||
},
|
||||
{
|
||||
resource: 'messages',
|
||||
actions: ['create', 'read']
|
||||
},
|
||||
{
|
||||
resource: 'analytics',
|
||||
actions: ['read']
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'viewer',
|
||||
displayName: 'Viewer',
|
||||
description: 'View-only access',
|
||||
isSystem: true,
|
||||
priority: 10,
|
||||
permissions: [
|
||||
{
|
||||
resource: 'campaigns',
|
||||
actions: ['read']
|
||||
},
|
||||
{
|
||||
resource: 'accounts',
|
||||
actions: ['read']
|
||||
},
|
||||
{
|
||||
resource: 'messages',
|
||||
actions: ['read']
|
||||
},
|
||||
{
|
||||
resource: 'analytics',
|
||||
actions: ['read']
|
||||
}
|
||||
]
|
||||
}
|
||||
];
|
||||
|
||||
for (const roleData of defaultRoles) {
|
||||
await this.findOneAndUpdate(
|
||||
{ name: roleData.name },
|
||||
roleData,
|
||||
{ upsert: true, new: true }
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
export const Role = mongoose.model('Role', roleSchema);
|
||||
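The default roles are typically seeded once at startup and then queried through hasPermission. A short sketch, assuming the Mongoose connection is already open and the import path is illustrative:

import { Role } from './models/Role.js';

// Seeding is idempotent because createDefaultRoles upserts by name.
await Role.createDefaultRoles();

const manager = await Role.findOne({ name: 'manager' });
console.log(manager.hasPermission('campaigns', 'execute')); // true
console.log(manager.hasPermission('billing', 'read'));      // false
// The context object is only consulted when the permission carries conditions.
console.log(manager.hasPermission('compliance', 'update', { region: 'EU' })); // true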
263
marketing-agent/services/api-gateway/src/models/Tenant.js
Normal file
@@ -0,0 +1,263 @@
|
||||
import mongoose from 'mongoose';
|
||||
|
||||
const tenantSchema = new mongoose.Schema({
|
||||
// Basic Information
|
||||
name: {
|
||||
type: String,
|
||||
required: true,
|
||||
trim: true
|
||||
},
|
||||
slug: {
|
||||
type: String,
|
||||
required: true,
|
||||
unique: true,
|
||||
lowercase: true,
|
||||
trim: true,
|
||||
match: /^[a-z0-9-]+$/
|
||||
},
|
||||
domain: {
|
||||
type: String,
|
||||
unique: true,
|
||||
sparse: true,
|
||||
lowercase: true
|
||||
},
|
||||
|
||||
// Status
|
||||
status: {
|
||||
type: String,
|
||||
enum: ['active', 'suspended', 'inactive', 'trial'],
|
||||
default: 'trial'
|
||||
},
|
||||
|
||||
// Plan and Limits
|
||||
plan: {
|
||||
type: String,
|
||||
enum: ['free', 'starter', 'professional', 'enterprise', 'custom'],
|
||||
default: 'free'
|
||||
},
|
||||
limits: {
|
||||
users: { type: Number, default: 5 },
|
||||
campaigns: { type: Number, default: 10 },
|
||||
messagesPerMonth: { type: Number, default: 1000 },
|
||||
telegramAccounts: { type: Number, default: 1 },
|
||||
storage: { type: Number, default: 1073741824 }, // 1GB in bytes
|
||||
apiCallsPerHour: { type: Number, default: 1000 },
|
||||
webhooks: { type: Number, default: 5 },
|
||||
customIntegrations: { type: Boolean, default: false }
|
||||
},
|
||||
|
||||
// Usage Tracking
|
||||
usage: {
|
||||
users: { type: Number, default: 0 },
|
||||
campaigns: { type: Number, default: 0 },
|
||||
messagesThisMonth: { type: Number, default: 0 },
|
||||
storageUsed: { type: Number, default: 0 },
|
||||
lastResetDate: { type: Date, default: Date.now }
|
||||
},
|
||||
|
||||
// Billing Information
|
||||
billing: {
|
||||
customerId: String,
|
||||
subscriptionId: String,
|
||||
paymentMethod: String,
|
||||
billingEmail: String,
|
||||
billingAddress: {
|
||||
line1: String,
|
||||
line2: String,
|
||||
city: String,
|
||||
state: String,
|
||||
postalCode: String,
|
||||
country: String
|
||||
},
|
||||
nextBillingDate: Date,
|
||||
lastPaymentDate: Date,
|
||||
lastPaymentAmount: Number
|
||||
},
|
||||
|
||||
// Settings
|
||||
settings: {
|
||||
timezone: { type: String, default: 'UTC' },
|
||||
language: { type: String, default: 'en' },
|
||||
dateFormat: { type: String, default: 'YYYY-MM-DD' },
|
||||
timeFormat: { type: String, default: '24h' },
|
||||
currency: { type: String, default: 'USD' },
|
||||
allowSignup: { type: Boolean, default: false },
|
||||
requireEmailVerification: { type: Boolean, default: true },
|
||||
twoFactorAuth: { type: Boolean, default: false },
|
||||
ssoEnabled: { type: Boolean, default: false },
|
||||
ssoProvider: String,
|
||||
ssoConfig: mongoose.Schema.Types.Mixed
|
||||
},
|
||||
|
||||
// Branding
|
||||
branding: {
|
||||
logo: String,
|
||||
primaryColor: { type: String, default: '#3b82f6' },
|
||||
secondaryColor: { type: String, default: '#10b981' },
|
||||
customCss: String,
|
||||
emailFooter: String,
|
||||
supportEmail: String,
|
||||
supportUrl: String
|
||||
},
|
||||
|
||||
// Contact Information
|
||||
owner: {
|
||||
userId: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: 'User'
|
||||
},
|
||||
name: String,
|
||||
email: String,
|
||||
phone: String
|
||||
},
|
||||
|
||||
// Features
|
||||
features: {
|
||||
campaigns: { type: Boolean, default: true },
|
||||
automation: { type: Boolean, default: false },
|
||||
analytics: { type: Boolean, default: true },
|
||||
abTesting: { type: Boolean, default: false },
|
||||
apiAccess: { type: Boolean, default: false },
|
||||
customReports: { type: Boolean, default: false },
|
||||
whiteLabel: { type: Boolean, default: false },
|
||||
multiLanguage: { type: Boolean, default: false },
|
||||
advancedSegmentation: { type: Boolean, default: false },
|
||||
aiSuggestions: { type: Boolean, default: false }
|
||||
},
|
||||
|
||||
// Compliance
|
||||
compliance: {
|
||||
gdprEnabled: { type: Boolean, default: true },
|
||||
dataRetentionDays: { type: Number, default: 365 },
|
||||
auditLogRetentionDays: { type: Number, default: 730 },
|
||||
ipWhitelist: [String],
|
||||
allowedCountries: [String],
|
||||
blockedCountries: [String]
|
||||
},
|
||||
|
||||
// Trial Information
|
||||
trial: {
|
||||
startDate: Date,
|
||||
endDate: Date,
|
||||
extended: { type: Boolean, default: false },
|
||||
converted: { type: Boolean, default: false }
|
||||
},
|
||||
|
||||
// Metadata
|
||||
metadata: mongoose.Schema.Types.Mixed,
|
||||
|
||||
// Timestamps
|
||||
createdAt: {
|
||||
type: Date,
|
||||
default: Date.now
|
||||
},
|
||||
updatedAt: {
|
||||
type: Date,
|
||||
default: Date.now
|
||||
},
|
||||
lastActiveAt: Date,
|
||||
suspendedAt: Date,
|
||||
deletedAt: Date
|
||||
});
|
||||
|
||||
// Indexes
|
||||
tenantSchema.index({ slug: 1 });
|
||||
tenantSchema.index({ domain: 1 });
|
||||
tenantSchema.index({ status: 1 });
|
||||
tenantSchema.index({ 'owner.email': 1 });
|
||||
tenantSchema.index({ createdAt: -1 });
|
||||
|
||||
// Virtual for trial status
|
||||
tenantSchema.virtual('isTrialActive').get(function() {
|
||||
if (this.status !== 'trial') return false;
|
||||
if (!this.trial.endDate) return false;
|
||||
return new Date() < this.trial.endDate;
|
||||
});
|
||||
|
||||
// Virtual for usage percentage
|
||||
tenantSchema.virtual('usagePercentage').get(function() {
|
||||
const percentages = {
|
||||
users: (this.usage.users / this.limits.users) * 100,
|
||||
campaigns: (this.usage.campaigns / this.limits.campaigns) * 100,
|
||||
messages: (this.usage.messagesThisMonth / this.limits.messagesPerMonth) * 100,
|
||||
storage: (this.usage.storageUsed / this.limits.storage) * 100
|
||||
};
|
||||
return percentages;
|
||||
});
|
||||
|
||||
// Methods
|
||||
tenantSchema.methods.checkLimit = function(resource, amount = 1) {
|
||||
const current = this.usage[resource] || 0;
|
||||
const limit = this.limits[resource] || 0;
|
||||
return current + amount <= limit;
|
||||
};
|
||||
|
||||
tenantSchema.methods.incrementUsage = async function(resource, amount = 1) {
|
||||
this.usage[resource] = (this.usage[resource] || 0) + amount;
|
||||
this.lastActiveAt = new Date();
|
||||
await this.save();
|
||||
};
|
||||
|
||||
tenantSchema.methods.resetMonthlyUsage = async function() {
|
||||
this.usage.messagesThisMonth = 0;
|
||||
this.usage.lastResetDate = new Date();
|
||||
await this.save();
|
||||
};
|
||||
|
||||
tenantSchema.methods.suspend = async function(reason) {
|
||||
this.status = 'suspended';
|
||||
this.suspendedAt = new Date();
|
||||
if (reason) {
|
||||
this.metadata = this.metadata || {};
|
||||
this.metadata.suspensionReason = reason;
|
||||
}
|
||||
await this.save();
|
||||
};
|
||||
|
||||
tenantSchema.methods.activate = async function() {
|
||||
this.status = 'active';
|
||||
this.suspendedAt = null;
|
||||
if (this.metadata && this.metadata.suspensionReason) {
|
||||
delete this.metadata.suspensionReason;
|
||||
}
|
||||
await this.save();
|
||||
};
|
||||
|
||||
// Statics
|
||||
tenantSchema.statics.generateSlug = function(name) {
|
||||
return name
|
||||
.toLowerCase()
|
||||
.replace(/[^a-z0-9]+/g, '-')
|
||||
.replace(/^-+|-+$/g, '');
|
||||
};
|
||||
|
||||
tenantSchema.statics.findByDomain = function(domain) {
|
||||
return this.findOne({ domain, status: { $ne: 'inactive' } });
|
||||
};
|
||||
|
||||
tenantSchema.statics.findBySlug = function(slug) {
|
||||
return this.findOne({ slug, status: { $ne: 'inactive' } });
|
||||
};
|
||||
|
||||
// Middleware
|
||||
tenantSchema.pre('save', function(next) {
|
||||
this.updatedAt = new Date();
|
||||
next();
|
||||
});
|
||||
|
||||
// Reset monthly usage on the first day of each month
|
||||
tenantSchema.pre('save', async function(next) {
|
||||
if (this.usage.lastResetDate) {
|
||||
const now = new Date();
|
||||
const lastReset = new Date(this.usage.lastResetDate);
|
||||
|
||||
if (now.getMonth() !== lastReset.getMonth() || now.getFullYear() !== lastReset.getFullYear()) {
|
||||
this.usage.messagesThisMonth = 0;
|
||||
this.usage.lastResetDate = now;
|
||||
}
|
||||
}
|
||||
next();
|
||||
});
|
||||
|
||||
export default mongoose.model('Tenant', tenantSchema);
|
||||
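A sketch of the typical tenant lifecycle against this model, assuming a connected Mongoose instance; the field values are illustrative:

import Tenant from './models/Tenant.js';

// Create a 14-day trial tenant with a slug derived from its display name.
const tenant = await Tenant.create({
  name: 'Acme Corp',
  slug: Tenant.generateSlug('Acme Corp'),
  status: 'trial',
  trial: { startDate: new Date(), endDate: new Date(Date.now() + 14 * 24 * 60 * 60 * 1000) }
});

// Enforce the campaign quota before recording another campaign.
if (tenant.checkLimit('campaigns')) {
  await tenant.incrementUsage('campaigns');
} else {
  console.log('Campaign limit reached:', tenant.usagePercentage.campaigns.toFixed(1), '%');
}

// Suspension stores the reason in metadata; activate clears it again.
await tenant.suspend('Payment failed');
await tenant.activate();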
266
marketing-agent/services/api-gateway/src/models/User.js
Normal file
@@ -0,0 +1,266 @@
|
||||
import crypto from 'crypto';
import mongoose from 'mongoose';
import bcrypt from 'bcryptjs';
|
||||
|
||||
const userSchema = new mongoose.Schema({
|
||||
// Tenant association
|
||||
tenantId: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: 'Tenant',
|
||||
required: true,
|
||||
index: true
|
||||
},
|
||||
username: {
|
||||
type: String,
|
||||
required: true,
|
||||
trim: true,
|
||||
minlength: 3,
|
||||
maxlength: 30
|
||||
},
|
||||
email: {
|
||||
type: String,
|
||||
required: true,
|
||||
lowercase: true,
|
||||
trim: true
|
||||
},
|
||||
password: {
|
||||
type: String,
|
||||
required: true,
|
||||
minlength: 6
|
||||
},
|
||||
role: {
|
||||
type: String,
|
||||
enum: ['admin', 'manager', 'operator', 'viewer', 'superadmin'],
|
||||
default: 'operator'
|
||||
},
|
||||
permissions: [{
|
||||
resource: {
|
||||
type: String,
|
||||
enum: [
|
||||
'campaigns',
|
||||
'accounts',
|
||||
'messages',
|
||||
'analytics',
|
||||
'users',
|
||||
'settings',
|
||||
'compliance',
|
||||
'billing'
|
||||
]
|
||||
},
|
||||
actions: [{
|
||||
type: String,
|
||||
enum: ['create', 'read', 'update', 'delete', 'execute']
|
||||
}]
|
||||
}],
|
||||
isActive: {
|
||||
type: Boolean,
|
||||
default: true
|
||||
},
|
||||
lastLogin: Date,
|
||||
loginAttempts: {
|
||||
type: Number,
|
||||
default: 0
|
||||
},
|
||||
lockUntil: Date,
|
||||
twoFactorEnabled: {
|
||||
type: Boolean,
|
||||
default: false
|
||||
},
|
||||
twoFactorSecret: String,
|
||||
apiKeys: [{
|
||||
key: String,
|
||||
name: String,
|
||||
permissions: [String],
|
||||
createdAt: Date,
|
||||
lastUsed: Date,
|
||||
expiresAt: Date
|
||||
}],
|
||||
preferences: {
|
||||
language: {
|
||||
type: String,
|
||||
default: 'en'
|
||||
},
|
||||
timezone: {
|
||||
type: String,
|
||||
default: 'UTC'
|
||||
},
|
||||
notifications: {
|
||||
email: {
|
||||
type: Boolean,
|
||||
default: true
|
||||
},
|
||||
inApp: {
|
||||
type: Boolean,
|
||||
default: true
|
||||
}
|
||||
}
|
||||
},
|
||||
metadata: {
|
||||
createdBy: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: 'User'
|
||||
},
|
||||
updatedBy: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: 'User'
|
||||
}
|
||||
}
|
||||
}, {
|
||||
timestamps: true
|
||||
});
|
||||
|
||||
// Indexes
|
||||
userSchema.index({ tenantId: 1, email: 1 }, { unique: true });
|
||||
userSchema.index({ tenantId: 1, username: 1 }, { unique: true });
|
||||
userSchema.index({ tenantId: 1, role: 1 });
|
||||
userSchema.index({ tenantId: 1, isActive: 1 });
|
||||
|
||||
// Virtual for account lock
|
||||
userSchema.virtual('isLocked').get(function() {
|
||||
return !!(this.lockUntil && this.lockUntil > Date.now());
|
||||
});
|
||||
|
||||
// Pre-save middleware to hash password
|
||||
userSchema.pre('save', async function(next) {
|
||||
if (!this.isModified('password')) return next();
|
||||
|
||||
try {
|
||||
const salt = await bcrypt.genSalt(10);
|
||||
this.password = await bcrypt.hash(this.password, salt);
|
||||
next();
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
// Method to compare password
|
||||
userSchema.methods.comparePassword = async function(candidatePassword) {
|
||||
return await bcrypt.compare(candidatePassword, this.password);
|
||||
};
|
||||
|
||||
// Method to handle failed login attempts
|
||||
userSchema.methods.incLoginAttempts = function() {
|
||||
// Reset attempts if lock has expired
|
||||
if (this.lockUntil && this.lockUntil < Date.now()) {
|
||||
return this.updateOne({
|
||||
$set: { loginAttempts: 1 },
|
||||
$unset: { lockUntil: 1 }
|
||||
});
|
||||
}
|
||||
|
||||
const updates = { $inc: { loginAttempts: 1 } };
|
||||
const maxAttempts = 5;
|
||||
const lockTime = 2 * 60 * 60 * 1000; // 2 hours
|
||||
|
||||
if (this.loginAttempts + 1 >= maxAttempts && !this.isLocked) {
|
||||
updates.$set = { lockUntil: Date.now() + lockTime };
|
||||
}
|
||||
|
||||
return this.updateOne(updates);
|
||||
};
|
||||
|
||||
// Method to reset login attempts
|
||||
userSchema.methods.resetLoginAttempts = function() {
|
||||
return this.updateOne({
|
||||
$set: { loginAttempts: 0 },
|
||||
$unset: { lockUntil: 1 }
|
||||
});
|
||||
};
|
||||
|
||||
// Method to check permission
|
||||
userSchema.methods.hasPermission = function(resource, action) {
|
||||
// Superadmin has all permissions across all tenants
|
||||
if (this.role === 'superadmin') return true;
|
||||
|
||||
// Admin has all permissions within their tenant
|
||||
if (this.role === 'admin') return true;
|
||||
|
||||
// Check role-based permissions
|
||||
const rolePermissions = {
|
||||
manager: {
|
||||
campaigns: ['create', 'read', 'update', 'delete', 'execute'],
|
||||
accounts: ['create', 'read', 'update'],
|
||||
messages: ['create', 'read', 'update', 'delete'],
|
||||
analytics: ['read'],
|
||||
compliance: ['read', 'update'],
|
||||
settings: ['read', 'update']
|
||||
},
|
||||
operator: {
|
||||
campaigns: ['read', 'execute'],
|
||||
accounts: ['read'],
|
||||
messages: ['create', 'read'],
|
||||
analytics: ['read'],
|
||||
compliance: ['read']
|
||||
},
|
||||
viewer: {
|
||||
campaigns: ['read'],
|
||||
accounts: ['read'],
|
||||
messages: ['read'],
|
||||
analytics: ['read'],
|
||||
compliance: ['read']
|
||||
}
|
||||
};
|
||||
|
||||
const rolePerms = rolePermissions[this.role];
|
||||
if (rolePerms && rolePerms[resource] && rolePerms[resource].includes(action)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check custom permissions
|
||||
const customPerm = this.permissions.find(p => p.resource === resource);
|
||||
return customPerm && customPerm.actions.includes(action);
|
||||
};
|
||||
|
||||
// Method to generate API key
|
||||
userSchema.methods.generateApiKey = function(name, permissions, expiresInDays = 365) {
|
||||
// crypto is imported at the top of the file (require is unavailable in an ES module).
const key = crypto.randomBytes(32).toString('hex');
|
||||
|
||||
this.apiKeys.push({
|
||||
key: crypto.createHash('sha256').update(key).digest('hex'),
|
||||
name,
|
||||
permissions,
|
||||
createdAt: new Date(),
|
||||
expiresAt: new Date(Date.now() + expiresInDays * 24 * 60 * 60 * 1000)
|
||||
});
|
||||
|
||||
return key; // Return unhashed key to user
|
||||
};
|
||||
|
||||
// Static method to find by credentials
|
||||
userSchema.statics.findByCredentials = async function(username, password, tenantId = null) {
|
||||
const query = {
|
||||
$or: [{ username }, { email: username }],
|
||||
isActive: true
|
||||
};
|
||||
|
||||
// For non-superadmin users, require tenant context
|
||||
if (tenantId) {
|
||||
query.tenantId = tenantId;
|
||||
}
|
||||
|
||||
const user = await this.findOne(query);
|
||||
|
||||
if (!user) {
|
||||
throw new Error('Invalid credentials');
|
||||
}
|
||||
|
||||
if (user.isLocked) {
|
||||
throw new Error('Account is locked due to too many failed attempts');
|
||||
}
|
||||
|
||||
const isMatch = await user.comparePassword(password);
|
||||
|
||||
if (!isMatch) {
|
||||
await user.incLoginAttempts();
|
||||
throw new Error('Invalid credentials');
|
||||
}
|
||||
|
||||
// Reset login attempts and update last login
|
||||
await user.resetLoginAttempts();
|
||||
user.lastLogin = new Date();
|
||||
await user.save();
|
||||
|
||||
return user;
|
||||
};
|
||||
|
||||
export const User = mongoose.model('User', userSchema);
|
||||
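A sketch of the authentication flow this model supports, assuming a known tenant ObjectId; the credential values and import path are illustrative:

import { User } from './models/User.js';

const tenantId = process.env.TENANT_ID; // illustrative tenant context

// findByCredentials throws on unknown users, wrong passwords and locked accounts.
try {
  const user = await User.findByCredentials('alice', 's3cret-pass', tenantId);
  console.log(user.hasPermission('campaigns', 'execute'));

  // generateApiKey returns the unhashed key once; only its SHA-256 hash is persisted.
  const apiKey = user.generateApiKey('ci-pipeline', ['campaigns:read'], 90);
  await user.save();
  console.log('Store this key now, it cannot be recovered later:', apiKey);
} catch (err) {
  console.error('Login rejected:', err.message);
}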
502
marketing-agent/services/api-gateway/src/routes/auth.js
Normal file
@@ -0,0 +1,502 @@
|
||||
import express from 'express';
|
||||
import axios from 'axios';
|
||||
import bcrypt from 'bcryptjs';
|
||||
import mongoose from 'mongoose';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import { config } from '../config/index.js';
|
||||
import { generateToken, generateRefreshToken, authenticate } from '../middleware/auth.js';
|
||||
import { strictRateLimiter } from '../middleware/rateLimiter.js';
|
||||
import { logger } from '../utils/logger.js';
|
||||
import { cache } from '../utils/cache.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
// Import data models
import { User } from '../models/User.js';
import Tenant from '../models/Tenant.js';
|
||||
|
||||
// Connect to MongoDB
|
||||
mongoose.connect(config.mongodb.uri || 'mongodb://mongodb:27017/marketing_agent')
|
||||
.then(() => logger.info('Connected to MongoDB for auth'))
|
||||
.catch(err => logger.error('MongoDB connection error:', err));
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/v1/auth/login:
|
||||
* post:
|
||||
* summary: User login
|
||||
* tags: [Authentication]
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* required:
|
||||
* - username
|
||||
* - password
|
||||
* properties:
|
||||
* username:
|
||||
* type: string
|
||||
* password:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Login successful
|
||||
* 401:
|
||||
* description: Invalid credentials
|
||||
*/
|
||||
router.post('/login', strictRateLimiter, async (req, res) => {
|
||||
try {
|
||||
const { username, password, tenantSlug, tenantId } = req.body;
|
||||
|
||||
// Validate input
|
||||
if (!username || !password) {
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: 'Username and password are required'
|
||||
});
|
||||
}
|
||||
|
||||
// Get tenant context
|
||||
let tenant = null;
|
||||
if (tenantSlug) {
|
||||
tenant = await Tenant.findBySlug(tenantSlug);
|
||||
} else if (tenantId) {
|
||||
tenant = await Tenant.findById(tenantId);
|
||||
} else if (req.get('host')) {
|
||||
// Try to find tenant by domain
|
||||
const host = req.get('host');
|
||||
tenant = await Tenant.findByDomain(host);
|
||||
|
||||
// Try subdomain
|
||||
if (!tenant) {
|
||||
const subdomain = host.split('.')[0];
|
||||
if (subdomain && subdomain !== 'app' && subdomain !== 'www') {
|
||||
tenant = await Tenant.findBySlug(subdomain);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Find user with tenant context; findByCredentials throws on bad credentials or a locked account
let user;
try {
  user = await User.findByCredentials(username, password, tenant?._id);
} catch (authError) {
  return res.status(401).json({
    success: false,
    error: authError.message
  });
}
|
||||
|
||||
// Password validation is handled in findByCredentials method
|
||||
|
||||
// Check whether the user account is active
|
||||
if (!user.isActive) {
|
||||
return res.status(403).json({
|
||||
success: false,
|
||||
error: 'Account is inactive'
|
||||
});
|
||||
}
|
||||
|
||||
// Update the last login time
|
||||
user.lastLogin = new Date();
|
||||
await user.save();
|
||||
|
||||
// Load tenant if user has tenantId but we don't have tenant yet
|
||||
if (user.tenantId && !tenant) {
|
||||
tenant = await Tenant.findById(user.tenantId);
|
||||
}
|
||||
|
||||
// Check tenant status
|
||||
if (tenant && tenant.status === 'suspended') {
|
||||
return res.status(403).json({
|
||||
success: false,
|
||||
error: 'Tenant account is suspended'
|
||||
});
|
||||
}
|
||||
|
||||
// Generate tokens
|
||||
const tokenPayload = {
|
||||
userId: user._id.toString(),
|
||||
tenantId: user.tenantId?.toString(),
|
||||
username: user.username,
|
||||
role: user.role,
|
||||
permissions: user.role === 'admin' || user.role === 'superadmin' ? ['all'] : ['read', 'write']
|
||||
};
|
||||
|
||||
const accessToken = generateToken(tokenPayload);
|
||||
const refreshToken = generateRefreshToken(tokenPayload);
|
||||
|
||||
// Store refresh token in cache
|
||||
await cache.set(`refresh:${user._id}`, refreshToken, 7 * 24 * 60 * 60); // 7 days
|
||||
|
||||
// Log successful login
|
||||
logger.info('User logged in', { userId: user._id, username: user.username });
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
data: {
|
||||
accessToken,
|
||||
refreshToken,
|
||||
user: {
|
||||
id: user._id.toString(),
|
||||
username: user.username,
|
||||
role: user.role,
|
||||
tenantId: user.tenantId?.toString(),
|
||||
tenant: tenant ? {
|
||||
id: tenant._id.toString(),
|
||||
name: tenant.name,
|
||||
slug: tenant.slug,
|
||||
plan: tenant.plan
|
||||
} : null
|
||||
}
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Login error:', error);
|
||||
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Login failed'
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/v1/auth/register:
|
||||
* post:
|
||||
* summary: Register new user
|
||||
* tags: [Authentication]
|
||||
*/
|
||||
router.post('/register', strictRateLimiter, async (req, res) => {
|
||||
try {
|
||||
const { username, password, email, accountName } = req.body;
|
||||
|
||||
// Validate input
|
||||
if (!username || !password || !email) {
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: 'Username, password, and email are required'
|
||||
});
|
||||
}
|
||||
|
||||
// Create account first
|
||||
const accountResponse = await axios.post(
|
||||
`${config.services.telegramSystem.url}/api/accounts`,
|
||||
{ name: accountName || `${username}'s Account` },
|
||||
{ timeout: 10000 }
|
||||
);
|
||||
|
||||
const accountId = accountResponse.data.account.id;
|
||||
|
||||
// Register user
|
||||
const userResponse = await axios.post(
|
||||
`${config.services.telegramSystem.url}/api/auth/register`,
|
||||
{
|
||||
username,
|
||||
password,
|
||||
email,
|
||||
accountId
|
||||
},
|
||||
{ timeout: 10000 }
|
||||
);
|
||||
|
||||
if (!userResponse.data.success) {
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: userResponse.data.error || 'Registration failed'
|
||||
});
|
||||
}
|
||||
|
||||
res.status(201).json({
|
||||
success: true,
|
||||
message: 'Registration successful',
|
||||
data: {
|
||||
userId: userResponse.data.user.id,
|
||||
accountId
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Registration error:', error);
|
||||
|
||||
if (error.response?.status === 409) {
|
||||
return res.status(409).json({
|
||||
success: false,
|
||||
error: 'Username or email already exists'
|
||||
});
|
||||
}
|
||||
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Registration failed'
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/v1/auth/refresh:
|
||||
* post:
|
||||
* summary: Refresh access token
|
||||
* tags: [Authentication]
|
||||
*/
|
||||
router.post('/refresh', async (req, res) => {
|
||||
try {
|
||||
const { refreshToken } = req.body;
|
||||
|
||||
if (!refreshToken) {
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: 'Refresh token required'
|
||||
});
|
||||
}
|
||||
|
||||
// Verify refresh token
|
||||
const jwt = await import('jsonwebtoken');
|
||||
const decoded = jwt.default.verify(refreshToken, config.jwt.secret);
|
||||
|
||||
// Check if refresh token exists in cache
|
||||
const storedToken = await cache.get(`refresh:${decoded.userId}`);
|
||||
if (!storedToken || storedToken !== refreshToken) {
|
||||
return res.status(401).json({
|
||||
success: false,
|
||||
error: 'Invalid refresh token'
|
||||
});
|
||||
}
|
||||
|
||||
// Generate new access token
|
||||
const newAccessToken = generateToken({
|
||||
userId: decoded.userId,
|
||||
tenantId: decoded.tenantId,
|
||||
username: decoded.username,
|
||||
role: decoded.role,
|
||||
permissions: decoded.permissions
|
||||
});
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
data: {
|
||||
accessToken: newAccessToken
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Token refresh error:', error);
|
||||
res.status(401).json({
|
||||
success: false,
|
||||
error: 'Invalid refresh token'
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/v1/auth/logout:
|
||||
* post:
|
||||
* summary: User logout
|
||||
* tags: [Authentication]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
*/
|
||||
router.post('/logout', authenticate, async (req, res) => {
|
||||
try {
|
||||
// Blacklist the current token
|
||||
const token = req.token;
|
||||
const ttl = 24 * 60 * 60; // 24 hours
|
||||
await cache.set(`blacklist:${token}`, '1', ttl);
|
||||
|
||||
// Remove refresh token
|
||||
await cache.del(`refresh:${req.user.id}`);
|
||||
|
||||
logger.info('User logged out', { userId: req.user.id });
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
message: 'Logged out successfully'
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Logout error:', error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Logout failed'
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/v1/auth/me:
|
||||
* get:
|
||||
* summary: Get current user info
|
||||
* tags: [Authentication]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
*/
|
||||
router.get('/me', authenticate, async (req, res) => {
|
||||
try {
|
||||
// Get full user details from Telegram System
|
||||
const response = await axios.get(
|
||||
`${config.services.telegramSystem.url}/api/users/${req.user.id}`,
|
||||
{
|
||||
headers: { 'X-Internal-Service': 'api-gateway' },
|
||||
timeout: 5000
|
||||
}
|
||||
);
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
data: response.data.user
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Get user info error:', error);
|
||||
|
||||
// Return basic info from token if service is down
|
||||
res.json({
|
||||
success: true,
|
||||
data: {
|
||||
id: req.user.id,
|
||||
tenantId: req.user.tenantId,
|
||||
role: req.user.role,
|
||||
permissions: req.user.permissions
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/v1/auth/api-keys:
|
||||
* post:
|
||||
* summary: Generate API key
|
||||
* tags: [Authentication]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
*/
|
||||
router.post('/api-keys', authenticate, async (req, res) => {
|
||||
try {
|
||||
const { name, permissions = ['read'] } = req.body;
|
||||
|
||||
if (!name) {
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: 'API key name required'
|
||||
});
|
||||
}
|
||||
|
||||
// Generate API key
|
||||
const apiKey = uuidv4().replace(/-/g, '');
|
||||
const keyData = {
|
||||
key: apiKey,
|
||||
name,
|
||||
userId: req.user.id,
|
||||
tenantId: req.user.tenantId,
|
||||
permissions,
|
||||
createdAt: new Date(),
|
||||
lastUsed: null
|
||||
};
|
||||
|
||||
// Store API key (in production, store in database)
|
||||
await cache.set(`apikey:${apiKey}`, JSON.stringify(keyData), 365 * 24 * 60 * 60); // 1 year
|
||||
|
||||
// Store user's API keys list
|
||||
await cache.sadd(`user:${req.user.id}:apikeys`, apiKey);
|
||||
|
||||
logger.info('API key generated', { userId: req.user.id, keyName: name });
|
||||
|
||||
res.status(201).json({
|
||||
success: true,
|
||||
data: {
|
||||
apiKey,
|
||||
name,
|
||||
permissions,
|
||||
createdAt: keyData.createdAt
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('API key generation error:', error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Failed to generate API key'
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/v1/auth/api-keys:
|
||||
* get:
|
||||
* summary: List user's API keys
|
||||
* tags: [Authentication]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
*/
|
||||
router.get('/api-keys', authenticate, async (req, res) => {
|
||||
try {
|
||||
// Get user's API keys
|
||||
const keyIds = await cache.smembers(`user:${req.user.id}:apikeys`);
|
||||
const keys = [];
|
||||
|
||||
for (const keyId of keyIds) {
|
||||
const keyData = await cache.get(`apikey:${keyId}`);
|
||||
if (keyData) {
|
||||
const parsed = JSON.parse(keyData);
|
||||
keys.push({
|
||||
id: keyId.substring(0, 8) + '...',
|
||||
name: parsed.name,
|
||||
permissions: parsed.permissions,
|
||||
createdAt: parsed.createdAt,
|
||||
lastUsed: parsed.lastUsed
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
data: keys
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('List API keys error:', error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Failed to list API keys'
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/v1/auth/api-keys/{keyId}:
|
||||
* delete:
|
||||
* summary: Revoke API key
|
||||
* tags: [Authentication]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
*/
|
||||
router.delete('/api-keys/:keyId', authenticate, async (req, res) => {
|
||||
try {
|
||||
const { keyId } = req.params;
|
||||
|
||||
// Remove from user's key list
|
||||
await cache.srem(`user:${req.user.id}:apikeys`, keyId);
|
||||
|
||||
// Delete key data
|
||||
await cache.del(`apikey:${keyId}`);
|
||||
|
||||
logger.info('API key revoked', { userId: req.user.id, keyId: keyId.substring(0, 8) });
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
message: 'API key revoked successfully'
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Revoke API key error:', error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Failed to revoke API key'
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
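A minimal client-side sketch (editorial illustration, not part of the committed files) of how the login and refresh endpoints above are meant to be used together; the base URL and credentials are placeholders.

const BASE_URL = 'http://localhost:3000/api/v1'; // placeholder; the gateway exposes port 3000

async function loginAndRefresh(username, password) {
  // POST /auth/login returns { success, data: { accessToken, refreshToken, user } }
  const loginRes = await fetch(`${BASE_URL}/auth/login`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ username, password })
  });
  const { data } = await loginRes.json();

  // Later, exchange the stored refresh token for a fresh access token
  const refreshRes = await fetch(`${BASE_URL}/auth/refresh`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ refreshToken: data.refreshToken })
  });
  const refreshed = await refreshRes.json();
  return refreshed.data.accessToken;
}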
|
||||
174
marketing-agent/services/api-gateway/src/routes/authDocs.js
Normal file
@@ -0,0 +1,174 @@
|
||||
/**
|
||||
* @swagger
|
||||
* /auth/login:
|
||||
* post:
|
||||
* summary: Authenticate user
|
||||
* tags: [Authentication]
|
||||
* security: []
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* required:
|
||||
* - username
|
||||
* - password
|
||||
* properties:
|
||||
* username:
|
||||
* type: string
|
||||
* example: admin
|
||||
* password:
|
||||
* type: string
|
||||
* format: password
|
||||
* example: password123
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Login successful
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: true
|
||||
* data:
|
||||
* type: object
|
||||
* properties:
|
||||
* user:
|
||||
* $ref: '#/components/schemas/User'
|
||||
* tokens:
|
||||
* type: object
|
||||
* properties:
|
||||
* accessToken:
|
||||
* type: string
|
||||
* refreshToken:
|
||||
* type: string
|
||||
* expiresIn:
|
||||
* type: integer
|
||||
* example: 86400
|
||||
* 401:
|
||||
* description: Invalid credentials
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: '#/components/schemas/Error'
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /auth/register:
|
||||
* post:
|
||||
* summary: Register new user
|
||||
* tags: [Authentication]
|
||||
* security: []
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* required:
|
||||
* - username
|
||||
* - email
|
||||
* - password
|
||||
* properties:
|
||||
* username:
|
||||
* type: string
|
||||
* example: newuser
|
||||
* email:
|
||||
* type: string
|
||||
* format: email
|
||||
* example: user@example.com
|
||||
* password:
|
||||
* type: string
|
||||
* format: password
|
||||
* minLength: 8
|
||||
* fullName:
|
||||
* type: string
|
||||
* example: John Doe
|
||||
* responses:
|
||||
* 201:
|
||||
* description: User registered successfully
|
||||
* 400:
|
||||
* $ref: '#/components/responses/ValidationError'
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /auth/me:
|
||||
* get:
|
||||
* summary: Get current user profile
|
||||
* tags: [Authentication]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* responses:
|
||||
* 200:
|
||||
* description: User profile
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* data:
|
||||
* $ref: '#/components/schemas/User'
|
||||
* 401:
|
||||
* $ref: '#/components/responses/UnauthorizedError'
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /auth/logout:
|
||||
* post:
|
||||
* summary: Logout user
|
||||
* tags: [Authentication]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Logged out successfully
|
||||
* 401:
|
||||
* $ref: '#/components/responses/UnauthorizedError'
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /auth/refresh:
|
||||
* post:
|
||||
* summary: Refresh access token
|
||||
* tags: [Authentication]
|
||||
* security: []
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* required:
|
||||
* - refreshToken
|
||||
* properties:
|
||||
* refreshToken:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Token refreshed
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* data:
|
||||
* type: object
|
||||
* properties:
|
||||
* accessToken:
|
||||
* type: string
|
||||
* expiresIn:
|
||||
* type: integer
|
||||
* 401:
|
||||
* description: Invalid refresh token
|
||||
*/
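A sketch (editorial, not part of the commit) of how an annotation-only file such as authDocs.js is typically folded into the served OpenAPI spec. It assumes swagger-jsdoc and swagger-ui-express are available to this service, which the visible package.json excerpt does not confirm.

import swaggerJsdoc from 'swagger-jsdoc';
import swaggerUi from 'swagger-ui-express';

export function mountApiDocs(app) {
  const spec = swaggerJsdoc({
    definition: {
      openapi: '3.0.0',
      info: { title: 'Marketing Agent API Gateway', version: '1.0.0' }
    },
    // Globbing the routes directory is what picks up the @swagger comments above
    apis: ['./src/routes/*.js']
  });
  app.use('/api-docs', swaggerUi.serve, swaggerUi.setup(spec));
}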
|
||||
629
marketing-agent/services/api-gateway/src/routes/backup.js
Normal file
@@ -0,0 +1,629 @@
|
||||
import express from 'express';
|
||||
import { authenticate } from '../middleware/auth.js';
|
||||
import { requireRole } from '../middleware/permission.js';
|
||||
import { validateRequest } from '../middleware/validation.js';
|
||||
import { body, query, param } from 'express-validator';
|
||||
import { backupService } from '../services/backup.js';
|
||||
import { logger } from '../utils/logger.js';
|
||||
import { cache } from '../utils/cache.js';
|
||||
import fs from 'fs/promises';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/v1/backup/create:
|
||||
* post:
|
||||
* summary: Create a system backup
|
||||
* tags: [Backup]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* requestBody:
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* description:
|
||||
* type: string
|
||||
* description: Backup description
|
||||
* encrypt:
|
||||
* type: boolean
|
||||
* description: Encrypt the backup
|
||||
* uploadToCloud:
|
||||
* type: boolean
|
||||
* description: Upload to cloud storage
|
||||
* components:
|
||||
* type: array
|
||||
* items:
|
||||
* type: string
|
||||
* enum: [mongodb, redis, postgresql, files]
|
||||
* description: Components to backup (default all)
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Backup created successfully
|
||||
* 409:
|
||||
* description: Backup already in progress
|
||||
*/
|
||||
router.post('/create',
|
||||
authenticate,
|
||||
requireRole('admin'),
|
||||
validateRequest([
|
||||
body('description').optional().isString().trim(),
|
||||
body('encrypt').optional().isBoolean(),
|
||||
body('uploadToCloud').optional().isBoolean(),
|
||||
body('components').optional().isArray()
|
||||
]),
|
||||
async (req, res) => {
|
||||
try {
|
||||
const { description, encrypt, uploadToCloud, components } = req.body;
|
||||
|
||||
// Check if backup is already running
|
||||
const isRunning = await cache.get('backup:running');
|
||||
if (isRunning) {
|
||||
return res.status(409).json({
|
||||
success: false,
|
||||
error: 'Backup already in progress'
|
||||
});
|
||||
}
|
||||
|
||||
// Set running flag
|
||||
await cache.set('backup:running', 'true', 'EX', 3600); // 1 hour timeout
|
||||
|
||||
// Start backup in background
|
||||
backupService.createFullBackup({
|
||||
description,
|
||||
encrypt,
|
||||
uploadToCloud,
|
||||
components,
|
||||
initiatedBy: req.user.id
|
||||
}).then(async (result) => {
|
||||
// Store backup info
|
||||
await cache.lpush('backup:history', JSON.stringify({
|
||||
...result,
|
||||
description,
|
||||
initiatedBy: req.user.id
|
||||
}));
|
||||
await cache.ltrim('backup:history', 0, 99); // Keep last 100
|
||||
|
||||
// Clear running flag
|
||||
await cache.del('backup:running');
|
||||
|
||||
logger.info('Backup completed', result);
|
||||
}).catch(async (error) => {
|
||||
// Clear running flag
|
||||
await cache.del('backup:running');
|
||||
|
||||
logger.error('Backup failed', error);
|
||||
});
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
message: 'Backup started',
|
||||
jobId: `backup-${Date.now()}`
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Failed to start backup:', error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Failed to start backup'
|
||||
});
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/v1/backup/restore/{backupId}:
|
||||
* post:
|
||||
* summary: Restore from a backup
|
||||
* tags: [Backup]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: backupId
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Backup ID to restore from
|
||||
* requestBody:
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* skipMongoDB:
|
||||
* type: boolean
|
||||
* skipRedis:
|
||||
* type: boolean
|
||||
* skipPostgreSQL:
|
||||
* type: boolean
|
||||
* skipFiles:
|
||||
* type: boolean
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Restore completed
|
||||
*/
|
||||
router.post('/restore/:backupId',
|
||||
authenticate,
|
||||
requireRole('admin'),
|
||||
validateRequest([
|
||||
param('backupId').notEmpty().matches(/^[\w.-]+$/), // disallow path separators so the id cannot escape the backups directory
|
||||
body('skipMongoDB').optional().isBoolean(),
|
||||
body('skipRedis').optional().isBoolean(),
|
||||
body('skipPostgreSQL').optional().isBoolean(),
|
||||
body('skipFiles').optional().isBoolean()
|
||||
]),
|
||||
async (req, res) => {
|
||||
try {
|
||||
const { backupId } = req.params;
|
||||
const options = req.body;
|
||||
|
||||
// Check if restore is already running
|
||||
const isRunning = await cache.get('restore:running');
|
||||
if (isRunning) {
|
||||
return res.status(409).json({
|
||||
success: false,
|
||||
error: 'Restore already in progress'
|
||||
});
|
||||
}
|
||||
|
||||
// Verify backup exists
|
||||
const backupPath = `/backups/${backupId}.tar.gz`;
|
||||
try {
|
||||
await fs.access(backupPath);
|
||||
} catch (error) {
|
||||
return res.status(404).json({
|
||||
success: false,
|
||||
error: 'Backup not found'
|
||||
});
|
||||
}
|
||||
|
||||
// Confirm restore operation
|
||||
logger.warn('System restore initiated', {
|
||||
backupId,
|
||||
userId: req.user.id,
|
||||
options
|
||||
});
|
||||
|
||||
// Set running flag
|
||||
await cache.set('restore:running', 'true', 'EX', 3600);
|
||||
|
||||
// Start restore in background
|
||||
backupService.restoreFromBackup(backupPath, options)
|
||||
.then(async (result) => {
|
||||
await cache.del('restore:running');
|
||||
|
||||
// Log restore event
|
||||
await cache.lpush('restore:history', JSON.stringify({
|
||||
...result,
|
||||
backupId,
|
||||
initiatedBy: req.user.id,
|
||||
timestamp: new Date().toISOString()
|
||||
}));
|
||||
|
||||
logger.info('Restore completed', result);
|
||||
})
|
||||
.catch(async (error) => {
|
||||
await cache.del('restore:running');
|
||||
logger.error('Restore failed', error);
|
||||
});
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
message: 'Restore started',
|
||||
warning: 'System will be temporarily unavailable during restore'
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Failed to start restore:', error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Failed to start restore'
|
||||
});
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/v1/backup/list:
|
||||
* get:
|
||||
* summary: List available backups
|
||||
* tags: [Backup]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* parameters:
|
||||
* - in: query
|
||||
* name: limit
|
||||
* schema:
|
||||
* type: integer
|
||||
* default: 20
|
||||
* - in: query
|
||||
* name: offset
|
||||
* schema:
|
||||
* type: integer
|
||||
* default: 0
|
||||
* responses:
|
||||
* 200:
|
||||
* description: List of backups
|
||||
*/
|
||||
router.get('/list',
|
||||
authenticate,
|
||||
requireRole('admin', 'manager'),
|
||||
validateRequest([
|
||||
query('limit').optional().isInt({ min: 1, max: 100 }),
|
||||
query('offset').optional().isInt({ min: 0 })
|
||||
]),
|
||||
async (req, res) => {
|
||||
try {
|
||||
const { limit = 20, offset = 0 } = req.query;
|
||||
|
||||
const backups = await backupService.listBackups();
|
||||
|
||||
// Get backup history from cache
|
||||
const history = await cache.lrange('backup:history', 0, -1);
|
||||
const historyMap = new Map();
|
||||
|
||||
history.forEach(item => {
|
||||
try {
|
||||
const data = JSON.parse(item);
|
||||
historyMap.set(data.backupId, data);
|
||||
} catch (error) {
|
||||
// Skip invalid entries
|
||||
}
|
||||
});
|
||||
|
||||
// Merge backup info with history
|
||||
const enrichedBackups = backups.map(backup => ({
|
||||
...backup,
|
||||
...historyMap.get(backup.id)
|
||||
}));
|
||||
|
||||
// Apply pagination
|
||||
const paginatedBackups = enrichedBackups.slice(
|
||||
parseInt(offset),
|
||||
parseInt(offset) + parseInt(limit)
|
||||
);
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
data: {
|
||||
backups: paginatedBackups,
|
||||
total: backups.length,
|
||||
limit: parseInt(limit),
|
||||
offset: parseInt(offset)
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Failed to list backups:', error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Failed to list backups'
|
||||
});
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/v1/backup/{backupId}:
|
||||
* delete:
|
||||
* summary: Delete a backup
|
||||
* tags: [Backup]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: backupId
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Backup deleted
|
||||
*/
|
||||
router.delete('/:backupId',
|
||||
authenticate,
|
||||
requireRole('admin'),
|
||||
validateRequest([
|
||||
param('backupId').notEmpty().matches(/^[\w.-]+$/) // disallow path separators so the id cannot escape the backups directory
|
||||
]),
|
||||
async (req, res) => {
|
||||
try {
|
||||
const { backupId } = req.params;
|
||||
|
||||
const backupPath = `/backups/${backupId}.tar.gz`;
|
||||
|
||||
try {
|
||||
await fs.unlink(backupPath);
|
||||
|
||||
logger.info('Backup deleted', {
|
||||
backupId,
|
||||
deletedBy: req.user.id
|
||||
});
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
message: 'Backup deleted successfully'
|
||||
});
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT') {
|
||||
return res.status(404).json({
|
||||
success: false,
|
||||
error: 'Backup not found'
|
||||
});
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Failed to delete backup:', error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Failed to delete backup'
|
||||
});
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/v1/backup/status:
|
||||
* get:
|
||||
* summary: Get backup/restore status
|
||||
* tags: [Backup]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Current status
|
||||
*/
|
||||
router.get('/status',
|
||||
authenticate,
|
||||
requireRole('admin', 'manager'),
|
||||
async (req, res) => {
|
||||
try {
|
||||
const backupRunning = await cache.get('backup:running');
|
||||
const restoreRunning = await cache.get('restore:running');
|
||||
|
||||
const status = {
|
||||
backup: {
|
||||
running: backupRunning === 'true',
|
||||
lastBackup: null
|
||||
},
|
||||
restore: {
|
||||
running: restoreRunning === 'true',
|
||||
lastRestore: null
|
||||
}
|
||||
};
|
||||
|
||||
// Get last backup info
|
||||
const lastBackup = await cache.lindex('backup:history', 0);
|
||||
if (lastBackup) {
|
||||
try {
|
||||
status.backup.lastBackup = JSON.parse(lastBackup);
|
||||
} catch (error) {
|
||||
// Skip invalid entry
|
||||
}
|
||||
}
|
||||
|
||||
// Get last restore info
|
||||
const lastRestore = await cache.lindex('restore:history', 0);
|
||||
if (lastRestore) {
|
||||
try {
|
||||
status.restore.lastRestore = JSON.parse(lastRestore);
|
||||
} catch (error) {
|
||||
// Skip invalid entry
|
||||
}
|
||||
}
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
data: status
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Failed to get backup status:', error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Failed to get backup status'
|
||||
});
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/v1/backup/cleanup:
|
||||
* post:
|
||||
* summary: Clean up old backups
|
||||
* tags: [Backup]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* requestBody:
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* retentionDays:
|
||||
* type: integer
|
||||
* default: 30
|
||||
* description: Number of days to retain backups
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Cleanup results
|
||||
*/
|
||||
router.post('/cleanup',
|
||||
authenticate,
|
||||
requireRole('admin'),
|
||||
validateRequest([
|
||||
body('retentionDays').optional().isInt({ min: 1, max: 365 })
|
||||
]),
|
||||
async (req, res) => {
|
||||
try {
|
||||
const { retentionDays = 30 } = req.body;
|
||||
|
||||
const result = await backupService.cleanupOldBackups(retentionDays);
|
||||
|
||||
logger.info('Backup cleanup completed', {
|
||||
...result,
|
||||
retentionDays,
|
||||
initiatedBy: req.user.id
|
||||
});
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
data: {
|
||||
...result,
|
||||
retentionDays,
|
||||
freedSpaceMB: Math.round(result.freedSpace / 1024 / 1024)
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Failed to cleanup backups:', error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Failed to cleanup backups'
|
||||
});
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/v1/backup/schedule:
|
||||
* post:
|
||||
* summary: Schedule automatic backups
|
||||
* tags: [Backup]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* requestBody:
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* required:
|
||||
* - enabled
|
||||
* properties:
|
||||
* enabled:
|
||||
* type: boolean
|
||||
* schedule:
|
||||
* type: string
|
||||
* description: Cron expression (e.g., "0 2 * * *")
|
||||
* encrypt:
|
||||
* type: boolean
|
||||
* uploadToCloud:
|
||||
* type: boolean
|
||||
* retentionDays:
|
||||
* type: integer
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Schedule updated
|
||||
*/
|
||||
router.post('/schedule',
|
||||
authenticate,
|
||||
requireRole('admin'),
|
||||
validateRequest([
|
||||
body('enabled').isBoolean(),
|
||||
body('schedule').optional().matches(/^[\d\s\*\/\-,]+$/),
|
||||
body('encrypt').optional().isBoolean(),
|
||||
body('uploadToCloud').optional().isBoolean(),
|
||||
body('retentionDays').optional().isInt({ min: 1, max: 365 })
|
||||
]),
|
||||
async (req, res) => {
|
||||
try {
|
||||
const scheduleConfig = req.body;
|
||||
|
||||
// Store schedule configuration
|
||||
await cache.set(
|
||||
'backup:schedule:config',
|
||||
JSON.stringify({
|
||||
...scheduleConfig,
|
||||
updatedBy: req.user.id,
|
||||
updatedAt: new Date().toISOString()
|
||||
})
|
||||
);
|
||||
|
||||
logger.info('Backup schedule updated', {
|
||||
...scheduleConfig,
|
||||
updatedBy: req.user.id
|
||||
});
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
message: 'Backup schedule updated',
|
||||
data: scheduleConfig
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Failed to update backup schedule:', error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Failed to update backup schedule'
|
||||
});
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /api/v1/backup/download/{backupId}:
|
||||
* get:
|
||||
* summary: Download a backup file
|
||||
* tags: [Backup]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: backupId
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Backup file
|
||||
* content:
|
||||
* application/octet-stream:
|
||||
* schema:
|
||||
* type: string
|
||||
* format: binary
|
||||
*/
|
||||
router.get('/download/:backupId',
|
||||
authenticate,
|
||||
requireRole('admin'),
|
||||
validateRequest([
|
||||
param('backupId').notEmpty().matches(/^[\w.-]+$/) // disallow path separators so the id cannot escape the backups directory
|
||||
]),
|
||||
async (req, res) => {
|
||||
try {
|
||||
const { backupId } = req.params;
|
||||
const backupPath = `/backups/${backupId}.tar.gz`;
|
||||
|
||||
// Check if file exists
|
||||
try {
|
||||
await fs.access(backupPath);
|
||||
} catch (error) {
|
||||
return res.status(404).json({
|
||||
success: false,
|
||||
error: 'Backup not found'
|
||||
});
|
||||
}
|
||||
|
||||
// Log download
|
||||
logger.info('Backup download', {
|
||||
backupId,
|
||||
downloadedBy: req.user.id
|
||||
});
|
||||
|
||||
// Send file
|
||||
res.download(backupPath, `backup-${backupId}.tar.gz`);
|
||||
} catch (error) {
|
||||
logger.error('Failed to download backup:', error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Failed to download backup'
|
||||
});
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
export default router;
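An illustrative admin-client sketch (not part of the committed files) that triggers a backup and then polls the status endpoint above until the background job clears its running flag; the base URL and token are placeholders.

async function runBackupAndWait(baseUrl, token) {
  const headers = { 'Content-Type': 'application/json', Authorization: `Bearer ${token}` };

  await fetch(`${baseUrl}/api/v1/backup/create`, {
    method: 'POST',
    headers,
    body: JSON.stringify({ description: 'nightly', encrypt: true })
  });

  // Poll /status until backup.running flips back to false, then return the result
  for (;;) {
    const res = await fetch(`${baseUrl}/api/v1/backup/status`, { headers });
    const { data } = await res.json();
    if (!data.backup.running) return data.backup.lastBackup;
    await new Promise(resolve => setTimeout(resolve, 5000));
  }
}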
|
||||
717
marketing-agent/services/api-gateway/src/routes/campaigns.js
Normal file
@@ -0,0 +1,717 @@
|
||||
/**
|
||||
* @swagger
|
||||
* components:
|
||||
* schemas:
|
||||
* Campaign:
|
||||
* type: object
|
||||
* required:
|
||||
* - name
|
||||
* - type
|
||||
* properties:
|
||||
* id:
|
||||
* type: string
|
||||
* description: Campaign unique identifier
|
||||
* example: camp_123456789
|
||||
* name:
|
||||
* type: string
|
||||
* description: Campaign name
|
||||
* example: Summer Sale Campaign
|
||||
* description:
|
||||
* type: string
|
||||
* description: Campaign description
|
||||
* example: Promotional campaign for summer products
|
||||
* type:
|
||||
* type: string
|
||||
* enum: [message, invitation, data_collection, engagement, custom]
|
||||
* description: Campaign type
|
||||
* example: message
|
||||
* status:
|
||||
* type: string
|
||||
* enum: [draft, active, paused, completed, cancelled]
|
||||
* description: Campaign status
|
||||
* example: active
|
||||
* content:
|
||||
* type: object
|
||||
* properties:
|
||||
* messageTemplateId:
|
||||
* type: string
|
||||
* description: Message template ID
|
||||
* customMessage:
|
||||
* type: string
|
||||
* description: Custom message content
|
||||
* media:
|
||||
* type: array
|
||||
* items:
|
||||
* type: object
|
||||
* properties:
|
||||
* type:
|
||||
* type: string
|
||||
* enum: [image, video, document]
|
||||
* url:
|
||||
* type: string
|
||||
* caption:
|
||||
* type: string
|
||||
* targeting:
|
||||
* type: object
|
||||
* properties:
|
||||
* includedUsers:
|
||||
* type: array
|
||||
* items:
|
||||
* type: string
|
||||
* description: List of user IDs to include
|
||||
* excludedUsers:
|
||||
* type: array
|
||||
* items:
|
||||
* type: string
|
||||
* description: List of user IDs to exclude
|
||||
* segments:
|
||||
* type: array
|
||||
* items:
|
||||
* type: string
|
||||
* description: List of segment IDs
|
||||
* groups:
|
||||
* type: array
|
||||
* items:
|
||||
* type: string
|
||||
* description: List of group IDs
|
||||
* tags:
|
||||
* type: array
|
||||
* items:
|
||||
* type: string
|
||||
* description: List of tags
|
||||
* filters:
|
||||
* type: object
|
||||
* description: Dynamic filters
|
||||
* settings:
|
||||
* type: object
|
||||
* properties:
|
||||
* rateLimit:
|
||||
* type: object
|
||||
* properties:
|
||||
* messagesPerSecond:
|
||||
* type: number
|
||||
* example: 10
|
||||
* messagesPerUser:
|
||||
* type: number
|
||||
* example: 1
|
||||
* scheduling:
|
||||
* type: object
|
||||
* properties:
|
||||
* startTime:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* endTime:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* timezone:
|
||||
* type: string
|
||||
* example: America/New_York
|
||||
* abTesting:
|
||||
* type: object
|
||||
* properties:
|
||||
* enabled:
|
||||
* type: boolean
|
||||
* variants:
|
||||
* type: array
|
||||
* items:
|
||||
* type: object
|
||||
* goals:
|
||||
* type: object
|
||||
* properties:
|
||||
* targetAudience:
|
||||
* type: integer
|
||||
* description: Target number of recipients
|
||||
* example: 1000
|
||||
* conversionRate:
|
||||
* type: number
|
||||
* description: Target conversion rate percentage
|
||||
* example: 15.5
|
||||
* revenue:
|
||||
* type: number
|
||||
* description: Target revenue in currency
|
||||
* example: 50000
|
||||
* statistics:
|
||||
* type: object
|
||||
* readOnly: true
|
||||
* properties:
|
||||
* messagesSent:
|
||||
* type: integer
|
||||
* example: 850
|
||||
* delivered:
|
||||
* type: integer
|
||||
* example: 820
|
||||
* read:
|
||||
* type: integer
|
||||
* example: 650
|
||||
* clicked:
|
||||
* type: integer
|
||||
* example: 120
|
||||
* conversions:
|
||||
* type: integer
|
||||
* example: 45
|
||||
* revenue:
|
||||
* type: number
|
||||
* example: 12500
|
||||
* createdAt:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* readOnly: true
|
||||
* updatedAt:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* readOnly: true
|
||||
* createdBy:
|
||||
* type: string
|
||||
* readOnly: true
|
||||
* description: User ID who created the campaign
|
||||
*
|
||||
* CampaignExecution:
|
||||
* type: object
|
||||
* properties:
|
||||
* campaignId:
|
||||
* type: string
|
||||
* description: Campaign ID
|
||||
* executionId:
|
||||
* type: string
|
||||
* description: Execution unique identifier
|
||||
* status:
|
||||
* type: string
|
||||
* enum: [running, completed, failed, cancelled]
|
||||
* startedAt:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* completedAt:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* progress:
|
||||
* type: object
|
||||
* properties:
|
||||
* total:
|
||||
* type: integer
|
||||
* processed:
|
||||
* type: integer
|
||||
* succeeded:
|
||||
* type: integer
|
||||
* failed:
|
||||
* type: integer
|
||||
* errors:
|
||||
* type: array
|
||||
* items:
|
||||
* type: object
|
||||
* properties:
|
||||
* userId:
|
||||
* type: string
|
||||
* error:
|
||||
* type: string
|
||||
* timestamp:
|
||||
* type: string
|
||||
* format: date-time
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /orchestrator/campaigns:
|
||||
* get:
|
||||
* summary: List all campaigns
|
||||
* tags: [Campaigns]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* parameters:
|
||||
* - in: query
|
||||
* name: page
|
||||
* schema:
|
||||
* type: integer
|
||||
* default: 1
|
||||
* description: Page number
|
||||
* - in: query
|
||||
* name: limit
|
||||
* schema:
|
||||
* type: integer
|
||||
* default: 20
|
||||
* maximum: 100
|
||||
* description: Items per page
|
||||
* - in: query
|
||||
* name: status
|
||||
* schema:
|
||||
* type: string
|
||||
* enum: [draft, active, paused, completed, cancelled]
|
||||
* description: Filter by status
|
||||
* - in: query
|
||||
* name: type
|
||||
* schema:
|
||||
* type: string
|
||||
* enum: [message, invitation, data_collection, engagement, custom]
|
||||
* description: Filter by type
|
||||
* - in: query
|
||||
* name: search
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Search in name and description
|
||||
* - in: query
|
||||
* name: sort
|
||||
* schema:
|
||||
* type: string
|
||||
* enum: [createdAt, updatedAt, name, status]
|
||||
* description: Sort field
|
||||
* - in: query
|
||||
* name: order
|
||||
* schema:
|
||||
* type: string
|
||||
* enum: [asc, desc]
|
||||
* default: desc
|
||||
* description: Sort order
|
||||
* responses:
|
||||
* 200:
|
||||
* description: List of campaigns
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: true
|
||||
* data:
|
||||
* type: object
|
||||
* properties:
|
||||
* campaigns:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: '#/components/schemas/Campaign'
|
||||
* pagination:
|
||||
* $ref: '#/components/schemas/Pagination'
|
||||
* 401:
|
||||
* $ref: '#/components/responses/UnauthorizedError'
|
||||
*
|
||||
* post:
|
||||
* summary: Create new campaign
|
||||
* tags: [Campaigns]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* required:
|
||||
* - name
|
||||
* - type
|
||||
* properties:
|
||||
* name:
|
||||
* type: string
|
||||
* example: Summer Sale Campaign
|
||||
* description:
|
||||
* type: string
|
||||
* example: Promotional campaign for summer products
|
||||
* type:
|
||||
* type: string
|
||||
* enum: [message, invitation, data_collection, engagement, custom]
|
||||
* example: message
|
||||
* content:
|
||||
* type: object
|
||||
* targeting:
|
||||
* type: object
|
||||
* settings:
|
||||
* type: object
|
||||
* goals:
|
||||
* type: object
|
||||
* responses:
|
||||
* 201:
|
||||
* description: Campaign created successfully
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: true
|
||||
* data:
|
||||
* type: object
|
||||
* properties:
|
||||
* campaign:
|
||||
* $ref: '#/components/schemas/Campaign'
|
||||
* 400:
|
||||
* $ref: '#/components/responses/ValidationError'
|
||||
* 401:
|
||||
* $ref: '#/components/responses/UnauthorizedError'
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /orchestrator/campaigns/{id}:
|
||||
* get:
|
||||
* summary: Get campaign by ID
|
||||
* tags: [Campaigns]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Campaign ID
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Campaign details
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: true
|
||||
* data:
|
||||
* type: object
|
||||
* properties:
|
||||
* campaign:
|
||||
* $ref: '#/components/schemas/Campaign'
|
||||
* 404:
|
||||
* $ref: '#/components/responses/NotFoundError'
|
||||
* 401:
|
||||
* $ref: '#/components/responses/UnauthorizedError'
|
||||
*
|
||||
* put:
|
||||
* summary: Update campaign
|
||||
* tags: [Campaigns]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Campaign ID
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* name:
|
||||
* type: string
|
||||
* description:
|
||||
* type: string
|
||||
* content:
|
||||
* type: object
|
||||
* targeting:
|
||||
* type: object
|
||||
* settings:
|
||||
* type: object
|
||||
* goals:
|
||||
* type: object
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Campaign updated successfully
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: true
|
||||
* data:
|
||||
* type: object
|
||||
* properties:
|
||||
* campaign:
|
||||
* $ref: '#/components/schemas/Campaign'
|
||||
* 400:
|
||||
* $ref: '#/components/responses/ValidationError'
|
||||
* 404:
|
||||
* $ref: '#/components/responses/NotFoundError'
|
||||
* 401:
|
||||
* $ref: '#/components/responses/UnauthorizedError'
|
||||
*
|
||||
* delete:
|
||||
* summary: Delete campaign
|
||||
* tags: [Campaigns]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Campaign ID
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Campaign deleted successfully
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: true
|
||||
* message:
|
||||
* type: string
|
||||
* example: Campaign deleted successfully
|
||||
* 404:
|
||||
* $ref: '#/components/responses/NotFoundError'
|
||||
* 401:
|
||||
* $ref: '#/components/responses/UnauthorizedError'
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /orchestrator/campaigns/{id}/execute:
|
||||
* post:
|
||||
* summary: Execute campaign
|
||||
* tags: [Campaigns]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Campaign ID
|
||||
* requestBody:
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* test:
|
||||
* type: boolean
|
||||
* description: Run in test mode
|
||||
* example: false
|
||||
* testUsers:
|
||||
* type: array
|
||||
* items:
|
||||
* type: string
|
||||
* description: User IDs for test execution
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Campaign execution started
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: true
|
||||
* data:
|
||||
* type: object
|
||||
* properties:
|
||||
* executionId:
|
||||
* type: string
|
||||
* example: exec_123456789
|
||||
* status:
|
||||
* type: string
|
||||
* example: running
|
||||
* 400:
|
||||
* $ref: '#/components/responses/ValidationError'
|
||||
* 404:
|
||||
* $ref: '#/components/responses/NotFoundError'
|
||||
* 401:
|
||||
* $ref: '#/components/responses/UnauthorizedError'
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /orchestrator/campaigns/{id}/executions:
|
||||
* get:
|
||||
* summary: Get campaign execution history
|
||||
* tags: [Campaigns]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Campaign ID
|
||||
* - in: query
|
||||
* name: page
|
||||
* schema:
|
||||
* type: integer
|
||||
* default: 1
|
||||
* description: Page number
|
||||
* - in: query
|
||||
* name: limit
|
||||
* schema:
|
||||
* type: integer
|
||||
* default: 20
|
||||
* description: Items per page
|
||||
* responses:
|
||||
* 200:
|
||||
* description: List of campaign executions
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: true
|
||||
* data:
|
||||
* type: object
|
||||
* properties:
|
||||
* executions:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: '#/components/schemas/CampaignExecution'
|
||||
* pagination:
|
||||
* $ref: '#/components/schemas/Pagination'
|
||||
* 404:
|
||||
* $ref: '#/components/responses/NotFoundError'
|
||||
* 401:
|
||||
* $ref: '#/components/responses/UnauthorizedError'
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /orchestrator/campaigns/{id}/statistics:
|
||||
* get:
|
||||
* summary: Get campaign statistics
|
||||
* tags: [Campaigns]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Campaign ID
|
||||
* - in: query
|
||||
* name: dateRange
|
||||
* schema:
|
||||
* type: string
|
||||
* enum: [today, yesterday, last7days, last30days, custom]
|
||||
* description: Date range for statistics
|
||||
* - in: query
|
||||
* name: startDate
|
||||
* schema:
|
||||
* type: string
|
||||
* format: date
|
||||
* description: Start date for custom range
|
||||
* - in: query
|
||||
* name: endDate
|
||||
* schema:
|
||||
* type: string
|
||||
* format: date
|
||||
* description: End date for custom range
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Campaign statistics
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: true
|
||||
* data:
|
||||
* type: object
|
||||
* properties:
|
||||
* statistics:
|
||||
* type: object
|
||||
* properties:
|
||||
* overview:
|
||||
* type: object
|
||||
* properties:
|
||||
* totalRecipients:
|
||||
* type: integer
|
||||
* messagesSent:
|
||||
* type: integer
|
||||
* delivered:
|
||||
* type: integer
|
||||
* deliveryRate:
|
||||
* type: number
|
||||
* read:
|
||||
* type: integer
|
||||
* readRate:
|
||||
* type: number
|
||||
* clicked:
|
||||
* type: integer
|
||||
* clickRate:
|
||||
* type: number
|
||||
* conversions:
|
||||
* type: integer
|
||||
* conversionRate:
|
||||
* type: number
|
||||
* timeline:
|
||||
* type: array
|
||||
* items:
|
||||
* type: object
|
||||
* properties:
|
||||
* date:
|
||||
* type: string
|
||||
* format: date
|
||||
* sent:
|
||||
* type: integer
|
||||
* delivered:
|
||||
* type: integer
|
||||
* read:
|
||||
* type: integer
|
||||
* clicked:
|
||||
* type: integer
|
||||
* 404:
|
||||
* $ref: '#/components/responses/NotFoundError'
|
||||
* 401:
|
||||
* $ref: '#/components/responses/UnauthorizedError'
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /orchestrator/campaigns/{id}/duplicate:
|
||||
* post:
|
||||
* summary: Duplicate campaign
|
||||
* tags: [Campaigns]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Campaign ID to duplicate
|
||||
* requestBody:
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* name:
|
||||
* type: string
|
||||
* description: Name for the new campaign
|
||||
* example: Summer Sale Campaign (Copy)
|
||||
* responses:
|
||||
* 201:
|
||||
* description: Campaign duplicated successfully
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: true
|
||||
* data:
|
||||
* type: object
|
||||
* properties:
|
||||
* campaign:
|
||||
* $ref: '#/components/schemas/Campaign'
|
||||
* 404:
|
||||
* $ref: '#/components/responses/NotFoundError'
|
||||
* 401:
|
||||
* $ref: '#/components/responses/UnauthorizedError'
|
||||
*/
|
||||
|
||||
// This is a documentation-only file for Swagger
|
||||
export default {};
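A client sketch (editorial, not part of the commit) exercising the documented campaign endpoints: create a campaign, then start a test execution; the base URL, token, and payload values are placeholders.

async function createAndTestCampaign(baseUrl, token) {
  const headers = { 'Content-Type': 'application/json', Authorization: `Bearer ${token}` };

  // POST /orchestrator/campaigns returns { success, data: { campaign } }
  const createRes = await fetch(`${baseUrl}/orchestrator/campaigns`, {
    method: 'POST',
    headers,
    body: JSON.stringify({ name: 'Summer Sale Campaign', type: 'message' })
  });
  const { data } = await createRes.json();

  // Kick off a dry run against a couple of test users before the real send
  await fetch(`${baseUrl}/orchestrator/campaigns/${data.campaign.id}/execute`, {
    method: 'POST',
    headers,
    body: JSON.stringify({ test: true, testUsers: ['user_1', 'user_2'] })
  });
}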
|
||||
294
marketing-agent/services/api-gateway/src/routes/dataExchange.js
Normal file
@@ -0,0 +1,294 @@
|
||||
import express from 'express';
|
||||
import multer from 'multer';
|
||||
import { dataExchangeService } from '../services/dataExchange.js';
|
||||
import { authenticate, authorize } from '../middleware/auth.js';
|
||||
import { logger } from '../utils/logger.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
// Configure multer for file uploads
|
||||
const upload = multer({
|
||||
storage: multer.memoryStorage(),
|
||||
limits: {
|
||||
fileSize: 50 * 1024 * 1024 // 50MB limit
|
||||
},
|
||||
fileFilter: (req, file, cb) => {
|
||||
const allowedMimes = [
|
||||
'text/csv',
|
||||
'application/json',
|
||||
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
|
||||
'application/vnd.ms-excel'
|
||||
];
|
||||
|
||||
if (allowedMimes.includes(file.mimetype)) {
|
||||
cb(null, true);
|
||||
} else {
|
||||
cb(new Error('Invalid file type. Only CSV, JSON, and Excel files are allowed.'));
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Apply authentication to all routes
|
||||
router.use(authenticate);
|
||||
|
||||
/**
|
||||
* Export data
|
||||
* GET /api/data-exchange/export/:entityType
|
||||
*/
|
||||
router.get('/export/:entityType', authorize(['admin', 'manager']), async (req, res, next) => {
|
||||
try {
|
||||
const { entityType } = req.params;
|
||||
const { format = 'csv', ...filters } = req.query;
|
||||
|
||||
// Parse pagination options
|
||||
const options = {};
|
||||
if (req.query.limit) {
|
||||
options.limit = parseInt(req.query.limit);
|
||||
}
|
||||
if (req.query.skip) {
|
||||
options.skip = parseInt(req.query.skip);
|
||||
}
|
||||
if (req.query.sort) {
|
||||
options.sort = req.query.sort;
|
||||
}
|
||||
|
||||
// Export data
|
||||
const result = await dataExchangeService.exportData(
|
||||
entityType,
|
||||
format,
|
||||
filters,
|
||||
options
|
||||
);
|
||||
|
||||
// Set response headers
|
||||
res.setHeader('Content-Type', result.mimeType);
|
||||
res.setHeader('Content-Disposition', `attachment; filename="${result.filename}"`);
|
||||
|
||||
// Send file
|
||||
if (format === 'excel') {
|
||||
res.send(Buffer.from(result.data));
|
||||
} else {
|
||||
res.send(result.data);
|
||||
}
|
||||
|
||||
// Log export
|
||||
logger.info('Data exported', {
|
||||
userId: req.user.id,
|
||||
entityType,
|
||||
format,
|
||||
count: result.count
|
||||
});
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Import data
|
||||
* POST /api/data-exchange/import/:entityType
|
||||
*/
|
||||
router.post('/import/:entityType',
|
||||
authorize(['admin']),
|
||||
upload.single('file'),
|
||||
async (req, res, next) => {
|
||||
try {
|
||||
const { entityType } = req.params;
|
||||
const { updateExisting = false } = req.body;
|
||||
|
||||
if (!req.file) {
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: 'No file uploaded'
|
||||
});
|
||||
}
|
||||
|
||||
// Determine format from file extension or mimetype
|
||||
let format;
|
||||
if (req.file.mimetype === 'text/csv') {
|
||||
format = 'csv';
|
||||
} else if (req.file.mimetype === 'application/json') {
|
||||
format = 'json';
|
||||
} else if (req.file.mimetype.includes('spreadsheet') || req.file.mimetype.includes('excel')) {
|
||||
format = 'excel';
|
||||
} else {
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: 'Unsupported file format'
|
||||
});
|
||||
}
|
||||
|
||||
// Import data
|
||||
const result = await dataExchangeService.importData(
|
||||
entityType,
|
||||
req.file.buffer,
|
||||
format,
|
||||
{ updateExisting: updateExisting === 'true' }
|
||||
);
|
||||
|
||||
// Log import
|
||||
logger.info('Data imported', {
|
||||
userId: req.user.id,
|
||||
entityType,
|
||||
format,
|
||||
...result
|
||||
});
|
||||
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
/**
|
||||
* Get export templates
|
||||
* GET /api/data-exchange/templates
|
||||
*/
|
||||
router.get('/templates', authorize(['admin', 'manager']), async (req, res, next) => {
|
||||
try {
|
||||
const templates = dataExchangeService.getExportTemplates();
|
||||
res.json({
|
||||
success: true,
|
||||
templates
|
||||
});
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Download template file
|
||||
* GET /api/data-exchange/templates/:entityType
|
||||
*/
|
||||
router.get('/templates/:entityType', authorize(['admin', 'manager']), async (req, res, next) => {
|
||||
try {
|
||||
const { entityType } = req.params;
|
||||
const { format = 'csv' } = req.query;
|
||||
|
||||
// Get template data
|
||||
const templates = dataExchangeService.getExportTemplates();
|
||||
if (!templates[entityType]) {
|
||||
return res.status(404).json({
|
||||
success: false,
|
||||
error: 'Template not found'
|
||||
});
|
||||
}
|
||||
|
||||
// Export template with sample data
|
||||
const result = await dataExchangeService.exportData(
|
||||
entityType,
|
||||
format,
|
||||
{}, // No filters for template
|
||||
{ limit: 1 } // Only include sample data
|
||||
);
|
||||
|
||||
// Modify filename for template
|
||||
const templateFilename = result.filename.replace('export', 'template');
|
||||
|
||||
// Set response headers
|
||||
res.setHeader('Content-Type', result.mimeType);
|
||||
res.setHeader('Content-Disposition', `attachment; filename="${templateFilename}"`);
|
||||
|
||||
// Send file
|
||||
if (format === 'excel') {
|
||||
res.send(Buffer.from(result.data));
|
||||
} else {
|
||||
res.send(result.data);
|
||||
}
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Validate import file
|
||||
* POST /api/data-exchange/validate/:entityType
|
||||
*/
|
||||
router.post('/validate/:entityType',
|
||||
authorize(['admin']),
|
||||
upload.single('file'),
|
||||
async (req, res, next) => {
|
||||
try {
|
||||
const { entityType } = req.params;
|
||||
|
||||
if (!req.file) {
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: 'No file uploaded'
|
||||
});
|
||||
}
|
||||
|
||||
// Determine format
|
||||
let format;
|
||||
if (req.file.mimetype === 'text/csv') {
|
||||
format = 'csv';
|
||||
} else if (req.file.mimetype === 'application/json') {
|
||||
format = 'json';
|
||||
} else if (req.file.mimetype.includes('spreadsheet') || req.file.mimetype.includes('excel')) {
|
||||
format = 'excel';
|
||||
} else {
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: 'Unsupported file format'
|
||||
});
|
||||
}
|
||||
|
||||
// Parse file
|
||||
let parsedData;
|
||||
try {
|
||||
switch (format) {
|
||||
case 'csv':
|
||||
parsedData = await dataExchangeService.parseCSV(req.file.buffer);
|
||||
break;
|
||||
case 'json':
|
||||
parsedData = await dataExchangeService.parseJSON(req.file.buffer);
|
||||
break;
|
||||
case 'excel':
|
||||
parsedData = await dataExchangeService.parseExcel(req.file.buffer);
|
||||
break;
|
||||
}
|
||||
} catch (parseError) {
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: `Failed to parse file: ${parseError.message}`
|
||||
});
|
||||
}
|
||||
|
||||
// Validate data
|
||||
const validationResult = await dataExchangeService.validateImportData(
|
||||
entityType,
|
||||
parsedData
|
||||
);
|
||||
|
||||
res.json({
|
||||
success: validationResult.valid,
|
||||
totalRecords: parsedData.length,
|
||||
validRecords: parsedData.length - validationResult.errors.length,
|
||||
invalidRecords: validationResult.errors.length,
|
||||
errors: validationResult.errors
|
||||
});
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
/**
|
||||
* Get export history
|
||||
* GET /api/data-exchange/history
|
||||
*/
|
||||
router.get('/history', authorize(['admin', 'manager']), async (req, res, next) => {
|
||||
try {
|
||||
// This would typically query a database of export/import logs
|
||||
// For now, return a placeholder
|
||||
res.json({
|
||||
success: true,
|
||||
history: [],
|
||||
message: 'Export/import history tracking to be implemented'
|
||||
});
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
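A sketch (editorial, not part of the commit) of driving the validate-then-import flow above from Node 18 using its built-in fetch/FormData; the base URL, token, CSV path, and the 'contacts' entity type are placeholders.

import { readFile } from 'fs/promises';

async function importCsv(baseUrl, token, csvPath) {
  const form = new FormData();
  form.append('file', new Blob([await readFile(csvPath)], { type: 'text/csv' }), 'contacts.csv');
  form.append('updateExisting', 'true');

  // Dry-run validation first; the response lists per-row errors if any
  const check = await (await fetch(`${baseUrl}/api/data-exchange/validate/contacts`, {
    method: 'POST',
    headers: { Authorization: `Bearer ${token}` },
    body: form
  })).json();
  if (!check.success) throw new Error(`Validation failed: ${check.invalidRecords} invalid rows`);

  // Then run the real import with the same multipart payload
  const res = await fetch(`${baseUrl}/api/data-exchange/import/contacts`, {
    method: 'POST',
    headers: { Authorization: `Bearer ${token}` },
    body: form
  });
  return res.json();
}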
|
||||
273
marketing-agent/services/api-gateway/src/routes/mock.js
Normal file
@@ -0,0 +1,273 @@
import express from 'express';

const router = express.Router();

// Mock data for testing
const mockData = {
  dashboard: {
    overview: {
      totalCampaigns: 12,
      activeCampaigns: 5,
      totalMessages: 45678,
      deliveryRate: 98.5,
      clickRate: 12.3,
      conversionRate: 3.2
    },
    recentActivity: [
      {
        id: '1',
        type: 'campaign_started',
        campaign: 'Summer Sale 2025',
        timestamp: new Date(Date.now() - 3600000).toISOString()
      },
      {
        id: '2',
        type: 'message_sent',
        count: 1500,
        campaign: 'Welcome Series',
        timestamp: new Date(Date.now() - 7200000).toISOString()
      }
    ],
    performance: {
      daily: [
        { date: '2025-07-20', sent: 5000, delivered: 4900, clicked: 600 },
        { date: '2025-07-21', sent: 5500, delivered: 5400, clicked: 720 },
        { date: '2025-07-22', sent: 4800, delivered: 4700, clicked: 580 },
        { date: '2025-07-23', sent: 6200, delivered: 6100, clicked: 850 },
        { date: '2025-07-24', sent: 5800, delivered: 5700, clicked: 690 },
        { date: '2025-07-25', sent: 6500, delivered: 6400, clicked: 820 },
        { date: '2025-07-26', sent: 3200, delivered: 3150, clicked: 390 }
      ]
    }
  },
  campaigns: [
    {
      id: 'c1',
      name: 'Summer Sale 2025',
      status: 'active',
      type: 'promotional',
      startDate: '2025-07-01',
      messages: 12500,
      deliveryRate: 99.2,
      clickRate: 15.8
    },
    {
      id: 'c2',
      name: 'Welcome Series',
      status: 'active',
      type: 'onboarding',
      startDate: '2025-06-15',
      messages: 8900,
      deliveryRate: 98.5,
      clickRate: 22.1
    }
  ],
  messages: {
    templates: [
      {
        id: 'm1',
        name: 'Welcome Message',
        content: 'Welcome to our service! {{name}}',
        category: 'onboarding',
        usage: 1234
      },
      {
        id: 'm2',
        name: 'Promotion Alert',
        content: 'Special offer: {{discount}}% off!',
        category: 'promotional',
        usage: 5678
      }
    ],
    recent: [
      {
        id: 'msg1',
        campaignId: 'c1',
        status: 'delivered',
        sentAt: new Date(Date.now() - 3600000).toISOString(),
        recipient: '+1234567890'
      }
    ]
  },
  abTests: [
    {
      id: 'ab1',
      name: 'Button Color Test',
      status: 'running',
      variants: [
        { id: 'v1', name: 'Blue Button', conversions: 123, visitors: 1000 },
        { id: 'v2', name: 'Green Button', conversions: 145, visitors: 1000 }
      ],
      confidence: 92.5
    }
  ],
  accounts: [
    {
      id: 'acc1',
      phone: '+1234567890',
      status: 'active',
      username: 'marketing_bot_1',
      lastActive: new Date(Date.now() - 600000).toISOString()
    },
    {
      id: 'acc2',
      phone: '+0987654321',
      status: 'active',
      username: 'marketing_bot_2',
      lastActive: new Date(Date.now() - 1200000).toISOString()
    }
  ],
  compliance: {
    gdpr: {
      status: 'compliant',
      lastAudit: '2025-07-15',
      dataRequests: 23,
      deletionRequests: 5
    },
    ccpa: {
      status: 'compliant',
      lastAudit: '2025-07-10',
      optOutRequests: 12
    }
  },
  settings: {
    general: {
      companyName: 'Marketing Agency',
      timezone: 'America/New_York',
      language: 'en'
    },
    notifications: {
      email: true,
      sms: false,
      webhooks: true
    },
    apiKeys: [
      {
        id: 'key1',
        name: 'Production API',
        created: '2025-06-01',
        lastUsed: '2025-07-26'
      }
    ]
  }
};

// Mock endpoints
router.get('/analytics/dashboard', (req, res) => {
  res.json({
    success: true,
    data: mockData.dashboard
  });
});

router.get('/orchestrator/campaigns', (req, res) => {
  res.json({
    success: true,
    data: {
      campaigns: mockData.campaigns,
      total: mockData.campaigns.length
    }
  });
});

router.post('/orchestrator/campaigns', (req, res) => {
  const newCampaign = {
    id: 'c' + Date.now(),
    ...req.body,
    status: 'draft',
    messages: 0,
    deliveryRate: 0,
    clickRate: 0
  };
  res.status(201).json({
    success: true,
    data: newCampaign
  });
});

router.get('/orchestrator/messages/templates', (req, res) => {
  res.json({
    success: true,
    data: mockData.messages.templates
  });
});

router.get('/orchestrator/messages/history', (req, res) => {
  res.json({
    success: true,
    data: mockData.messages.recent
  });
});

router.get('/abTesting/experiments', (req, res) => {
  res.json({
    success: true,
    data: mockData.abTests
  });
});

router.post('/abTesting/experiments', (req, res) => {
  const newTest = {
    id: 'ab' + Date.now(),
    ...req.body,
    status: 'draft',
    confidence: 0
  };
  res.status(201).json({
    success: true,
    data: newTest
  });
});

// Forward Telegram account requests to gramjs-adapter service
router.use('/gramjsAdapter/*', (req, res, next) => {
  // Use proxy route for real Telegram functionality
  req.url = req.url.replace('/gramjsAdapter', '/gramjs-adapter');
  next('route');
});

// Mock data for testing when gramjs-adapter is not available
router.get('/gramjsAdapter/accounts', (req, res) => {
  res.json({
    success: true,
    data: mockData.accounts
  });
});

router.get('/complianceGuard/status', (req, res) => {
  res.json({
    success: true,
    data: mockData.compliance
  });
});

router.get('/settings', (req, res) => {
  res.json({
    success: true,
    data: mockData.settings
  });
});

router.put('/settings', (req, res) => {
  res.json({
    success: true,
    data: { ...mockData.settings, ...req.body }
  });
});

// Analytics endpoints
router.get('/analytics/metrics/overview', (req, res) => {
  res.json({
    success: true,
    data: {
      metrics: [
        { name: 'Total Messages', value: 45678, change: 12.5 },
        { name: 'Delivery Rate', value: 98.5, change: 0.3 },
        { name: 'Click Rate', value: 12.3, change: -1.2 },
        { name: 'Conversion Rate', value: 3.2, change: 0.8 }
      ]
    }
  });
});

export default router;
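A sketch of how such a mock router might be wired in for local frontend development (not part of this commit; the env flag and mount path are assumptions).

// Assumed wiring only; the real app may mount mocks differently or not at all.
import express from 'express';
import mockRouter from './routes/mock.js';

const app = express();
if (process.env.USE_MOCKS === 'true') {
  // Serve canned data so the UI can be built without the backing services running.
  app.use('/api/v1', mockRouter);
}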
338
marketing-agent/services/api-gateway/src/routes/monitoring.js
Normal file
@@ -0,0 +1,338 @@
import express from 'express';
import { authenticate } from '../middleware/auth.js';
import { requireRole } from '../middleware/permission.js';
import {
  getMetrics,
  getMetricsContentType,
  getDashboardMetrics,
  recordBusinessMetrics
} from '../services/monitoring.js';
import { cache } from '../utils/cache.js';
import { logger } from '../utils/logger.js';

const router = express.Router();

/**
 * @swagger
 * /api/v1/monitoring/metrics:
 *   get:
 *     summary: Get Prometheus metrics
 *     tags: [Monitoring]
 *     security:
 *       - bearerAuth: []
 *     responses:
 *       200:
 *         description: Prometheus metrics in text format
 */
router.get('/metrics', authenticate, requireRole('admin', 'manager'), async (req, res) => {
  try {
    const metrics = await getMetrics();
    res.set('Content-Type', getMetricsContentType());
    res.send(metrics);
  } catch (error) {
    logger.error('Failed to get metrics:', error);
    res.status(500).json({
      success: false,
      error: 'Failed to retrieve metrics'
    });
  }
});

/**
 * @swagger
 * /api/v1/monitoring/dashboard:
 *   get:
 *     summary: Get monitoring dashboard data
 *     tags: [Monitoring]
 *     security:
 *       - bearerAuth: []
 *     responses:
 *       200:
 *         description: Dashboard metrics and alerts
 */
router.get('/dashboard', authenticate, requireRole('admin', 'manager'), async (req, res) => {
  try {
    const dashboardData = await getDashboardMetrics();

    res.json({
      success: true,
      data: dashboardData
    });
  } catch (error) {
    logger.error('Failed to get dashboard data:', error);
    res.status(500).json({
      success: false,
      error: 'Failed to retrieve dashboard data'
    });
  }
});

/**
 * @swagger
 * /api/v1/monitoring/alerts:
 *   get:
 *     summary: Get system alerts
 *     tags: [Monitoring]
 *     security:
 *       - bearerAuth: []
 *     parameters:
 *       - in: query
 *         name: limit
 *         schema:
 *           type: integer
 *           default: 20
 *         description: Number of alerts to retrieve
 *       - in: query
 *         name: severity
 *         schema:
 *           type: string
 *           enum: [critical, warning, info]
 *         description: Filter by severity
 *     responses:
 *       200:
 *         description: List of system alerts
 */
router.get('/alerts', authenticate, requireRole('admin', 'manager', 'operator'), async (req, res) => {
  try {
    const { limit = 20, severity } = req.query;

    // Get alerts from cache
    let alerts = await cache.lrange('system:alerts', 0, limit - 1);
    alerts = alerts.map(a => JSON.parse(a));

    // Filter by severity if provided
    if (severity) {
      alerts = alerts.filter(a => a.severity === severity);
    }

    res.json({
      success: true,
      data: {
        alerts,
        total: alerts.length
      }
    });
  } catch (error) {
    logger.error('Failed to get alerts:', error);
    res.status(500).json({
      success: false,
      error: 'Failed to retrieve alerts'
    });
  }
});

/**
 * @swagger
 * /api/v1/monitoring/alerts/{alertId}/acknowledge:
 *   post:
 *     summary: Acknowledge an alert
 *     tags: [Monitoring]
 *     security:
 *       - bearerAuth: []
 *     parameters:
 *       - in: path
 *         name: alertId
 *         required: true
 *         schema:
 *           type: string
 *         description: Alert ID
 *     responses:
 *       200:
 *         description: Alert acknowledged
 */
router.post('/alerts/:alertId/acknowledge', authenticate, requireRole('admin', 'manager'), async (req, res) => {
  try {
    const { alertId } = req.params;

    // Mark alert as acknowledged
    await cache.hset(`alert:${alertId}`, 'acknowledged', 'true');
    await cache.hset(`alert:${alertId}`, 'acknowledgedBy', req.user.id);
    await cache.hset(`alert:${alertId}`, 'acknowledgedAt', new Date().toISOString());

    logger.info('Alert acknowledged', { alertId, userId: req.user.id });

    res.json({
      success: true,
      message: 'Alert acknowledged'
    });
  } catch (error) {
    logger.error('Failed to acknowledge alert:', error);
    res.status(500).json({
      success: false,
      error: 'Failed to acknowledge alert'
    });
  }
});

/**
 * @swagger
 * /api/v1/monitoring/health:
 *   get:
 *     summary: Get service health status
 *     tags: [Monitoring]
 *     responses:
 *       200:
 *         description: Service health information
 */
router.get('/health', async (req, res) => {
  try {
    const health = {
      status: 'healthy',
      timestamp: new Date().toISOString(),
      uptime: process.uptime(),
      memory: process.memoryUsage(),
      cpu: process.cpuUsage()
    };

    // Check critical components
    try {
      await cache.ping();
      health.redis = 'healthy';
    } catch (error) {
      health.redis = 'unhealthy';
      health.status = 'degraded';
    }

    res.json({
      success: true,
      data: health
    });
  } catch (error) {
    logger.error('Health check failed:', error);
    res.status(503).json({
      success: false,
      error: 'Service unhealthy',
      status: 'unhealthy'
    });
  }
});

/**
 * @swagger
 * /api/v1/monitoring/events:
 *   post:
 *     summary: Record a monitoring event
 *     tags: [Monitoring]
 *     security:
 *       - bearerAuth: []
 *     requestBody:
 *       required: true
 *       content:
 *         application/json:
 *           schema:
 *             type: object
 *             required:
 *               - eventType
 *               - data
 *             properties:
 *               eventType:
 *                 type: string
 *               data:
 *                 type: object
 *     responses:
 *       200:
 *         description: Event recorded
 */
router.post('/events', authenticate, async (req, res) => {
  try {
    const { eventType, data } = req.body;

    // Record the event
    recordBusinessMetrics(eventType, {
      ...data,
      userId: req.user.id,
      timestamp: new Date().toISOString()
    });

    // Store event for audit
    await cache.lpush('monitoring:events', JSON.stringify({
      eventType,
      data,
      userId: req.user.id,
      timestamp: new Date().toISOString()
    }));
    await cache.ltrim('monitoring:events', 0, 999); // Keep last 1000 events

    res.json({
      success: true,
      message: 'Event recorded'
    });
  } catch (error) {
    logger.error('Failed to record event:', error);
    res.status(500).json({
      success: false,
      error: 'Failed to record event'
    });
  }
});
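A minimal sketch of recording a business event through the gateway (not part of the commit). It assumes http://localhost:3000, the /api/v1/monitoring mount shown in the swagger paths above, and a valid JWT in TOKEN; the event name and payload are illustrative.

// Sketch only; host, token handling and the payload shape are assumptions.
const eventRes = await fetch('http://localhost:3000/api/v1/monitoring/events', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    Authorization: `Bearer ${process.env.TOKEN}`
  },
  body: JSON.stringify({
    eventType: 'campaign_started',           // free-form event name
    data: { campaignId: 'camp_123456789' }   // arbitrary payload kept for audit
  })
});
console.log(await eventRes.json()); // { success: true, message: 'Event recorded' }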
/**
 * @swagger
 * /api/v1/monitoring/logs:
 *   get:
 *     summary: Get application logs
 *     tags: [Monitoring]
 *     security:
 *       - bearerAuth: []
 *     parameters:
 *       - in: query
 *         name: level
 *         schema:
 *           type: string
 *           enum: [error, warn, info, debug]
 *         description: Log level filter
 *       - in: query
 *         name: limit
 *         schema:
 *           type: integer
 *           default: 100
 *         description: Number of logs to retrieve
 *       - in: query
 *         name: service
 *         schema:
 *           type: string
 *         description: Filter by service name
 *     responses:
 *       200:
 *         description: Application logs
 */
router.get('/logs', authenticate, requireRole('admin'), async (req, res) => {
  try {
    const { level, limit = 100, service } = req.query;

    // In production, this would query your log aggregation service
    // For now, returning recent logs from cache
    let logs = await cache.lrange('app:logs', 0, limit - 1);
    logs = logs.map(l => {
      try {
        return JSON.parse(l);
      } catch (e) {
        return { message: l };
      }
    });

    // Apply filters
    if (level) {
      logs = logs.filter(l => l.level === level);
    }
    if (service) {
      logs = logs.filter(l => l.service === service);
    }

    res.json({
      success: true,
      data: {
        logs,
        total: logs.length
      }
    });
  } catch (error) {
    logger.error('Failed to get logs:', error);
    res.status(500).json({
      success: false,
      error: 'Failed to retrieve logs'
    });
  }
});

export default router;
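A sketch of the alert record shape the /alerts endpoint above expects to find in Redis (inferred from the lrange / JSON.parse / severity filtering; every field name other than severity is illustrative, not taken from this commit).

// Sketch only; the cache util's lpush/ltrim/lrange calls are the ones used in the routes above.
import { cache } from '../utils/cache.js';

async function pushAlert(alert) {
  // Newest first, matching the lrange(0, limit - 1) read in the /alerts route.
  await cache.lpush('system:alerts', JSON.stringify({
    id: `alert_${Date.now()}`,
    severity: 'warning',                       // critical | warning | info
    message: 'Queue depth above threshold',    // illustrative field
    createdAt: new Date().toISOString(),
    ...alert
  }));
  await cache.ltrim('system:alerts', 0, 499);  // keep a bounded history
}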
377
marketing-agent/services/api-gateway/src/routes/proxy.js
Normal file
@@ -0,0 +1,377 @@
import express from 'express';
import { createProxyMiddleware } from 'http-proxy-middleware';
import { serviceDiscovery } from '../services/serviceDiscovery.js';
import { authenticate, optionalAuth } from '../middleware/auth.js';
import { globalRateLimiter, endpointRateLimiter } from '../middleware/rateLimiter.js';
import { logger, logServiceCall } from '../utils/logger.js';
import { cache } from '../utils/cache.js';

const router = express.Router();

/**
 * Create proxy middleware for a service
 */
const createServiceProxy = (serviceName, pathRewrite = true) => {
  const pathRewriteConfig = pathRewrite ? {
    [`^/api/v1/${serviceName}`]: ''
  } : undefined;

  return createProxyMiddleware({
    target: serviceDiscovery.getService(serviceName).url,
    changeOrigin: true,
    pathRewrite: pathRewriteConfig,
    onProxyReq: (proxyReq, req, res) => {
      // Add request ID
      if (req.id) {
        proxyReq.setHeader('X-Request-ID', req.id);
      }

      // Forward user context
      if (req.user) {
        proxyReq.setHeader('X-User-ID', req.user.id);
        proxyReq.setHeader('X-Account-ID', req.user.accountId);
        proxyReq.setHeader('X-User-Role', req.user.role);
      }

      // Add internal service header
      proxyReq.setHeader('X-Internal-Service', 'api-gateway');

      // Log outgoing request
      logger.debug(`Proxying request to ${serviceName}`, {
        method: req.method,
        path: req.path,
        service: serviceName
      });
    },
    onProxyRes: (proxyRes, req, res) => {
      const duration = Date.now() - req.startTime;
      logServiceCall(
        serviceName,
        req.method,
        req.originalUrl,
        proxyRes.statusCode,
        duration
      );
    },
    onError: (err, req, res) => {
      logger.error(`Proxy error for ${serviceName}:`, err);

      // Mark service as unhealthy if connection failed
      if (err.code === 'ECONNREFUSED' || err.code === 'ETIMEDOUT') {
        serviceDiscovery.markServiceUnhealthy(serviceName);
      }

      res.status(502).json({
        success: false,
        error: 'Service temporarily unavailable',
        service: serviceName
      });
    }
  });
};

/**
 * Health check for all services
 */
router.get('/health/services', async (req, res) => {
  const health = serviceDiscovery.getAggregatedHealth();
  res.status(health.status === 'healthy' ? 200 : 503).json(health);
});

/**
 * Orchestrator routes
 */
router.use('/orchestrator',
  authenticate,
  globalRateLimiter,
  (req, res, next) => {
    req.startTime = Date.now();
    next();
  },
  createServiceProxy('orchestrator')
);

/**
 * Claude Agent routes
 */
router.use('/claude',
  authenticate,
  endpointRateLimiter('/api/v1/claude', { max: 20 }), // Lower rate limit for AI operations
  createServiceProxy('claudeAgent')
);

/**
 * GramJS Adapter routes
 */
router.use('/gramjs-adapter',
  authenticate,
  globalRateLimiter,
  createServiceProxy('gramjsAdapter')
);

// Also support /telegram alias for backward compatibility
router.use('/telegram',
  authenticate,
  globalRateLimiter,
  createServiceProxy('gramjsAdapter')
);

/**
 * Safety Guard routes
 */
router.use('/safety',
  authenticate,
  globalRateLimiter,
  createServiceProxy('safetyGuard')
);

/**
 * Analytics routes with caching
 */
router.get('/analytics/*',
  authenticate,
  cache.middleware((req) => `analytics:${req.user.accountId}:${req.path}`, 300), // 5 min cache
  createServiceProxy('analytics')
);

router.use('/analytics',
  authenticate,
  globalRateLimiter,
  createServiceProxy('analytics')
);

/**
 * Compliance routes
 */
router.use('/compliance',
  authenticate,
  globalRateLimiter,
  createServiceProxy('complianceGuard')
);

/**
 * A/B Testing routes
 */
router.use('/ab-testing',
  authenticate,
  globalRateLimiter,
  createServiceProxy('abTesting')
);

// Also support /experiments alias
router.use('/experiments',
  authenticate,
  globalRateLimiter,
  createServiceProxy('abTesting')
);

/**
 * Workflow routes
 */
router.use('/workflow',
  authenticate,
  globalRateLimiter,
  createServiceProxy('workflow')
);

// Also support /automation alias
router.use('/automation',
  authenticate,
  globalRateLimiter,
  createServiceProxy('workflow')
);

/**
 * Webhook routes
 */
router.use('/webhooks',
  authenticate,
  globalRateLimiter,
  createServiceProxy('webhook')
);

router.use('/webhook-events',
  authenticate,
  globalRateLimiter,
  createServiceProxy('webhook')
);

/**
 * Template routes
 */
router.use('/templates',
  authenticate,
  globalRateLimiter,
  createServiceProxy('template')
);

router.use('/template-categories',
  authenticate,
  globalRateLimiter,
  createServiceProxy('template')
);

router.use('/template-variables',
  authenticate,
  globalRateLimiter,
  createServiceProxy('template')
);

/**
 * I18n routes
 */
router.use('/translations',
  authenticate,
  globalRateLimiter,
  createServiceProxy('i18n')
);

router.use('/languages',
  authenticate,
  globalRateLimiter,
  createServiceProxy('i18n')
);

router.use('/localization',
  authenticate,
  globalRateLimiter,
  createServiceProxy('i18n')
);

/**
 * User Management routes
 */
router.use('/users',
  authenticate,
  globalRateLimiter,
  createServiceProxy('userManagement')
);

/**
 * Billing routes
 */
router.use('/billing/subscriptions',
  authenticate,
  globalRateLimiter,
  createServiceProxy('billing')
);

router.use('/billing/invoices',
  authenticate,
  globalRateLimiter,
  createServiceProxy('billing')
);

router.use('/billing/payment-methods',
  authenticate,
  globalRateLimiter,
  createServiceProxy('billing')
);

router.use('/billing/transactions',
  authenticate,
  globalRateLimiter,
  createServiceProxy('billing')
);

// Webhook endpoint for billing (no auth required)
router.use('/billing/webhooks',
  createServiceProxy('billing', false)
);

router.use('/users-stats',
  authenticate,
  globalRateLimiter,
  createServiceProxy('userManagement')
);

router.use('/groups',
  authenticate,
  globalRateLimiter,
  createServiceProxy('userManagement')
);

router.use('/groups-stats',
  authenticate,
  globalRateLimiter,
  createServiceProxy('userManagement')
);

router.use('/tags',
  authenticate,
  globalRateLimiter,
  createServiceProxy('userManagement')
);

router.use('/tags-stats',
  authenticate,
  globalRateLimiter,
  createServiceProxy('userManagement')
);

router.use('/tag-categories',
  authenticate,
  globalRateLimiter,
  createServiceProxy('userManagement')
);

router.use('/tags-popular',
  authenticate,
  globalRateLimiter,
  createServiceProxy('userManagement')
);

router.use('/tags-suggestions',
  authenticate,
  globalRateLimiter,
  createServiceProxy('userManagement')
);

router.use('/segments',
  authenticate,
  globalRateLimiter,
  createServiceProxy('userManagement')
);

router.use('/segments-stats',
  authenticate,
  globalRateLimiter,
  createServiceProxy('userManagement')
);

/**
 * Scheduler routes
 */
router.use('/scheduled-campaigns',
  authenticate,
  globalRateLimiter,
  createServiceProxy('scheduler')
);

router.use('/jobs',
  authenticate,
  globalRateLimiter,
  createServiceProxy('scheduler')
);

/**
 * Public endpoints (no auth required)
 */
router.use('/public/analytics',
  optionalAuth,
  endpointRateLimiter('/api/v1/public/analytics', { max: 30 }),
  createServiceProxy('analytics')
);

/**
 * Catch-all for undefined routes
 */
router.all('*', (req, res) => {
  res.status(404).json({
    success: false,
    error: 'Endpoint not found',
    path: req.path
  });
});

export default router;
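A sketch of the service-registry shape that createServiceProxy above relies on, inferred from the getService(...).url and markServiceUnhealthy calls. The host names, ports and the 'degraded' status value are assumptions; the real serviceDiscovery module may track more state.

// Assumed shape only, not the actual serviceDiscovery implementation.
const registry = {
  orchestrator:  { url: 'http://orchestrator:4001', healthy: true },
  gramjsAdapter: { url: 'http://gramjs-adapter:4002', healthy: true },
  analytics:     { url: 'http://analytics:4003', healthy: true }
};

const serviceDiscoverySketch = {
  getService: (name) => registry[name],
  markServiceUnhealthy: (name) => { registry[name].healthy = false; },
  getAggregatedHealth: () => ({
    status: Object.values(registry).every(s => s.healthy) ? 'healthy' : 'degraded',
    services: registry
  })
};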
@@ -0,0 +1,692 @@
|
||||
/**
|
||||
* @swagger
|
||||
* components:
|
||||
* schemas:
|
||||
* ScheduledCampaign:
|
||||
* type: object
|
||||
* required:
|
||||
* - campaignId
|
||||
* - name
|
||||
* - type
|
||||
* properties:
|
||||
* id:
|
||||
* type: string
|
||||
* description: Scheduled campaign unique identifier
|
||||
* example: sched_123456789
|
||||
* campaignId:
|
||||
* type: string
|
||||
* description: Associated campaign ID
|
||||
* example: camp_123456789
|
||||
* name:
|
||||
* type: string
|
||||
* description: Schedule name
|
||||
* example: Daily Morning Newsletter
|
||||
* description:
|
||||
* type: string
|
||||
* description: Schedule description
|
||||
* type:
|
||||
* type: string
|
||||
* enum: [one-time, recurring, trigger-based]
|
||||
* description: Schedule type
|
||||
* example: recurring
|
||||
* status:
|
||||
* type: string
|
||||
* enum: [active, paused, completed, cancelled]
|
||||
* description: Schedule status
|
||||
* example: active
|
||||
* schedule:
|
||||
* type: object
|
||||
* properties:
|
||||
* startDateTime:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* description: When to start the schedule
|
||||
* endDateTime:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* description: When to end the schedule (optional)
|
||||
* recurring:
|
||||
* type: object
|
||||
* properties:
|
||||
* pattern:
|
||||
* type: string
|
||||
* enum: [daily, weekly, monthly, custom]
|
||||
* example: daily
|
||||
* frequency:
|
||||
* type: object
|
||||
* properties:
|
||||
* interval:
|
||||
* type: integer
|
||||
* example: 1
|
||||
* unit:
|
||||
* type: string
|
||||
* enum: [day, week, month]
|
||||
* example: day
|
||||
* daysOfWeek:
|
||||
* type: array
|
||||
* items:
|
||||
* type: integer
|
||||
* minimum: 0
|
||||
* maximum: 6
|
||||
* description: Days of week (0=Sunday, 6=Saturday)
|
||||
* example: [1, 3, 5]
|
||||
* daysOfMonth:
|
||||
* type: array
|
||||
* items:
|
||||
* type: integer
|
||||
* minimum: 1
|
||||
* maximum: 31
|
||||
* description: Days of month
|
||||
* example: [1, 15]
|
||||
* time:
|
||||
* type: string
|
||||
* pattern: '^([01]?[0-9]|2[0-3]):[0-5][0-9]$'
|
||||
* description: Time in HH:MM format
|
||||
* example: "09:00"
|
||||
* timezone:
|
||||
* type: string
|
||||
* description: Timezone for schedule
|
||||
* example: America/New_York
|
||||
* triggers:
|
||||
* type: array
|
||||
* items:
|
||||
* type: object
|
||||
* properties:
|
||||
* event:
|
||||
* type: string
|
||||
* example: user_signup
|
||||
* conditions:
|
||||
* type: object
|
||||
* delay:
|
||||
* type: object
|
||||
* properties:
|
||||
* value:
|
||||
* type: integer
|
||||
* unit:
|
||||
* type: string
|
||||
* enum: [minutes, hours, days]
|
||||
* executions:
|
||||
* type: array
|
||||
* readOnly: true
|
||||
* items:
|
||||
* type: object
|
||||
* properties:
|
||||
* executionId:
|
||||
* type: string
|
||||
* executedAt:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* status:
|
||||
* type: string
|
||||
* enum: [completed, failed]
|
||||
* recipientsCount:
|
||||
* type: integer
|
||||
* nextRunTime:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* readOnly: true
|
||||
* description: Next scheduled execution time
|
||||
* lastRunTime:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* readOnly: true
|
||||
* description: Last execution time
|
||||
* totalExecutions:
|
||||
* type: integer
|
||||
* readOnly: true
|
||||
* description: Total number of executions
|
||||
* createdAt:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* readOnly: true
|
||||
* updatedAt:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* readOnly: true
|
||||
* createdBy:
|
||||
* type: string
|
||||
* readOnly: true
|
||||
*
|
||||
* ScheduleJob:
|
||||
* type: object
|
||||
* properties:
|
||||
* id:
|
||||
* type: string
|
||||
* description: Job unique identifier
|
||||
* scheduleId:
|
||||
* type: string
|
||||
* description: Associated schedule ID
|
||||
* campaignId:
|
||||
* type: string
|
||||
* description: Associated campaign ID
|
||||
* scheduledFor:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* description: When the job should run
|
||||
* status:
|
||||
* type: string
|
||||
* enum: [pending, processing, completed, failed, cancelled]
|
||||
* attempts:
|
||||
* type: integer
|
||||
* description: Number of execution attempts
|
||||
* lastError:
|
||||
* type: string
|
||||
* description: Last error message if failed
|
||||
* completedAt:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* result:
|
||||
* type: object
|
||||
* description: Execution result details
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /scheduler/scheduled-campaigns:
|
||||
* get:
|
||||
* summary: List all scheduled campaigns
|
||||
* tags: [Scheduled Campaigns]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* parameters:
|
||||
* - in: query
|
||||
* name: page
|
||||
* schema:
|
||||
* type: integer
|
||||
* default: 1
|
||||
* description: Page number
|
||||
* - in: query
|
||||
* name: limit
|
||||
* schema:
|
||||
* type: integer
|
||||
* default: 20
|
||||
* maximum: 100
|
||||
* description: Items per page
|
||||
* - in: query
|
||||
* name: status
|
||||
* schema:
|
||||
* type: string
|
||||
* enum: [active, paused, completed, cancelled]
|
||||
* description: Filter by status
|
||||
* - in: query
|
||||
* name: type
|
||||
* schema:
|
||||
* type: string
|
||||
* enum: [one-time, recurring, trigger-based]
|
||||
* description: Filter by type
|
||||
* - in: query
|
||||
* name: campaignId
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Filter by campaign ID
|
||||
* responses:
|
||||
* 200:
|
||||
* description: List of scheduled campaigns
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: true
|
||||
* data:
|
||||
* type: object
|
||||
* properties:
|
||||
* schedules:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: '#/components/schemas/ScheduledCampaign'
|
||||
* pagination:
|
||||
* $ref: '#/components/schemas/Pagination'
|
||||
* 401:
|
||||
* $ref: '#/components/responses/UnauthorizedError'
|
||||
*
|
||||
* post:
|
||||
* summary: Create scheduled campaign
|
||||
* tags: [Scheduled Campaigns]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* required:
|
||||
* - campaignId
|
||||
* - name
|
||||
* - type
|
||||
* - schedule
|
||||
* properties:
|
||||
* campaignId:
|
||||
* type: string
|
||||
* example: camp_123456789
|
||||
* name:
|
||||
* type: string
|
||||
* example: Daily Morning Newsletter
|
||||
* description:
|
||||
* type: string
|
||||
* type:
|
||||
* type: string
|
||||
* enum: [one-time, recurring, trigger-based]
|
||||
* example: recurring
|
||||
* schedule:
|
||||
* type: object
|
||||
* responses:
|
||||
* 201:
|
||||
* description: Scheduled campaign created successfully
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: true
|
||||
* data:
|
||||
* type: object
|
||||
* properties:
|
||||
* schedule:
|
||||
* $ref: '#/components/schemas/ScheduledCampaign'
|
||||
* 400:
|
||||
* $ref: '#/components/responses/ValidationError'
|
||||
* 401:
|
||||
* $ref: '#/components/responses/UnauthorizedError'
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /scheduler/scheduled-campaigns/{id}:
|
||||
* get:
|
||||
* summary: Get scheduled campaign by ID
|
||||
* tags: [Scheduled Campaigns]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Schedule ID
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Scheduled campaign details
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: true
|
||||
* data:
|
||||
* type: object
|
||||
* properties:
|
||||
* schedule:
|
||||
* $ref: '#/components/schemas/ScheduledCampaign'
|
||||
* 404:
|
||||
* $ref: '#/components/responses/NotFoundError'
|
||||
* 401:
|
||||
* $ref: '#/components/responses/UnauthorizedError'
|
||||
*
|
||||
* put:
|
||||
* summary: Update scheduled campaign
|
||||
* tags: [Scheduled Campaigns]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Schedule ID
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* name:
|
||||
* type: string
|
||||
* description:
|
||||
* type: string
|
||||
* schedule:
|
||||
* type: object
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Scheduled campaign updated successfully
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: true
|
||||
* data:
|
||||
* type: object
|
||||
* properties:
|
||||
* schedule:
|
||||
* $ref: '#/components/schemas/ScheduledCampaign'
|
||||
* 400:
|
||||
* $ref: '#/components/responses/ValidationError'
|
||||
* 404:
|
||||
* $ref: '#/components/responses/NotFoundError'
|
||||
* 401:
|
||||
* $ref: '#/components/responses/UnauthorizedError'
|
||||
*
|
||||
* delete:
|
||||
* summary: Delete scheduled campaign
|
||||
* tags: [Scheduled Campaigns]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Schedule ID
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Scheduled campaign deleted successfully
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: true
|
||||
* message:
|
||||
* type: string
|
||||
* example: Schedule deleted successfully
|
||||
* 404:
|
||||
* $ref: '#/components/responses/NotFoundError'
|
||||
* 401:
|
||||
* $ref: '#/components/responses/UnauthorizedError'
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /scheduler/scheduled-campaigns/{id}/pause:
|
||||
* post:
|
||||
* summary: Pause scheduled campaign
|
||||
* tags: [Scheduled Campaigns]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Schedule ID
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Schedule paused successfully
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: true
|
||||
* data:
|
||||
* type: object
|
||||
* properties:
|
||||
* schedule:
|
||||
* $ref: '#/components/schemas/ScheduledCampaign'
|
||||
* 404:
|
||||
* $ref: '#/components/responses/NotFoundError'
|
||||
* 401:
|
||||
* $ref: '#/components/responses/UnauthorizedError'
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /scheduler/scheduled-campaigns/{id}/resume:
|
||||
* post:
|
||||
* summary: Resume scheduled campaign
|
||||
* tags: [Scheduled Campaigns]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Schedule ID
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Schedule resumed successfully
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: true
|
||||
* data:
|
||||
* type: object
|
||||
* properties:
|
||||
* schedule:
|
||||
* $ref: '#/components/schemas/ScheduledCampaign'
|
||||
* 404:
|
||||
* $ref: '#/components/responses/NotFoundError'
|
||||
* 401:
|
||||
* $ref: '#/components/responses/UnauthorizedError'
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /scheduler/scheduled-campaigns/{id}/preview:
|
||||
* get:
|
||||
* summary: Preview next executions
|
||||
* tags: [Scheduled Campaigns]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Schedule ID
|
||||
* - in: query
|
||||
* name: count
|
||||
* schema:
|
||||
* type: integer
|
||||
* default: 10
|
||||
* minimum: 1
|
||||
* maximum: 50
|
||||
* description: Number of future executions to preview
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Preview of next executions
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: true
|
||||
* data:
|
||||
* type: object
|
||||
* properties:
|
||||
* preview:
|
||||
* type: array
|
||||
* items:
|
||||
* type: object
|
||||
* properties:
|
||||
* scheduledFor:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* dayOfWeek:
|
||||
* type: string
|
||||
* example: Monday
|
||||
* localTime:
|
||||
* type: string
|
||||
* example: "09:00 AM EST"
|
||||
* 404:
|
||||
* $ref: '#/components/responses/NotFoundError'
|
||||
* 401:
|
||||
* $ref: '#/components/responses/UnauthorizedError'
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /scheduler/jobs:
|
||||
* get:
|
||||
* summary: List schedule jobs
|
||||
* tags: [Scheduled Campaigns]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* parameters:
|
||||
* - in: query
|
||||
* name: scheduleId
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Filter by schedule ID
|
||||
* - in: query
|
||||
* name: status
|
||||
* schema:
|
||||
* type: string
|
||||
* enum: [pending, processing, completed, failed, cancelled]
|
||||
* description: Filter by status
|
||||
* - in: query
|
||||
* name: dateFrom
|
||||
* schema:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* description: Filter jobs scheduled from this date
|
||||
* - in: query
|
||||
* name: dateTo
|
||||
* schema:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* description: Filter jobs scheduled until this date
|
||||
* - in: query
|
||||
* name: page
|
||||
* schema:
|
||||
* type: integer
|
||||
* default: 1
|
||||
* - in: query
|
||||
* name: limit
|
||||
* schema:
|
||||
* type: integer
|
||||
* default: 20
|
||||
* responses:
|
||||
* 200:
|
||||
* description: List of schedule jobs
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: true
|
||||
* data:
|
||||
* type: object
|
||||
* properties:
|
||||
* jobs:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: '#/components/schemas/ScheduleJob'
|
||||
* pagination:
|
||||
* $ref: '#/components/schemas/Pagination'
|
||||
* 401:
|
||||
* $ref: '#/components/responses/UnauthorizedError'
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /scheduler/jobs/{id}:
|
||||
* get:
|
||||
* summary: Get job details
|
||||
* tags: [Scheduled Campaigns]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Job ID
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Job details
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: true
|
||||
* data:
|
||||
* type: object
|
||||
* properties:
|
||||
* job:
|
||||
* $ref: '#/components/schemas/ScheduleJob'
|
||||
* 404:
|
||||
* $ref: '#/components/responses/NotFoundError'
|
||||
* 401:
|
||||
* $ref: '#/components/responses/UnauthorizedError'
|
||||
*/
|
||||
|
||||
/**
|
||||
* @swagger
|
||||
* /scheduler/jobs/{id}/retry:
|
||||
* post:
|
||||
* summary: Retry failed job
|
||||
* tags: [Scheduled Campaigns]
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* description: Job ID
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Job retry initiated
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: true
|
||||
* data:
|
||||
* type: object
|
||||
* properties:
|
||||
* job:
|
||||
* $ref: '#/components/schemas/ScheduleJob'
|
||||
* 400:
|
||||
* description: Job cannot be retried
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* $ref: '#/components/schemas/Error'
|
||||
* 404:
|
||||
* $ref: '#/components/responses/NotFoundError'
|
||||
* 401:
|
||||
* $ref: '#/components/responses/UnauthorizedError'
|
||||
*/
|
||||
|
||||
// This is a documentation-only file for Swagger
|
||||
export default {};
|
||||
427
marketing-agent/services/api-gateway/src/routes/tenants.js
Normal file
@@ -0,0 +1,427 @@
const express = require('express');
const router = express.Router();
const Tenant = require('../models/Tenant');
const User = require('../models/User');
const { authenticateToken } = require('../middleware/auth');
const { requireTenantAdmin, requireFeature } = require('../middleware/tenantMiddleware');
const { validateRequest } = require('../middleware/validation');
const { body, param, query } = require('express-validator');
const { logger } = require('../utils/logger');

// Create new tenant (public endpoint for signup)
router.post('/signup',
  validateRequest([
    body('tenantName').notEmpty().trim(),
    body('email').isEmail().normalizeEmail(),
    body('password').isLength({ min: 8 }),
    body('username').isLength({ min: 3, max: 30 }),
    body('plan').optional().isIn(['free', 'starter', 'professional', 'enterprise']),
    body('domain').optional().isFQDN()
  ]),
  async (req, res) => {
    try {
      const { tenantName, email, password, username, plan = 'free', domain } = req.body;

      // Generate tenant slug
      const baseSlug = Tenant.generateSlug(tenantName);
      let slug = baseSlug;
      let counter = 1;

      // Ensure unique slug
      while (await Tenant.findOne({ slug })) {
        slug = `${baseSlug}-${counter}`;
        counter++;
      }

      // Create tenant
      const tenant = new Tenant({
        name: tenantName,
        slug,
        domain,
        plan,
        status: plan === 'free' ? 'trial' : 'active',
        owner: {
          email,
          name: username
        },
        trial: {
          startDate: new Date(),
          endDate: new Date(Date.now() + 14 * 24 * 60 * 60 * 1000) // 14 days trial
        }
      });

      // Set plan-specific limits
      const planLimits = {
        free: {
          users: 5,
          campaigns: 10,
          messagesPerMonth: 1000,
          telegramAccounts: 1,
          storage: 1073741824, // 1GB
          apiCallsPerHour: 1000
        },
        starter: {
          users: 20,
          campaigns: 50,
          messagesPerMonth: 10000,
          telegramAccounts: 3,
          storage: 5368709120, // 5GB
          apiCallsPerHour: 5000
        },
        professional: {
          users: 100,
          campaigns: 200,
          messagesPerMonth: 50000,
          telegramAccounts: 10,
          storage: 21474836480, // 20GB
          apiCallsPerHour: 20000
        },
        enterprise: {
          users: -1, // unlimited
          campaigns: -1,
          messagesPerMonth: -1,
          telegramAccounts: -1,
          storage: -1,
          apiCallsPerHour: -1
        }
      };

      tenant.limits = planLimits[plan];

      // Enable features based on plan
      const planFeatures = {
        free: {
          campaigns: true,
          analytics: true
        },
        starter: {
          campaigns: true,
          analytics: true,
          automation: true,
          apiAccess: true
        },
        professional: {
          campaigns: true,
          analytics: true,
          automation: true,
          apiAccess: true,
          abTesting: true,
          customReports: true,
          multiLanguage: true,
          advancedSegmentation: true
        },
        enterprise: {
          campaigns: true,
          analytics: true,
          automation: true,
          apiAccess: true,
          abTesting: true,
          customReports: true,
          multiLanguage: true,
          advancedSegmentation: true,
          whiteLabel: true,
          aiSuggestions: true
        }
      };

      tenant.features = planFeatures[plan];

      await tenant.save();

      // Create admin user for tenant
      const user = new User({
        tenantId: tenant._id,
        username,
        email,
        password,
        role: 'admin'
      });

      await user.save();

      // Update tenant owner reference
      tenant.owner.userId = user._id;
      await tenant.save();

      res.status(201).json({
        success: true,
        tenant: {
          id: tenant._id,
          name: tenant.name,
          slug: tenant.slug,
          domain: tenant.domain,
          plan: tenant.plan,
          trialEndsAt: tenant.trial.endDate
        },
        user: {
          id: user._id,
          username: user.username,
          email: user.email,
          role: user.role
        },
        loginUrl: tenant.domain ? `https://${tenant.domain}/login` : `https://${tenant.slug}.app.com/login`
      });

    } catch (error) {
      logger.error('Tenant signup error:', error);
      res.status(500).json({ error: 'Failed to create tenant' });
    }
  }
);

// Get tenant details
router.get('/current',
  authenticateToken,
  async (req, res) => {
    try {
      if (!req.tenant) {
        return res.status(404).json({ error: 'Tenant not found' });
      }

      const tenant = req.tenant.toObject();

      // Remove sensitive information
      delete tenant.billing.customerId;
      delete tenant.billing.subscriptionId;

      res.json({
        tenant,
        usagePercentage: req.tenant.usagePercentage,
        isTrialActive: req.tenant.isTrialActive
      });
    } catch (error) {
      logger.error('Get tenant error:', error);
      res.status(500).json({ error: 'Failed to get tenant details' });
    }
  }
);

// Update tenant settings
router.patch('/current/settings',
  authenticateToken,
  requireTenantAdmin,
  validateRequest([
    body('settings').isObject(),
    body('settings.timezone').optional().isString(),
    body('settings.language').optional().isString(),
    body('settings.dateFormat').optional().isString(),
    body('settings.timeFormat').optional().isIn(['12h', '24h']),
    body('settings.currency').optional().isString(),
    body('settings.allowSignup').optional().isBoolean(),
    body('settings.requireEmailVerification').optional().isBoolean(),
    body('settings.twoFactorAuth').optional().isBoolean()
  ]),
  async (req, res) => {
    try {
      const { settings } = req.body;

      // Update only provided settings
      Object.keys(settings).forEach(key => {
        if (req.tenant.settings[key] !== undefined) {
          req.tenant.settings[key] = settings[key];
        }
      });

      await req.tenant.save();

      res.json({
        success: true,
        settings: req.tenant.settings
      });
    } catch (error) {
      logger.error('Update tenant settings error:', error);
      res.status(500).json({ error: 'Failed to update settings' });
    }
  }
);

// Update tenant branding
router.patch('/current/branding',
  authenticateToken,
  requireTenantAdmin,
  requireFeature('whiteLabel'),
  validateRequest([
    body('branding').isObject(),
    body('branding.logo').optional().isURL(),
    body('branding.primaryColor').optional().matches(/^#[0-9A-F]{6}$/i),
    body('branding.secondaryColor').optional().matches(/^#[0-9A-F]{6}$/i),
    body('branding.customCss').optional().isString(),
    body('branding.emailFooter').optional().isString(),
    body('branding.supportEmail').optional().isEmail(),
    body('branding.supportUrl').optional().isURL()
  ]),
  async (req, res) => {
    try {
      const { branding } = req.body;

      // Update only provided branding options
      Object.keys(branding).forEach(key => {
        if (req.tenant.branding[key] !== undefined) {
          req.tenant.branding[key] = branding[key];
        }
      });

      await req.tenant.save();

      res.json({
        success: true,
        branding: req.tenant.branding
      });
    } catch (error) {
      logger.error('Update tenant branding error:', error);
      res.status(500).json({ error: 'Failed to update branding' });
    }
  }
);

// Get tenant usage statistics
router.get('/current/usage',
  authenticateToken,
  async (req, res) => {
    try {
      const tenant = req.tenant;

      res.json({
        usage: tenant.usage,
        limits: tenant.limits,
        percentages: tenant.usagePercentage,
        plan: tenant.plan,
        billingPeriod: {
          start: tenant.usage.lastResetDate,
          end: new Date(tenant.usage.lastResetDate.getTime() + 30 * 24 * 60 * 60 * 1000)
        }
      });
    } catch (error) {
      logger.error('Get tenant usage error:', error);
      res.status(500).json({ error: 'Failed to get usage statistics' });
    }
  }
);

// List all tenants (superadmin only)
router.get('/',
  authenticateToken,
  async (req, res) => {
    try {
      if (req.user.role !== 'superadmin') {
        return res.status(403).json({ error: 'Access denied' });
      }

      const { page = 1, limit = 20, status, plan, search } = req.query;

      const query = {};
      if (status) query.status = status;
      if (plan) query.plan = plan;
      if (search) {
        query.$or = [
          { name: { $regex: search, $options: 'i' } },
          { slug: { $regex: search, $options: 'i' } },
          { domain: { $regex: search, $options: 'i' } },
          { 'owner.email': { $regex: search, $options: 'i' } }
        ];
      }

      const tenants = await Tenant.find(query)
        .select('-billing.customerId -billing.subscriptionId')
        .sort({ createdAt: -1 })
        .limit(limit * 1)
        .skip((page - 1) * limit);

      const total = await Tenant.countDocuments(query);

      res.json({
        tenants,
        pagination: {
          page: parseInt(page),
          limit: parseInt(limit),
          total,
          pages: Math.ceil(total / limit)
        }
      });
    } catch (error) {
      logger.error('List tenants error:', error);
      res.status(500).json({ error: 'Failed to list tenants' });
    }
  }
);

// Update tenant (superadmin only)
router.patch('/:tenantId',
  authenticateToken,
  validateRequest([
    param('tenantId').isMongoId(),
    body('status').optional().isIn(['active', 'suspended', 'inactive', 'trial']),
    body('plan').optional().isIn(['free', 'starter', 'professional', 'enterprise', 'custom']),
    body('limits').optional().isObject(),
    body('features').optional().isObject()
  ]),
  async (req, res) => {
    try {
      if (req.user.role !== 'superadmin') {
        return res.status(403).json({ error: 'Access denied' });
      }

      const { tenantId } = req.params;
      const updates = req.body;

      const tenant = await Tenant.findById(tenantId);
      if (!tenant) {
        return res.status(404).json({ error: 'Tenant not found' });
      }

      // Update allowed fields
      ['status', 'plan', 'limits', 'features'].forEach(field => {
        if (updates[field] !== undefined) {
          tenant[field] = updates[field];
        }
      });

      await tenant.save();

      res.json({
        success: true,
        tenant
      });
    } catch (error) {
      logger.error('Update tenant error:', error);
      res.status(500).json({ error: 'Failed to update tenant' });
    }
  }
);

// Delete tenant (superadmin only)
router.delete('/:tenantId',
  authenticateToken,
  validateRequest([
    param('tenantId').isMongoId()
  ]),
  async (req, res) => {
    try {
      if (req.user.role !== 'superadmin') {
        return res.status(403).json({ error: 'Access denied' });
      }

      const { tenantId } = req.params;

      const tenant = await Tenant.findById(tenantId);
      if (!tenant) {
        return res.status(404).json({ error: 'Tenant not found' });
      }

      // Soft delete
      tenant.status = 'inactive';
      tenant.deletedAt = new Date();
      await tenant.save();

      res.json({
        success: true,
        message: 'Tenant deleted successfully'
      });
    } catch (error) {
      logger.error('Delete tenant error:', error);
      res.status(500).json({ error: 'Failed to delete tenant' });
    }
  }
);

module.exports = router;
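A minimal sketch of calling the public signup endpoint above (not part of the commit). The /api/tenants mount path and host are assumptions; the payload fields follow the validators on the route.

// Sketch only; mount path, host and all example values are assumptions.
const signupRes = await fetch('http://localhost:3000/api/tenants/signup', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    tenantName: 'Acme Marketing',
    email: 'owner@acme.test',
    password: 'correct-horse-battery',   // min 8 chars
    username: 'acme-admin',              // 3-30 chars
    plan: 'starter'                      // optional: free | starter | professional | enterprise
  })
});
const { tenant, user, loginUrl } = await signupRes.json();
console.log(tenant.slug, user.role, loginUrl);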
382
marketing-agent/services/api-gateway/src/routes/users.js
Normal file
@@ -0,0 +1,382 @@
|
||||
import express from 'express';
import { User } from '../models/User.js';
import { Role } from '../models/Role.js';
import { authenticate } from '../middleware/auth.js';
import { checkPermission, requireRole } from '../middleware/permission.js';
import { validateRequest } from '../middleware/validation.js';
import { body, param, query } from 'express-validator';

const router = express.Router();

// List all users
router.get('/',
  authenticate,
  checkPermission('users', 'read'),
  validateRequest([
    query('page').optional().isInt({ min: 1 }),
    query('limit').optional().isInt({ min: 1, max: 100 }),
    query('role').optional().isIn(['admin', 'manager', 'operator', 'viewer']),
    query('isActive').optional().isBoolean()
  ]),
  async (req, res) => {
    try {
      const {
        page = 1,
        limit = 20,
        role,
        isActive,
        search
      } = req.query;

      const filter = {};
      if (role) filter.role = role;
      if (isActive !== undefined) filter.isActive = isActive === 'true';
      if (search) {
        filter.$or = [
          { username: { $regex: search, $options: 'i' } },
          { email: { $regex: search, $options: 'i' } }
        ];
      }

      const users = await User.find(filter)
        .select('-password -twoFactorSecret -apiKeys.key')
        .populate('metadata.createdBy', 'username')
        .populate('metadata.updatedBy', 'username')
        .sort({ createdAt: -1 })
        .limit(limit * 1)
        .skip((page - 1) * limit);

      const total = await User.countDocuments(filter);

      res.json({
        success: true,
        data: {
          users,
          pagination: {
            page: parseInt(page),
            limit: parseInt(limit),
            total,
            pages: Math.ceil(total / limit)
          }
        }
      });
    } catch (error) {
      console.error('List users error:', error);
      res.status(500).json({
        success: false,
        error: 'Failed to list users'
      });
    }
  }
);
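For a request such as GET /users?role=manager&isActive=true&search=jo (relative to wherever this router is mounted), the branches above produce the following Mongo filter; this is purely an illustration of the query object that gets built:

// Filter produced for ?role=manager&isActive=true&search=jo
const filter = {
  role: 'manager',
  isActive: true,
  $or: [
    { username: { $regex: 'jo', $options: 'i' } },
    { email: { $regex: 'jo', $options: 'i' } }
  ]
};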

// Get user by ID
router.get('/:userId',
  authenticate,
  checkPermission('users', 'read'),
  validateRequest([
    param('userId').isMongoId()
  ]),
  async (req, res) => {
    try {
      const user = await User.findById(req.params.userId)
        .select('-password -twoFactorSecret -apiKeys.key')
        .populate('metadata.createdBy', 'username')
        .populate('metadata.updatedBy', 'username');

      if (!user) {
        return res.status(404).json({
          success: false,
          error: 'User not found'
        });
      }

      res.json({
        success: true,
        data: user
      });
    } catch (error) {
      console.error('Get user error:', error);
      res.status(500).json({
        success: false,
        error: 'Failed to get user'
      });
    }
  }
);

// Update user
router.patch('/:userId',
  authenticate,
  checkPermission('users', 'update'),
  validateRequest([
    param('userId').isMongoId(),
    body('email').optional().isEmail().normalizeEmail(),
    body('role').optional().isIn(['admin', 'manager', 'operator', 'viewer']),
    body('isActive').optional().isBoolean(),
    body('permissions').optional().isArray()
  ]),
  async (req, res) => {
    try {
      const { userId } = req.params;
      const updates = {};

      // Only admin can change roles
      if (req.body.role && req.user.role !== 'admin') {
        return res.status(403).json({
          success: false,
          error: 'Only administrators can change user roles'
        });
      }

      // Allowed updates
      const allowedUpdates = ['email', 'role', 'isActive', 'permissions', 'preferences'];
      Object.keys(req.body).forEach(key => {
        if (allowedUpdates.includes(key)) {
          updates[key] = req.body[key];
        }
      });

      updates['metadata.updatedBy'] = req.user._id;

      const user = await User.findByIdAndUpdate(
        userId,
        { $set: updates },
        { new: true, runValidators: true }
      ).select('-password -twoFactorSecret');

      if (!user) {
        return res.status(404).json({
          success: false,
          error: 'User not found'
        });
      }

      res.json({
        success: true,
        data: user
      });
    } catch (error) {
      console.error('Update user error:', error);
      res.status(500).json({
        success: false,
        error: 'Failed to update user'
      });
    }
  }
);

// Delete user
router.delete('/:userId',
  authenticate,
  requireRole('admin'),
  validateRequest([
    param('userId').isMongoId()
  ]),
  async (req, res) => {
    try {
      const { userId } = req.params;

      // Prevent self-deletion
      if (userId === req.user._id.toString()) {
        return res.status(400).json({
          success: false,
          error: 'Cannot delete your own account'
        });
      }

      const user = await User.findByIdAndDelete(userId);

      if (!user) {
        return res.status(404).json({
          success: false,
          error: 'User not found'
        });
      }

      res.json({
        success: true,
        message: 'User deleted successfully'
      });
    } catch (error) {
      console.error('Delete user error:', error);
      res.status(500).json({
        success: false,
        error: 'Failed to delete user'
      });
    }
  }
);

// Reset user password
router.post('/:userId/reset-password',
  authenticate,
  checkPermission('users', 'update'),
  validateRequest([
    param('userId').isMongoId(),
    body('newPassword').isLength({ min: 6 })
  ]),
  async (req, res) => {
    try {
      const { userId } = req.params;
      const { newPassword } = req.body;

      const user = await User.findById(userId);

      if (!user) {
        return res.status(404).json({
          success: false,
          error: 'User not found'
        });
      }

      user.password = newPassword;
      user.metadata.updatedBy = req.user._id;
      await user.save();

      res.json({
        success: true,
        message: 'Password reset successfully'
      });
    } catch (error) {
      console.error('Reset password error:', error);
      res.status(500).json({
        success: false,
        error: 'Failed to reset password'
      });
    }
  }
);
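The handler assigns the plaintext newPassword to user.password and saves, which only stays safe if the User model hashes the password in a pre-save hook. That model is not part of this hunk, so the sketch below is an assumption about what ../models/User.js would contain; bcryptjs and the schema variable name are illustrative:

import bcrypt from 'bcryptjs';

// Assumed pre-save hook on the User schema: hash the password whenever it changes
userSchema.pre('save', async function (next) {
  if (!this.isModified('password')) return next();
  this.password = await bcrypt.hash(this.password, 10);
  next();
});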

// Grant permissions to user
router.post('/:userId/permissions',
  authenticate,
  requireRole('admin'),
  validateRequest([
    param('userId').isMongoId(),
    body('resource').notEmpty(),
    body('actions').isArray()
  ]),
  async (req, res) => {
    try {
      const { userId } = req.params;
      const { resource, actions } = req.body;

      const user = await User.findById(userId);

      if (!user) {
        return res.status(404).json({
          success: false,
          error: 'User not found'
        });
      }

      // Check if permission already exists
      const existingPermission = user.permissions.find(p => p.resource === resource);

      if (existingPermission) {
        // Update existing permission
        existingPermission.actions = [...new Set([...existingPermission.actions, ...actions])];
      } else {
        // Add new permission
        user.permissions.push({ resource, actions });
      }

      user.metadata.updatedBy = req.user._id;
      await user.save();

      res.json({
        success: true,
        data: user.permissions
      });
    } catch (error) {
      console.error('Grant permissions error:', error);
      res.status(500).json({
        success: false,
        error: 'Failed to grant permissions'
      });
    }
  }
);
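The merge above deduplicates actions through a Set, so granting overlapping actions is idempotent. For example:

// Existing permission: { resource: 'campaigns', actions: ['read'] }
// Request body:        { resource: 'campaigns', actions: ['read', 'update'] }
const merged = [...new Set([...['read'], ...['read', 'update']])];
// merged === ['read', 'update'], the duplicate 'read' is dropped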

// Revoke permissions from user
router.delete('/:userId/permissions/:resource',
  authenticate,
  requireRole('admin'),
  validateRequest([
    param('userId').isMongoId(),
    param('resource').notEmpty()
  ]),
  async (req, res) => {
    try {
      const { userId, resource } = req.params;

      const user = await User.findByIdAndUpdate(
        userId,
        {
          $pull: { permissions: { resource } },
          $set: { 'metadata.updatedBy': req.user._id }
        },
        { new: true }
      ).select('permissions');

      if (!user) {
        return res.status(404).json({
          success: false,
          error: 'User not found'
        });
      }

      res.json({
        success: true,
        data: user.permissions
      });
    } catch (error) {
      console.error('Revoke permissions error:', error);
      res.status(500).json({
        success: false,
        error: 'Failed to revoke permissions'
      });
    }
  }
);

// Get user activity logs
router.get('/:userId/activity',
  authenticate,
  checkPermission('users', 'read'),
  validateRequest([
    param('userId').isMongoId(),
    query('limit').optional().isInt({ min: 1, max: 100 })
  ]),
  async (req, res) => {
    try {
      const { userId } = req.params;
      const { limit = 50 } = req.query;

      // In a real application, you would fetch from an activity log collection
      // For now, returning mock data
      const activities = [
        {
          action: 'login',
          timestamp: new Date(),
          ip: req.ip,
          userAgent: req.get('user-agent')
        }
      ];

      res.json({
        success: true,
        data: activities
      });
    } catch (error) {
      console.error('Get activity logs error:', error);
      res.status(500).json({
        success: false,
        error: 'Failed to get activity logs'
      });
    }
  }
);

export default router;
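How these routers are mounted is not shown in this hunk. A minimal sketch of the wiring one would expect in src/app.js; the /api/users and /api/tenants prefixes and the tenants file path are assumptions, while port 3000 matches the Dockerfile:

// Hypothetical excerpt from src/app.js (mount points are assumptions)
import express from 'express';
import usersRouter from './routes/users.js';
import tenantsRouter from './routes/tenants.js';

const app = express();
app.use(express.json());
app.use('/api/users', usersRouter);
app.use('/api/tenants', tenantsRouter);

app.listen(3000);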
1084
marketing-agent/services/api-gateway/src/routes/usersDocs.js
Normal file
File diff suppressed because it is too large
767
marketing-agent/services/api-gateway/src/services/backup.js
Normal file
@@ -0,0 +1,767 @@
import fs from 'fs/promises';
|
||||
import path from 'path';
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import archiver from 'archiver';
|
||||
import { createReadStream, createWriteStream } from 'fs';
|
||||
import { logger } from '../utils/logger.js';
|
||||
import { config } from '../config/index.js';
|
||||
import mongoose from 'mongoose';
|
||||
import Redis from 'ioredis';
|
||||
import crypto from 'crypto';
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
/**
|
||||
* Backup Service for data persistence and recovery
|
||||
*/
|
||||
export class BackupService {
|
||||
constructor() {
|
||||
this.backupPath = process.env.BACKUP_PATH || '/backups';
|
||||
this.encryptionKey = process.env.BACKUP_ENCRYPTION_KEY || crypto.randomBytes(32);
|
||||
this.redis = new Redis(config.redis);
|
||||
this.isRunning = false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a full system backup
|
||||
*/
|
||||
async createFullBackup(options = {}) {
|
||||
if (this.isRunning) {
|
||||
throw new Error('Backup already in progress');
|
||||
}
|
||||
|
||||
this.isRunning = true;
|
||||
const backupId = `backup-${Date.now()}`;
|
||||
const tempDir = path.join(this.backupPath, 'temp', backupId);
|
||||
|
||||
try {
|
||||
logger.info('Starting full system backup', { backupId });
|
||||
|
||||
// Create temp directory
|
||||
await fs.mkdir(tempDir, { recursive: true });
|
||||
|
||||
// 1. Backup MongoDB
|
||||
await this.backupMongoDB(tempDir);
|
||||
|
||||
// 2. Backup Redis
|
||||
await this.backupRedis(tempDir);
|
||||
|
||||
// 3. Backup PostgreSQL
|
||||
await this.backupPostgreSQL(tempDir);
|
||||
|
||||
// 4. Backup files (sessions, uploads, etc.)
|
||||
await this.backupFiles(tempDir);
|
||||
|
||||
// 5. Create metadata
|
||||
await this.createBackupMetadata(tempDir, {
|
||||
type: 'full',
|
||||
timestamp: new Date().toISOString(),
|
||||
version: '1.0.0',
|
||||
...options
|
||||
});
|
||||
|
||||
// 6. Create archive
|
||||
const archivePath = await this.createArchive(tempDir, backupId);
|
||||
|
||||
// 7. Encrypt if requested
|
||||
if (options.encrypt) {
|
||||
await this.encryptBackup(archivePath);
|
||||
}
|
||||
|
||||
// 8. Upload to cloud if configured
|
||||
if (options.uploadToCloud) {
|
||||
await this.uploadToCloud(archivePath);
|
||||
}
|
||||
|
||||
// Clean up temp directory
|
||||
await fs.rm(tempDir, { recursive: true, force: true });
|
||||
|
||||
logger.info('Full system backup completed', { backupId, path: archivePath });
|
||||
|
||||
return {
|
||||
backupId,
|
||||
path: archivePath,
|
||||
size: (await fs.stat(archivePath)).size,
|
||||
timestamp: new Date().toISOString()
|
||||
};
|
||||
|
||||
} catch (error) {
|
||||
logger.error('Backup failed', { error: error.message, backupId });
|
||||
|
||||
// Clean up on failure
|
||||
try {
|
||||
await fs.rm(tempDir, { recursive: true, force: true });
|
||||
} catch (cleanupError) {
|
||||
logger.error('Failed to clean up temp directory', cleanupError);
|
||||
}
|
||||
|
||||
throw error;
|
||||
} finally {
|
||||
this.isRunning = false;
|
||||
}
|
||||
}
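createFullBackup is the orchestration entry point for everything below. A hedged sketch of how a gateway endpoint might trigger it; the route path is an assumption and authentication middleware is omitted for brevity:

// Hypothetical admin endpoint that triggers a full backup (path is an assumption)
import express from 'express';
import { backupService } from '../services/backup.js';

const backupRouter = express.Router();

backupRouter.post('/admin/backups', async (req, res) => {
  try {
    const result = await backupService.createFullBackup({ encrypt: true });
    res.json({ success: true, data: result });
  } catch (error) {
    // createFullBackup throws if another backup is already in progress
    res.status(409).json({ success: false, error: error.message });
  }
});

export default backupRouter;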
|
||||
|
||||
/**
|
||||
* Backup MongoDB databases
|
||||
*/
|
||||
async backupMongoDB(targetDir) {
|
||||
const mongoDir = path.join(targetDir, 'mongodb');
|
||||
await fs.mkdir(mongoDir, { recursive: true });
|
||||
|
||||
const uri = config.mongodb.uri;
|
||||
const dumpPath = path.join(mongoDir, 'dump');
|
||||
|
||||
try {
|
||||
// Use mongodump
|
||||
const command = `mongodump --uri="${uri}" --out="${dumpPath}"`;
|
||||
const { stdout, stderr } = await execAsync(command);
|
||||
|
||||
if (stderr && !stderr.includes('done dumping')) {
|
||||
throw new Error(`MongoDB backup error: ${stderr}`);
|
||||
}
|
||||
|
||||
logger.info('MongoDB backup completed', { path: dumpPath });
|
||||
} catch (error) {
|
||||
// Fallback to manual export if mongodump is not available
|
||||
logger.warn('mongodump not available, using manual export');
|
||||
await this.manualMongoBackup(mongoDir);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Manual MongoDB backup using mongoose
|
||||
*/
|
||||
async manualMongoBackup(targetDir) {
|
||||
const db = mongoose.connection;
|
||||
const collections = await db.db.listCollections().toArray();
|
||||
|
||||
for (const collection of collections) {
|
||||
const collectionName = collection.name;
|
||||
const data = await db.db.collection(collectionName).find({}).toArray();
|
||||
|
||||
const filePath = path.join(targetDir, `${collectionName}.json`);
|
||||
await fs.writeFile(filePath, JSON.stringify(data, null, 2));
|
||||
|
||||
logger.info(`Exported collection: ${collectionName}`, { count: data.length });
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Backup Redis data
|
||||
*/
|
||||
async backupRedis(targetDir) {
|
||||
const redisDir = path.join(targetDir, 'redis');
|
||||
await fs.mkdir(redisDir, { recursive: true });
|
||||
|
||||
try {
|
||||
// Get all keys
|
||||
const keys = await this.redis.keys('*');
|
||||
const backup = {};
|
||||
|
||||
for (const key of keys) {
|
||||
const type = await this.redis.type(key);
|
||||
let value;
|
||||
|
||||
switch (type) {
|
||||
case 'string':
|
||||
value = await this.redis.get(key);
|
||||
break;
|
||||
case 'list':
|
||||
value = await this.redis.lrange(key, 0, -1);
|
||||
break;
|
||||
case 'set':
|
||||
value = await this.redis.smembers(key);
|
||||
break;
|
||||
case 'zset':
|
||||
value = await this.redis.zrange(key, 0, -1, 'WITHSCORES');
|
||||
break;
|
||||
case 'hash':
|
||||
value = await this.redis.hgetall(key);
|
||||
break;
|
||||
default:
|
||||
continue;
|
||||
}
|
||||
|
||||
const ttl = await this.redis.ttl(key);
|
||||
backup[key] = { type, value, ttl: ttl > 0 ? ttl : null };
|
||||
}
|
||||
|
||||
const backupPath = path.join(redisDir, 'redis-backup.json');
|
||||
await fs.writeFile(backupPath, JSON.stringify(backup, null, 2));
|
||||
|
||||
logger.info('Redis backup completed', { keys: keys.length });
|
||||
} catch (error) {
|
||||
logger.error('Redis backup failed', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Backup PostgreSQL database
|
||||
*/
|
||||
async backupPostgreSQL(targetDir) {
|
||||
const pgDir = path.join(targetDir, 'postgresql');
|
||||
await fs.mkdir(pgDir, { recursive: true });
|
||||
|
||||
const pgConfig = {
|
||||
host: process.env.POSTGRES_HOST || 'postgres',
|
||||
port: process.env.POSTGRES_PORT || 5432,
|
||||
database: process.env.POSTGRES_DB || 'marketing_agent',
|
||||
user: process.env.POSTGRES_USER || 'postgres',
|
||||
password: process.env.POSTGRES_PASSWORD
|
||||
};
|
||||
|
||||
const dumpPath = path.join(pgDir, 'database.sql');
|
||||
|
||||
try {
|
||||
const command = `PGPASSWORD="${pgConfig.password}" pg_dump -h ${pgConfig.host} -p ${pgConfig.port} -U ${pgConfig.user} -d ${pgConfig.database} -f "${dumpPath}"`;
|
||||
const { stdout, stderr } = await execAsync(command);
|
||||
|
||||
if (stderr) {
|
||||
logger.warn('PostgreSQL backup warnings', { stderr });
|
||||
}
|
||||
|
||||
logger.info('PostgreSQL backup completed', { path: dumpPath });
|
||||
} catch (error) {
|
||||
logger.error('PostgreSQL backup failed', error);
|
||||
// Continue without PostgreSQL backup if it fails
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Backup important files
|
||||
*/
|
||||
async backupFiles(targetDir) {
|
||||
const filesDir = path.join(targetDir, 'files');
|
||||
await fs.mkdir(filesDir, { recursive: true });
|
||||
|
||||
const filesToBackup = [
|
||||
{
|
||||
source: '/app/gramjs_sessions',
|
||||
dest: 'gramjs_sessions',
|
||||
description: 'Telegram sessions'
|
||||
},
|
||||
{
|
||||
source: '/app/uploads',
|
||||
dest: 'uploads',
|
||||
description: 'User uploads'
|
||||
},
|
||||
{
|
||||
source: '/app/templates',
|
||||
dest: 'templates',
|
||||
description: 'Message templates'
|
||||
}
|
||||
];
|
||||
|
||||
for (const file of filesToBackup) {
|
||||
try {
|
||||
const sourcePath = file.source;
|
||||
const destPath = path.join(filesDir, file.dest);
|
||||
|
||||
// Check if source exists
|
||||
await fs.access(sourcePath);
|
||||
|
||||
// Copy directory
|
||||
await this.copyDirectory(sourcePath, destPath);
|
||||
|
||||
logger.info(`Backed up ${file.description}`, {
|
||||
source: sourcePath,
|
||||
dest: destPath
|
||||
});
|
||||
} catch (error) {
|
||||
logger.warn(`Failed to backup ${file.description}`, {
|
||||
error: error.message
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy directory recursively
|
||||
*/
|
||||
async copyDirectory(source, destination) {
|
||||
await fs.mkdir(destination, { recursive: true });
|
||||
|
||||
const entries = await fs.readdir(source, { withFileTypes: true });
|
||||
|
||||
for (const entry of entries) {
|
||||
const sourcePath = path.join(source, entry.name);
|
||||
const destPath = path.join(destination, entry.name);
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
await this.copyDirectory(sourcePath, destPath);
|
||||
} else {
|
||||
await fs.copyFile(sourcePath, destPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create backup metadata
|
||||
*/
|
||||
async createBackupMetadata(targetDir, metadata) {
|
||||
const metadataPath = path.join(targetDir, 'metadata.json');
|
||||
|
||||
const fullMetadata = {
|
||||
...metadata,
|
||||
services: {
|
||||
mongodb: { version: await this.getMongoVersion() },
|
||||
redis: { version: await this.getRedisVersion() },
|
||||
postgresql: { version: await this.getPostgresVersion() }
|
||||
},
|
||||
system: {
|
||||
platform: process.platform,
|
||||
arch: process.arch,
|
||||
nodeVersion: process.version
|
||||
}
|
||||
};
|
||||
|
||||
await fs.writeFile(metadataPath, JSON.stringify(fullMetadata, null, 2));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create compressed archive
|
||||
*/
|
||||
async createArchive(sourceDir, backupId) {
|
||||
const archivePath = path.join(this.backupPath, `${backupId}.tar.gz`);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const output = createWriteStream(archivePath);
|
||||
const archive = archiver('tar', {
|
||||
gzip: true,
|
||||
gzipOptions: { level: 9 }
|
||||
});
|
||||
|
||||
output.on('close', () => {
|
||||
logger.info('Archive created', {
|
||||
path: archivePath,
|
||||
size: archive.pointer()
|
||||
});
|
||||
resolve(archivePath);
|
||||
});
|
||||
|
||||
archive.on('error', reject);
|
||||
archive.pipe(output);
|
||||
archive.directory(sourceDir, false);
|
||||
archive.finalize();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Restore from backup
|
||||
*/
|
||||
async restoreFromBackup(backupPath, options = {}) {
|
||||
if (this.isRunning) {
|
||||
throw new Error('Restore already in progress');
|
||||
}
|
||||
|
||||
this.isRunning = true;
|
||||
const tempDir = path.join(this.backupPath, 'temp', `restore-${Date.now()}`);
|
||||
|
||||
try {
|
||||
logger.info('Starting system restore', { backupPath });
|
||||
|
||||
// 1. Extract archive
|
||||
await this.extractArchive(backupPath, tempDir);
|
||||
|
||||
// 2. Verify metadata
|
||||
const metadata = await this.verifyBackupMetadata(tempDir);
|
||||
|
||||
// 3. Restore MongoDB
|
||||
if (!options.skipMongoDB) {
|
||||
await this.restoreMongoDB(tempDir);
|
||||
}
|
||||
|
||||
// 4. Restore Redis
|
||||
if (!options.skipRedis) {
|
||||
await this.restoreRedis(tempDir);
|
||||
}
|
||||
|
||||
// 5. Restore PostgreSQL
|
||||
if (!options.skipPostgreSQL) {
|
||||
await this.restorePostgreSQL(tempDir);
|
||||
}
|
||||
|
||||
// 6. Restore files
|
||||
if (!options.skipFiles) {
|
||||
await this.restoreFiles(tempDir);
|
||||
}
|
||||
|
||||
// Clean up
|
||||
await fs.rm(tempDir, { recursive: true, force: true });
|
||||
|
||||
logger.info('System restore completed', { backupPath });
|
||||
|
||||
return {
|
||||
success: true,
|
||||
metadata,
|
||||
timestamp: new Date().toISOString()
|
||||
};
|
||||
|
||||
} catch (error) {
|
||||
logger.error('Restore failed', { error: error.message });
|
||||
|
||||
// Clean up on failure
|
||||
try {
|
||||
await fs.rm(tempDir, { recursive: true, force: true });
|
||||
} catch (cleanupError) {
|
||||
logger.error('Failed to clean up temp directory', cleanupError);
|
||||
}
|
||||
|
||||
throw error;
|
||||
} finally {
|
||||
this.isRunning = false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get service versions
|
||||
*/
|
||||
async getMongoVersion() {
|
||||
try {
|
||||
const info = await mongoose.connection.db.admin().serverInfo();
|
||||
return info.version;
|
||||
} catch (error) {
|
||||
return 'unknown';
|
||||
}
|
||||
}
|
||||
|
||||
async getRedisVersion() {
|
||||
try {
|
||||
const info = await this.redis.info('server');
|
||||
const match = info.match(/redis_version:(.+)/);
|
||||
return match ? match[1].trim() : 'unknown';
|
||||
} catch (error) {
|
||||
return 'unknown';
|
||||
}
|
||||
}
|
||||
|
||||
async getPostgresVersion() {
|
||||
try {
|
||||
const { stdout } = await execAsync('psql --version');
|
||||
return stdout.trim();
|
||||
} catch (error) {
|
||||
return 'unknown';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Upload backup to cloud storage
|
||||
*/
|
||||
async uploadToCloud(backupPath) {
|
||||
// Implement cloud storage upload (S3, GCS, Azure Blob, etc.)
|
||||
logger.info('Cloud upload not implemented', { backupPath });
|
||||
}
|
||||
|
||||
/**
|
||||
* List available backups
|
||||
*/
|
||||
async listBackups() {
|
||||
try {
|
||||
const files = await fs.readdir(this.backupPath);
|
||||
const backups = [];
|
||||
|
||||
for (const file of files) {
|
||||
if (file.endsWith('.tar.gz')) {
|
||||
const filePath = path.join(this.backupPath, file);
|
||||
const stats = await fs.stat(filePath);
|
||||
|
||||
backups.push({
|
||||
id: file.replace('.tar.gz', ''),
|
||||
filename: file,
|
||||
path: filePath,
|
||||
size: stats.size,
|
||||
created: stats.birthtime,
|
||||
modified: stats.mtime
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return backups.sort((a, b) => b.created - a.created);
|
||||
} catch (error) {
|
||||
logger.error('Failed to list backups', error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete old backups based on retention policy
|
||||
*/
|
||||
async cleanupOldBackups(retentionDays = 30) {
|
||||
const backups = await this.listBackups();
|
||||
const cutoffDate = new Date();
|
||||
cutoffDate.setDate(cutoffDate.getDate() - retentionDays);
|
||||
|
||||
let deletedCount = 0;
|
||||
let freedSpace = 0;
|
||||
|
||||
for (const backup of backups) {
|
||||
if (backup.created < cutoffDate) {
|
||||
try {
|
||||
await fs.unlink(backup.path);
|
||||
deletedCount++;
|
||||
freedSpace += backup.size;
|
||||
logger.info('Deleted old backup', { id: backup.id });
|
||||
} catch (error) {
|
||||
logger.error('Failed to delete backup', {
|
||||
id: backup.id,
|
||||
error: error.message
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { deletedCount, freedSpace };
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract archive
|
||||
*/
|
||||
async extractArchive(archivePath, targetDir) {
|
||||
await fs.mkdir(targetDir, { recursive: true });
|
||||
|
||||
const command = `tar -xzf "${archivePath}" -C "${targetDir}"`;
|
||||
await execAsync(command);
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify backup metadata
|
||||
*/
|
||||
async verifyBackupMetadata(backupDir) {
|
||||
const metadataPath = path.join(backupDir, 'metadata.json');
|
||||
const metadata = JSON.parse(await fs.readFile(metadataPath, 'utf8'));
|
||||
|
||||
// Verify compatibility
|
||||
// Add version checks here if needed
|
||||
|
||||
return metadata;
|
||||
}
|
||||
|
||||
/**
|
||||
* Restore MongoDB
|
||||
*/
|
||||
async restoreMongoDB(backupDir) {
|
||||
const mongoDir = path.join(backupDir, 'mongodb');
|
||||
const dumpPath = path.join(mongoDir, 'dump');
|
||||
|
||||
try {
|
||||
// Check if mongodump backup exists
|
||||
await fs.access(dumpPath);
|
||||
|
||||
const uri = config.mongodb.uri;
|
||||
const command = `mongorestore --uri="${uri}" --dir="${dumpPath}" --drop`;
|
||||
await execAsync(command);
|
||||
|
||||
logger.info('MongoDB restore completed');
|
||||
} catch (error) {
|
||||
// Try manual restore
|
||||
logger.warn('Using manual MongoDB restore');
|
||||
await this.manualMongoRestore(mongoDir);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Manual MongoDB restore
|
||||
*/
|
||||
async manualMongoRestore(mongoDir) {
|
||||
const db = mongoose.connection.db;
|
||||
const files = await fs.readdir(mongoDir);
|
||||
|
||||
for (const file of files) {
|
||||
if (file.endsWith('.json')) {
|
||||
const collectionName = file.replace('.json', '');
|
||||
const data = JSON.parse(
|
||||
await fs.readFile(path.join(mongoDir, file), 'utf8')
|
||||
);
|
||||
|
||||
// Drop existing collection
|
||||
try {
|
||||
await db.dropCollection(collectionName);
|
||||
} catch (error) {
|
||||
// Collection might not exist
|
||||
}
|
||||
|
||||
// Insert data
|
||||
if (data.length > 0) {
|
||||
await db.collection(collectionName).insertMany(data);
|
||||
logger.info(`Restored collection: ${collectionName}`, {
|
||||
count: data.length
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Restore Redis
|
||||
*/
|
||||
async restoreRedis(backupDir) {
|
||||
const backupPath = path.join(backupDir, 'redis', 'redis-backup.json');
|
||||
const backup = JSON.parse(await fs.readFile(backupPath, 'utf8'));
|
||||
|
||||
// Clear existing data
|
||||
await this.redis.flushall();
|
||||
|
||||
// Restore each key
|
||||
for (const [key, data] of Object.entries(backup)) {
|
||||
const { type, value, ttl } = data;
|
||||
|
||||
switch (type) {
|
||||
case 'string':
|
||||
await this.redis.set(key, value);
|
||||
break;
|
||||
case 'list':
|
||||
if (value.length > 0) {
|
||||
await this.redis.rpush(key, ...value);
|
||||
}
|
||||
break;
|
||||
case 'set':
|
||||
if (value.length > 0) {
|
||||
await this.redis.sadd(key, ...value);
|
||||
}
|
||||
break;
|
||||
case 'zset':
|
||||
if (value.length > 0) {
|
||||
await this.redis.zadd(key, ...value);
|
||||
}
|
||||
break;
|
||||
case 'hash':
|
||||
if (Object.keys(value).length > 0) {
|
||||
await this.redis.hmset(key, value);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
// Restore TTL if exists
|
||||
if (ttl) {
|
||||
await this.redis.expire(key, ttl);
|
||||
}
|
||||
}
|
||||
|
||||
logger.info('Redis restore completed', {
|
||||
keys: Object.keys(backup).length
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Restore PostgreSQL
|
||||
*/
|
||||
async restorePostgreSQL(backupDir) {
|
||||
const sqlPath = path.join(backupDir, 'postgresql', 'database.sql');
|
||||
|
||||
try {
|
||||
await fs.access(sqlPath);
|
||||
|
||||
const pgConfig = {
|
||||
host: process.env.POSTGRES_HOST || 'postgres',
|
||||
port: process.env.POSTGRES_PORT || 5432,
|
||||
database: process.env.POSTGRES_DB || 'marketing_agent',
|
||||
user: process.env.POSTGRES_USER || 'postgres',
|
||||
password: process.env.POSTGRES_PASSWORD
|
||||
};
|
||||
|
||||
const command = `PGPASSWORD="${pgConfig.password}" psql -h ${pgConfig.host} -p ${pgConfig.port} -U ${pgConfig.user} -d ${pgConfig.database} -f "${sqlPath}"`;
|
||||
await execAsync(command);
|
||||
|
||||
logger.info('PostgreSQL restore completed');
|
||||
} catch (error) {
|
||||
logger.warn('PostgreSQL restore skipped', { error: error.message });
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Restore files
|
||||
*/
|
||||
async restoreFiles(backupDir) {
|
||||
const filesDir = path.join(backupDir, 'files');
|
||||
|
||||
const filesToRestore = [
|
||||
{
|
||||
source: 'gramjs_sessions',
|
||||
dest: '/app/gramjs_sessions',
|
||||
description: 'Telegram sessions'
|
||||
},
|
||||
{
|
||||
source: 'uploads',
|
||||
dest: '/app/uploads',
|
||||
description: 'User uploads'
|
||||
},
|
||||
{
|
||||
source: 'templates',
|
||||
dest: '/app/templates',
|
||||
description: 'Message templates'
|
||||
}
|
||||
];
|
||||
|
||||
for (const file of filesToRestore) {
|
||||
try {
|
||||
const sourcePath = path.join(filesDir, file.source);
|
||||
const destPath = file.dest;
|
||||
|
||||
// Check if source exists
|
||||
await fs.access(sourcePath);
|
||||
|
||||
// Remove existing directory
|
||||
try {
|
||||
await fs.rm(destPath, { recursive: true, force: true });
|
||||
} catch (error) {
|
||||
// Directory might not exist
|
||||
}
|
||||
|
||||
// Copy directory
|
||||
await this.copyDirectory(sourcePath, destPath);
|
||||
|
||||
logger.info(`Restored ${file.description}`, {
|
||||
source: sourcePath,
|
||||
dest: destPath
|
||||
});
|
||||
} catch (error) {
|
||||
logger.warn(`Failed to restore ${file.description}`, {
|
||||
error: error.message
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Encrypt backup file
|
||||
*/
|
||||
async encryptBackup(backupPath) {
|
||||
const algorithm = 'aes-256-gcm';
|
||||
const iv = crypto.randomBytes(16);
|
||||
const cipher = crypto.createCipheriv(algorithm, this.encryptionKey, iv);
|
||||
|
||||
const input = createReadStream(backupPath);
|
||||
const output = createWriteStream(`${backupPath}.enc`);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
input.pipe(cipher).pipe(output);
|
||||
|
||||
output.on('finish', async () => {
|
||||
// Save IV and auth tag
|
||||
const authTag = cipher.getAuthTag();
|
||||
const metadata = {
|
||||
iv: iv.toString('hex'),
|
||||
authTag: authTag.toString('hex')
|
||||
};
|
||||
|
||||
await fs.writeFile(
|
||||
`${backupPath}.enc.metadata`,
|
||||
JSON.stringify(metadata)
|
||||
);
|
||||
|
||||
// Remove unencrypted file
|
||||
await fs.unlink(backupPath);
|
||||
|
||||
logger.info('Backup encrypted', { path: `${backupPath}.enc` });
|
||||
resolve(`${backupPath}.enc`);
|
||||
});
|
||||
|
||||
output.on('error', reject);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Export singleton instance
|
||||
export const backupService = new BackupService();
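encryptBackup writes <archive>.enc plus an .enc.metadata file holding the IV and GCM auth tag, but no decryption routine appears in this hunk. A sketch of the counterpart under the assumption that the caller supplies the same 32-byte key kept in this.encryptionKey; the function name is illustrative:

import fs from 'fs/promises';
import crypto from 'crypto';
import { createReadStream, createWriteStream } from 'fs';
import { pipeline } from 'stream/promises';

// Hypothetical counterpart to BackupService.encryptBackup
async function decryptBackup(encryptedPath, key) {
  const { iv, authTag } = JSON.parse(
    await fs.readFile(`${encryptedPath}.metadata`, 'utf8')
  );
  const decipher = crypto.createDecipheriv('aes-256-gcm', key, Buffer.from(iv, 'hex'));
  decipher.setAuthTag(Buffer.from(authTag, 'hex'));

  const outputPath = encryptedPath.replace(/\.enc$/, '');
  await pipeline(createReadStream(encryptedPath), decipher, createWriteStream(outputPath));
  return outputPath;
}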
|
||||
@@ -0,0 +1,676 @@
|
||||
import { Parser } from 'json2csv';
|
||||
import csvParser from 'csv-parser';
|
||||
import Excel from 'exceljs';
|
||||
import { logger } from '../utils/logger.js';
|
||||
import { User } from '../models/User.js';
|
||||
import mongoose from 'mongoose';
|
||||
|
||||
/**
|
||||
* Data Import/Export Service
|
||||
*/
|
||||
export class DataExchangeService {
|
||||
constructor() {
|
||||
this.supportedFormats = ['csv', 'json', 'excel'];
|
||||
this.exportConfigs = this.initializeExportConfigs();
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize export configurations for different entities
|
||||
*/
|
||||
initializeExportConfigs() {
|
||||
return {
|
||||
users: {
|
||||
model: 'User',
|
||||
fields: ['username', 'email', 'role', 'status', 'lastLogin', 'createdAt'],
|
||||
headers: {
|
||||
username: 'Username',
|
||||
email: 'Email',
|
||||
role: 'Role',
|
||||
status: 'Status',
|
||||
lastLogin: 'Last Login',
|
||||
createdAt: 'Created Date'
|
||||
}
|
||||
},
|
||||
campaigns: {
|
||||
model: 'Campaign',
|
||||
fields: ['name', 'status', 'startDate', 'endDate', 'targetAudience', 'messageSent', 'conversions'],
|
||||
headers: {
|
||||
name: 'Campaign Name',
|
||||
status: 'Status',
|
||||
startDate: 'Start Date',
|
||||
endDate: 'End Date',
|
||||
targetAudience: 'Target Audience',
|
||||
messageSent: 'Messages Sent',
|
||||
conversions: 'Conversions'
|
||||
}
|
||||
},
|
||||
messages: {
|
||||
model: 'Message',
|
||||
fields: ['campaignId', 'recipientId', 'content', 'status', 'sentAt', 'deliveredAt', 'readAt'],
|
||||
headers: {
|
||||
campaignId: 'Campaign ID',
|
||||
recipientId: 'Recipient ID',
|
||||
content: 'Content',
|
||||
status: 'Status',
|
||||
sentAt: 'Sent Time',
|
||||
deliveredAt: 'Delivered Time',
|
||||
readAt: 'Read Time'
|
||||
}
|
||||
},
|
||||
contacts: {
|
||||
model: 'Contact',
|
||||
fields: ['telegramId', 'username', 'firstName', 'lastName', 'phoneNumber', 'tags', 'groups'],
|
||||
headers: {
|
||||
telegramId: 'Telegram ID',
|
||||
username: 'Username',
|
||||
firstName: 'First Name',
|
||||
lastName: 'Last Name',
|
||||
phoneNumber: 'Phone Number',
|
||||
tags: 'Tags',
|
||||
groups: 'Groups'
|
||||
}
|
||||
},
|
||||
analytics: {
|
||||
model: 'Analytics',
|
||||
fields: ['date', 'messagesSent', 'messagesDelivered', 'messagesRead', 'conversions', 'revenue'],
|
||||
headers: {
|
||||
date: 'Date',
|
||||
messagesSent: 'Messages Sent',
|
||||
messagesDelivered: 'Messages Delivered',
|
||||
messagesRead: 'Messages Read',
|
||||
conversions: 'Conversions',
|
||||
revenue: 'Revenue'
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Export data to specified format
|
||||
*/
|
||||
async exportData(entityType, format, filters = {}, options = {}) {
|
||||
try {
|
||||
logger.info(`Exporting ${entityType} data to ${format}`, { filters });
|
||||
|
||||
// Validate entity type
|
||||
if (!this.exportConfigs[entityType]) {
|
||||
throw new Error(`Unsupported entity type: ${entityType}`);
|
||||
}
|
||||
|
||||
// Validate format
|
||||
if (!this.supportedFormats.includes(format)) {
|
||||
throw new Error(`Unsupported format: ${format}`);
|
||||
}
|
||||
|
||||
// Get data
|
||||
const data = await this.fetchData(entityType, filters, options);
|
||||
|
||||
// Convert to requested format
|
||||
let result;
|
||||
switch (format) {
|
||||
case 'csv':
|
||||
result = await this.exportToCSV(data, entityType);
|
||||
break;
|
||||
case 'json':
|
||||
result = await this.exportToJSON(data, entityType);
|
||||
break;
|
||||
case 'excel':
|
||||
result = await this.exportToExcel(data, entityType);
|
||||
break;
|
||||
}
|
||||
|
||||
logger.info(`Successfully exported ${data.length} ${entityType} records`);
|
||||
|
||||
return {
|
||||
data: result,
|
||||
count: data.length,
|
||||
format,
|
||||
mimeType: this.getMimeType(format),
|
||||
filename: this.generateFilename(entityType, format)
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error('Export failed', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Import data from file
|
||||
*/
|
||||
async importData(entityType, fileBuffer, format, options = {}) {
|
||||
try {
|
||||
logger.info(`Importing ${entityType} data from ${format}`);
|
||||
|
||||
// Validate entity type
|
||||
if (!this.exportConfigs[entityType]) {
|
||||
throw new Error(`Unsupported entity type: ${entityType}`);
|
||||
}
|
||||
|
||||
// Parse file based on format
|
||||
let parsedData;
|
||||
switch (format) {
|
||||
case 'csv':
|
||||
parsedData = await this.parseCSV(fileBuffer);
|
||||
break;
|
||||
case 'json':
|
||||
parsedData = await this.parseJSON(fileBuffer);
|
||||
break;
|
||||
case 'excel':
|
||||
parsedData = await this.parseExcel(fileBuffer);
|
||||
break;
|
||||
default:
|
||||
throw new Error(`Unsupported format: ${format}`);
|
||||
}
|
||||
|
||||
// Validate data
|
||||
const validationResult = await this.validateImportData(entityType, parsedData);
|
||||
if (!validationResult.valid) {
|
||||
return {
|
||||
success: false,
|
||||
errors: validationResult.errors,
|
||||
validRecords: 0,
|
||||
invalidRecords: validationResult.errors.length
|
||||
};
|
||||
}
|
||||
|
||||
// Import data
|
||||
const importResult = await this.processImport(entityType, parsedData, options);
|
||||
|
||||
logger.info(`Import completed`, importResult);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
...importResult
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error('Import failed', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch data from database
|
||||
*/
|
||||
async fetchData(entityType, filters = {}, options = {}) {
|
||||
const config = this.exportConfigs[entityType];
|
||||
const Model = mongoose.model(config.model);
|
||||
|
||||
const query = Model.find(filters);
|
||||
|
||||
// Apply field selection
|
||||
if (config.fields) {
|
||||
query.select(config.fields.join(' '));
|
||||
}
|
||||
|
||||
// Apply sorting
|
||||
if (options.sort) {
|
||||
query.sort(options.sort);
|
||||
}
|
||||
|
||||
// Apply pagination
|
||||
if (options.limit) {
|
||||
query.limit(options.limit);
|
||||
}
|
||||
if (options.skip) {
|
||||
query.skip(options.skip);
|
||||
}
|
||||
|
||||
const data = await query.lean().exec();
|
||||
|
||||
// Transform data if needed
|
||||
return this.transformExportData(data, entityType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Transform data for export
|
||||
*/
|
||||
transformExportData(data, entityType) {
|
||||
return data.map(record => {
|
||||
const transformed = {};
|
||||
const config = this.exportConfigs[entityType];
|
||||
|
||||
config.fields.forEach(field => {
|
||||
let value = record[field];
|
||||
|
||||
// Handle special field types
|
||||
if (value instanceof Date) {
|
||||
value = value.toISOString();
|
||||
} else if (Array.isArray(value)) {
|
||||
value = value.join(', ');
|
||||
} else if (typeof value === 'object' && value !== null) {
|
||||
value = JSON.stringify(value);
|
||||
}
|
||||
|
||||
transformed[field] = value;
|
||||
});
|
||||
|
||||
return transformed;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Export to CSV format
|
||||
*/
|
||||
async exportToCSV(data, entityType) {
|
||||
const config = this.exportConfigs[entityType];
|
||||
const fields = config.fields.map(field => ({
|
||||
label: config.headers[field] || field,
|
||||
value: field
|
||||
}));
|
||||
|
||||
const parser = new Parser({ fields });
|
||||
return parser.parse(data);
|
||||
}
|
||||
|
||||
/**
|
||||
* Export to JSON format
|
||||
*/
|
||||
async exportToJSON(data, entityType) {
|
||||
return JSON.stringify({
|
||||
entityType,
|
||||
exportDate: new Date().toISOString(),
|
||||
count: data.length,
|
||||
data
|
||||
}, null, 2);
|
||||
}
|
||||
|
||||
/**
|
||||
* Export to Excel format
|
||||
*/
|
||||
async exportToExcel(data, entityType) {
|
||||
const workbook = new Excel.Workbook();
|
||||
const worksheet = workbook.addWorksheet(entityType);
|
||||
|
||||
const config = this.exportConfigs[entityType];
|
||||
|
||||
// Add headers
|
||||
worksheet.columns = config.fields.map(field => ({
|
||||
header: config.headers[field] || field,
|
||||
key: field,
|
||||
width: 20
|
||||
}));
|
||||
|
||||
// Add data
|
||||
worksheet.addRows(data);
|
||||
|
||||
// Style the header row
|
||||
worksheet.getRow(1).font = { bold: true };
|
||||
worksheet.getRow(1).fill = {
|
||||
type: 'pattern',
|
||||
pattern: 'solid',
|
||||
fgColor: { argb: 'FFE0E0E0' }
|
||||
};
|
||||
|
||||
// Generate buffer
|
||||
return await workbook.xlsx.writeBuffer();
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse CSV file
|
||||
*/
|
||||
  async parseCSV(buffer) {
    // require() is unavailable inside an ES module ("type": "module"), so load PassThrough via dynamic import
    const { PassThrough } = await import('stream');

    return new Promise((resolve, reject) => {
      const results = [];
      const bufferStream = new PassThrough();
      bufferStream.end(buffer);
|
||||
bufferStream
|
||||
.pipe(csvParser())
|
||||
.on('data', (data) => results.push(data))
|
||||
.on('end', () => resolve(results))
|
||||
.on('error', reject);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse JSON file
|
||||
*/
|
||||
async parseJSON(buffer) {
|
||||
const jsonStr = buffer.toString('utf8');
|
||||
const parsed = JSON.parse(jsonStr);
|
||||
|
||||
// Handle both array and object with data property
|
||||
if (Array.isArray(parsed)) {
|
||||
return parsed;
|
||||
} else if (parsed.data && Array.isArray(parsed.data)) {
|
||||
return parsed.data;
|
||||
} else {
|
||||
throw new Error('Invalid JSON format. Expected array or object with data property');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse Excel file
|
||||
*/
|
||||
async parseExcel(buffer) {
|
||||
const workbook = new Excel.Workbook();
|
||||
await workbook.xlsx.load(buffer);
|
||||
|
||||
const worksheet = workbook.worksheets[0];
|
||||
if (!worksheet) {
|
||||
throw new Error('No worksheet found in Excel file');
|
||||
}
|
||||
|
||||
const data = [];
|
||||
const headers = [];
|
||||
|
||||
// Get headers from first row
|
||||
worksheet.getRow(1).eachCell((cell, colNumber) => {
|
||||
headers[colNumber - 1] = cell.value;
|
||||
});
|
||||
|
||||
// Get data from remaining rows
|
||||
worksheet.eachRow((row, rowNumber) => {
|
||||
if (rowNumber === 1) return; // Skip header row
|
||||
|
||||
const record = {};
|
||||
row.eachCell((cell, colNumber) => {
|
||||
const header = headers[colNumber - 1];
|
||||
if (header) {
|
||||
record[header] = cell.value;
|
||||
}
|
||||
});
|
||||
|
||||
if (Object.keys(record).length > 0) {
|
||||
data.push(record);
|
||||
}
|
||||
});
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate import data
|
||||
*/
|
||||
async validateImportData(entityType, data) {
|
||||
const errors = [];
|
||||
const config = this.exportConfigs[entityType];
|
||||
|
||||
data.forEach((record, index) => {
|
||||
const recordErrors = [];
|
||||
|
||||
// Check required fields
|
||||
const requiredFields = this.getRequiredFields(entityType);
|
||||
requiredFields.forEach(field => {
|
||||
if (!record[field]) {
|
||||
recordErrors.push(`Missing required field: ${field}`);
|
||||
}
|
||||
});
|
||||
|
||||
// Validate data types and formats
|
||||
Object.entries(record).forEach(([field, value]) => {
|
||||
const validationError = this.validateField(entityType, field, value);
|
||||
if (validationError) {
|
||||
recordErrors.push(validationError);
|
||||
}
|
||||
});
|
||||
|
||||
if (recordErrors.length > 0) {
|
||||
errors.push({
|
||||
row: index + 1,
|
||||
errors: recordErrors,
|
||||
data: record
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
valid: errors.length === 0,
|
||||
errors
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Process import
|
||||
*/
|
||||
async processImport(entityType, data, options = {}) {
|
||||
const Model = mongoose.model(this.exportConfigs[entityType].model);
|
||||
const results = {
|
||||
created: 0,
|
||||
updated: 0,
|
||||
skipped: 0,
|
||||
errors: []
|
||||
};
|
||||
|
||||
// Use bulk operations for better performance
|
||||
const bulkOps = [];
|
||||
|
||||
for (const record of data) {
|
||||
try {
|
||||
// Transform import data
|
||||
const transformed = await this.transformImportData(entityType, record);
|
||||
|
||||
if (options.updateExisting) {
|
||||
// Update or insert
|
||||
const filter = this.getUniqueFilter(entityType, transformed);
|
||||
bulkOps.push({
|
||||
updateOne: {
|
||||
filter,
|
||||
update: { $set: transformed },
|
||||
upsert: true
|
||||
}
|
||||
});
|
||||
} else {
|
||||
// Insert only
|
||||
bulkOps.push({
|
||||
insertOne: {
|
||||
document: transformed
|
||||
}
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
results.errors.push({
|
||||
record,
|
||||
error: error.message
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (bulkOps.length > 0) {
|
||||
const bulkResult = await Model.bulkWrite(bulkOps, { ordered: false });
|
||||
results.created = bulkResult.insertedCount || 0;
|
||||
results.updated = bulkResult.modifiedCount || 0;
|
||||
results.skipped = data.length - results.created - results.updated - results.errors.length;
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Transform import data
|
||||
*/
|
||||
async transformImportData(entityType, record) {
|
||||
const transformed = {};
|
||||
|
||||
// Map headers back to field names
|
||||
const config = this.exportConfigs[entityType];
|
||||
const reverseHeaders = {};
|
||||
Object.entries(config.headers).forEach(([field, header]) => {
|
||||
reverseHeaders[header] = field;
|
||||
});
|
||||
|
||||
Object.entries(record).forEach(([key, value]) => {
|
||||
const field = reverseHeaders[key] || key;
|
||||
|
||||
// Handle special transformations
|
||||
if (field === 'tags' || field === 'groups') {
|
||||
// Convert comma-separated string to array
|
||||
if (typeof value === 'string') {
|
||||
value = value.split(',').map(v => v.trim()).filter(v => v);
|
||||
}
|
||||
} else if (field.endsWith('Date') || field.endsWith('At')) {
|
||||
// Parse dates
|
||||
if (value && typeof value === 'string') {
|
||||
value = new Date(value);
|
||||
}
|
||||
}
|
||||
|
||||
transformed[field] = value;
|
||||
});
|
||||
|
||||
return transformed;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get required fields for entity type
|
||||
*/
|
||||
getRequiredFields(entityType) {
|
||||
const requiredFieldsMap = {
|
||||
users: ['username', 'email'],
|
||||
campaigns: ['name'],
|
||||
messages: ['campaignId', 'recipientId', 'content'],
|
||||
contacts: ['telegramId'],
|
||||
analytics: ['date']
|
||||
};
|
||||
|
||||
return requiredFieldsMap[entityType] || [];
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate field value
|
||||
*/
|
||||
validateField(entityType, field, value) {
|
||||
// Email validation
|
||||
if (field === 'email') {
|
||||
const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
|
||||
if (!emailRegex.test(value)) {
|
||||
return `Invalid email format: ${value}`;
|
||||
}
|
||||
}
|
||||
|
||||
// Date validation
|
||||
if (field.endsWith('Date') || field.endsWith('At')) {
|
||||
const date = new Date(value);
|
||||
if (isNaN(date.getTime())) {
|
||||
return `Invalid date format for ${field}: ${value}`;
|
||||
}
|
||||
}
|
||||
|
||||
// Status validation
|
||||
if (field === 'status') {
|
||||
const validStatuses = {
|
||||
users: ['active', 'inactive', 'banned'],
|
||||
campaigns: ['draft', 'active', 'paused', 'completed'],
|
||||
messages: ['pending', 'sent', 'delivered', 'failed', 'read']
|
||||
};
|
||||
|
||||
const validValues = validStatuses[entityType];
|
||||
if (validValues && !validValues.includes(value)) {
|
||||
return `Invalid status: ${value}. Must be one of: ${validValues.join(', ')}`;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get unique filter for entity
|
||||
*/
|
||||
getUniqueFilter(entityType, record) {
|
||||
const uniqueFieldsMap = {
|
||||
users: { email: record.email },
|
||||
campaigns: { name: record.name },
|
||||
messages: { campaignId: record.campaignId, recipientId: record.recipientId },
|
||||
contacts: { telegramId: record.telegramId },
|
||||
analytics: { date: record.date }
|
||||
};
|
||||
|
||||
return uniqueFieldsMap[entityType] || { _id: record._id };
|
||||
}
|
||||
|
||||
/**
|
||||
* Get MIME type for format
|
||||
*/
|
||||
getMimeType(format) {
|
||||
const mimeTypes = {
|
||||
csv: 'text/csv',
|
||||
json: 'application/json',
|
||||
excel: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
|
||||
};
|
||||
|
||||
return mimeTypes[format] || 'application/octet-stream';
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate filename
|
||||
*/
|
||||
generateFilename(entityType, format) {
|
||||
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
|
||||
const extension = format === 'excel' ? 'xlsx' : format;
|
||||
return `${entityType}-export-${timestamp}.${extension}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get export templates
|
||||
*/
|
||||
getExportTemplates() {
|
||||
const templates = {};
|
||||
|
||||
Object.entries(this.exportConfigs).forEach(([entityType, config]) => {
|
||||
templates[entityType] = {
|
||||
fields: config.fields,
|
||||
headers: config.headers,
|
||||
sampleData: this.generateSampleData(entityType)
|
||||
};
|
||||
});
|
||||
|
||||
return templates;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate sample data for templates
|
||||
*/
|
||||
generateSampleData(entityType) {
|
||||
const samples = {
|
||||
users: [{
|
||||
username: 'johndoe',
|
||||
email: 'john@example.com',
|
||||
role: 'user',
|
||||
status: 'active',
|
||||
lastLogin: new Date().toISOString(),
|
||||
createdAt: new Date().toISOString()
|
||||
}],
|
||||
campaigns: [{
|
||||
name: 'Summer Sale 2024',
|
||||
status: 'active',
|
||||
startDate: new Date().toISOString(),
|
||||
endDate: new Date(Date.now() + 7 * 24 * 60 * 60 * 1000).toISOString(),
|
||||
targetAudience: 'All Users',
|
||||
messageSent: 1000,
|
||||
conversions: 50
|
||||
}],
|
||||
messages: [{
|
||||
campaignId: 'campaign_123',
|
||||
recipientId: 'user_456',
|
||||
content: 'Check out our summer sale!',
|
||||
status: 'delivered',
|
||||
sentAt: new Date().toISOString(),
|
||||
deliveredAt: new Date().toISOString(),
|
||||
readAt: null
|
||||
}],
|
||||
contacts: [{
|
||||
telegramId: '123456789',
|
||||
username: 'telegram_user',
|
||||
firstName: 'John',
|
||||
lastName: 'Doe',
|
||||
phoneNumber: '+1234567890',
|
||||
tags: 'customer, vip',
|
||||
groups: 'sales, newsletter'
|
||||
}],
|
||||
analytics: [{
|
||||
date: new Date().toISOString(),
|
||||
messagesSent: 1000,
|
||||
messagesDelivered: 950,
|
||||
messagesRead: 800,
|
||||
conversions: 50,
|
||||
revenue: 5000
|
||||
}]
|
||||
};
|
||||
|
||||
return samples[entityType] || [];
|
||||
}
|
||||
}
|
||||
|
||||
// Export singleton instance
|
||||
export const dataExchangeService = new DataExchangeService();
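exportData returns the payload together with mimeType and a generated filename, which is what an HTTP download handler needs. A minimal sketch of such a handler; the route path and the ../services/dataExchange.js import path are assumptions, since the file name is not visible in this hunk:

// Hypothetical download endpoint built on exportData
import express from 'express';
import { dataExchangeService } from '../services/dataExchange.js';

const exportRouter = express.Router();

exportRouter.get('/export/:entityType', async (req, res) => {
  try {
    const format = req.query.format || 'csv';
    const result = await dataExchangeService.exportData(req.params.entityType, format);

    res.setHeader('Content-Type', result.mimeType);
    res.setHeader('Content-Disposition', `attachment; filename="${result.filename}"`);
    res.send(result.data);
  } catch (error) {
    res.status(400).json({ success: false, error: error.message });
  }
});

export default exportRouter;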
374
marketing-agent/services/api-gateway/src/services/monitoring.js
Normal file
@@ -0,0 +1,374 @@
import os from 'os';
import promClient from 'prom-client';
import { EventEmitter } from 'events';
import { logger } from '../utils/logger.js';
import { cache } from '../utils/cache.js';
import axios from 'axios';

// Create a registry for Prometheus metrics
const register = new promClient.Registry();

// Enable default metrics
promClient.collectDefaultMetrics({
  register,
  prefix: 'marketing_agent_'
});

// Custom metrics
const metrics = {
  // HTTP metrics
  httpRequestDuration: new promClient.Histogram({
    name: 'http_request_duration_seconds',
    help: 'Duration of HTTP requests in seconds',
    labelNames: ['method', 'route', 'status_code'],
    buckets: [0.1, 0.3, 0.5, 0.7, 1, 3, 5, 7, 10]
  }),

  httpRequestTotal: new promClient.Counter({
    name: 'http_requests_total',
    help: 'Total number of HTTP requests',
    labelNames: ['method', 'route', 'status_code']
  }),

  httpRequestErrors: new promClient.Counter({
    name: 'http_request_errors_total',
    help: 'Total number of HTTP request errors',
    labelNames: ['method', 'route', 'error_type']
  }),

  // Business metrics
  campaignsActive: new promClient.Gauge({
    name: 'campaigns_active',
    help: 'Number of active campaigns'
  }),

  messagesSent: new promClient.Counter({
    name: 'messages_sent_total',
    help: 'Total number of messages sent',
    labelNames: ['campaign_id', 'status']
  }),

  messageSendDuration: new promClient.Histogram({
    name: 'message_send_duration_seconds',
    help: 'Duration of message sending operations',
    labelNames: ['campaign_id'],
    buckets: [0.5, 1, 2, 5, 10, 30]
  }),

  // Rate limiting metrics
  rateLimitHits: new promClient.Counter({
    name: 'rate_limit_hits_total',
    help: 'Total number of rate limit hits',
    labelNames: ['endpoint', 'user_type']
  }),

  // Authentication metrics
  authAttempts: new promClient.Counter({
    name: 'auth_attempts_total',
    help: 'Total number of authentication attempts',
    labelNames: ['type', 'status']
  }),

  // API key metrics
  apiKeyUsage: new promClient.Counter({
    name: 'api_key_usage_total',
    help: 'Total API key usage',
    labelNames: ['key_name', 'endpoint']
  }),

  // Queue metrics
  queueSize: new promClient.Gauge({
    name: 'queue_size',
    help: 'Current queue size',
    labelNames: ['queue_name']
  }),

  queueProcessingTime: new promClient.Histogram({
    name: 'queue_processing_time_seconds',
    help: 'Queue job processing time',
    labelNames: ['queue_name', 'job_type'],
    buckets: [0.1, 0.5, 1, 5, 10, 30, 60, 120]
  }),

  // System metrics
  systemLoad: new promClient.Gauge({
    name: 'system_load_average',
    help: 'System load average',
    labelNames: ['interval']
  }),

  memoryUsage: new promClient.Gauge({
    name: 'memory_usage_bytes',
    help: 'Memory usage in bytes',
    labelNames: ['type']
  }),

  // Service health
  serviceHealth: new promClient.Gauge({
    name: 'service_health',
    help: 'Service health status (1 = healthy, 0 = unhealthy)',
    labelNames: ['service']
  })
};

// Register all custom metrics
Object.values(metrics).forEach(metric => register.registerMetric(metric));

// Alert manager
class AlertManager extends EventEmitter {
  constructor() {
    super();
    this.alerts = new Map();
    this.thresholds = {
      highErrorRate: { threshold: 0.05, window: 300000 }, // 5% error rate over 5 minutes
      highResponseTime: { threshold: 1000, window: 60000 }, // 1s response time over 1 minute
      lowMemory: { threshold: 0.9, window: 60000 }, // 90% memory usage
      highCpuUsage: { threshold: 0.8, window: 300000 }, // 80% CPU over 5 minutes
      queueBacklog: { threshold: 1000, window: 600000 }, // 1000 items in queue over 10 minutes
      authFailures: { threshold: 10, window: 300000 }, // 10 failures in 5 minutes
      serviceDown: { threshold: 3, window: 180000 } // 3 health check failures in 3 minutes
    };
  }

  checkThreshold(metric, value, threshold) {
    const alertKey = `${metric}:${threshold.threshold}`;
    const now = Date.now();

    if (value > threshold.threshold) {
      if (!this.alerts.has(alertKey)) {
        this.alerts.set(alertKey, {
          firstSeen: now,
          lastSeen: now,
          count: 1,
          value
        });
      } else {
        const alert = this.alerts.get(alertKey);
        alert.lastSeen = now;
        alert.count++;
        alert.value = value;

        if (alert.lastSeen - alert.firstSeen >= threshold.window) {
          this.emit('alert', {
            metric,
            threshold: threshold.threshold,
            value,
            duration: alert.lastSeen - alert.firstSeen,
            count: alert.count
          });
        }
      }
    } else {
      this.alerts.delete(alertKey);
    }
  }

  async sendAlert(alert) {
    logger.error('ALERT TRIGGERED', alert);

    // Store alert in cache for dashboard
    const alertData = {
      ...alert,
      timestamp: new Date().toISOString(),
      id: `${alert.metric}-${Date.now()}`
    };

    await cache.lpush('system:alerts', JSON.stringify(alertData));
    await cache.ltrim('system:alerts', 0, 99); // Keep last 100 alerts

    // Send notifications (implement based on your notification service)
    // await this.sendEmail(alert);
    // await this.sendSlack(alert);
    // await this.sendWebhook(alert);
  }
}

const alertManager = new AlertManager();

// Monitor system resources
export function startSystemMonitoring() {
  setInterval(() => {
    // CPU usage
    const cpus = os.cpus();
    const cpuUsage = cpus.reduce((acc, cpu) => {
      const total = Object.values(cpu.times).reduce((a, b) => a + b);
      const idle = cpu.times.idle;
      return acc + (1 - idle / total);
    }, 0) / cpus.length;

    // Memory usage
    const totalMem = os.totalmem();
    const freeMem = os.freemem();
    const usedMem = totalMem - freeMem;
    const memoryUsagePercent = usedMem / totalMem;

    // Update metrics
    metrics.memoryUsage.set({ type: 'used' }, usedMem);
    metrics.memoryUsage.set({ type: 'free' }, freeMem);
    metrics.memoryUsage.set({ type: 'total' }, totalMem);

    // Load average
    const loadAvg = os.loadavg();
    metrics.systemLoad.set({ interval: '1m' }, loadAvg[0]);
    metrics.systemLoad.set({ interval: '5m' }, loadAvg[1]);
    metrics.systemLoad.set({ interval: '15m' }, loadAvg[2]);

    // Check thresholds
    alertManager.checkThreshold('cpu_usage', cpuUsage, alertManager.thresholds.highCpuUsage);
    alertManager.checkThreshold('memory_usage', memoryUsagePercent, alertManager.thresholds.lowMemory);

  }, 10000); // Every 10 seconds
}
|
||||
|
||||
// Monitor service health
|
||||
export async function checkServiceHealth(services) {
|
||||
for (const [name, config] of Object.entries(services)) {
|
||||
try {
|
||||
const response = await axios.get(`${config.url}/health`, {
|
||||
timeout: 5000
|
||||
});
|
||||
|
||||
const isHealthy = response.status === 200 && response.data.status === 'healthy';
|
||||
metrics.serviceHealth.set({ service: name }, isHealthy ? 1 : 0);
|
||||
|
||||
if (!isHealthy) {
|
||||
alertManager.checkThreshold(`service_down_${name}`, 1, alertManager.thresholds.serviceDown);
|
||||
}
|
||||
} catch (error) {
|
||||
metrics.serviceHealth.set({ service: name }, 0);
|
||||
alertManager.checkThreshold(`service_down_${name}`, 1, alertManager.thresholds.serviceDown);
|
||||
|
||||
logger.error(`Health check failed for ${name}:`, error.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Export metrics for Prometheus
|
||||
export function getMetrics() {
|
||||
return register.metrics();
|
||||
}
|
||||
|
||||
// Export content type for Prometheus
|
||||
export function getMetricsContentType() {
|
||||
return register.contentType;
|
||||
}
|
||||
|
||||
// Record HTTP request metrics
|
||||
export function recordHttpMetrics(req, res, duration) {
|
||||
const route = req.route?.path || req.path;
|
||||
const method = req.method;
|
||||
const statusCode = res.statusCode;
|
||||
|
||||
metrics.httpRequestDuration.observe(
|
||||
{ method, route, status_code: statusCode },
|
||||
duration / 1000
|
||||
);
|
||||
|
||||
metrics.httpRequestTotal.inc({ method, route, status_code: statusCode });
|
||||
|
||||
if (statusCode >= 400) {
|
||||
const errorType = statusCode >= 500 ? 'server_error' : 'client_error';
|
||||
metrics.httpRequestErrors.inc({ method, route, error_type: errorType });
|
||||
|
||||
// Check error rate
|
||||
// Implementation would need to track error rate over time
|
||||
}
|
||||
}
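// --- Illustrative addition (not part of the original file) ---
// The comment above leaves error-rate tracking unimplemented. A minimal
// sketch, assuming an in-memory rolling window per gateway instance is
// acceptable: count requests and server errors, then feed the ratio into
// the existing highErrorRate threshold once per window.
let windowRequests = 0;
let windowErrors = 0;

export function trackErrorRateSample(statusCode) {
  windowRequests++;
  if (statusCode >= 500) {
    windowErrors++;
  }
}

export function startErrorRateMonitoring(intervalMs = 60000) {
  setInterval(() => {
    const errorRate = windowRequests > 0 ? windowErrors / windowRequests : 0;
    alertManager.checkThreshold('http_error_rate', errorRate, alertManager.thresholds.highErrorRate);
    windowRequests = 0;
    windowErrors = 0;
  }, intervalMs);
}
// --- End of illustrative addition ---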
// Record business metrics
export function recordBusinessMetrics(eventType, data) {
  switch (eventType) {
    case 'campaign_started':
      metrics.campaignsActive.inc();
      break;

    case 'campaign_completed':
      metrics.campaignsActive.dec();
      break;

    case 'message_sent':
      metrics.messagesSent.inc({
        campaign_id: data.campaignId,
        status: data.status
      });
      metrics.messageSendDuration.observe(
        { campaign_id: data.campaignId },
        data.duration / 1000
      );
      break;

    case 'rate_limit_hit':
      metrics.rateLimitHits.inc({
        endpoint: data.endpoint,
        user_type: data.userType || 'anonymous'
      });
      break;

    case 'auth_attempt':
      metrics.authAttempts.inc({
        type: data.type,
        status: data.status
      });

      if (data.status === 'failed') {
        // Track failed auth attempts for alerting
      }
      break;

    case 'api_key_used':
      metrics.apiKeyUsage.inc({
        key_name: data.keyName,
        endpoint: data.endpoint
      });
      break;

    case 'queue_size_update':
      metrics.queueSize.set(
        { queue_name: data.queueName },
        data.size
      );

      alertManager.checkThreshold(
        `queue_backlog_${data.queueName}`,
        data.size,
        alertManager.thresholds.queueBacklog
      );
      break;

    case 'queue_job_processed':
      metrics.queueProcessingTime.observe(
        { queue_name: data.queueName, job_type: data.jobType },
        data.duration / 1000
      );
      break;
  }
}

// Alert event handlers
alertManager.on('alert', (alert) => {
  alertManager.sendAlert(alert);
});

// Export alert manager for external use
export { alertManager };

// Dashboard data aggregation
export async function getDashboardMetrics() {
  const metrics = await register.getMetricsAsJSON();
  const alerts = await cache.lrange('system:alerts', 0, 19); // Last 20 alerts

  return {
    metrics: metrics.reduce((acc, metric) => {
      acc[metric.name] = metric.values || metric.value;
      return acc;
    }, {}),
    alerts: alerts.map(a => JSON.parse(a)),
    timestamp: new Date().toISOString()
  };
}

// Start monitoring
export function initializeMonitoring() {
  startSystemMonitoring();
  logger.info('System monitoring initialized');
}
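How these exports are wired into the gateway app is not shown in this file. The following is a minimal sketch only, assuming an Express app and assuming the module lives at src/services/monitoring.js (the import path and setup are illustrative, not taken from the repository):

import express from 'express';
import {
  initializeMonitoring,
  recordHttpMetrics,
  getMetrics,
  getMetricsContentType
} from './services/monitoring.js'; // path assumed

const app = express();
initializeMonitoring();

// Time every request and feed the duration into the HTTP metrics above.
app.use((req, res, next) => {
  const start = Date.now();
  res.on('finish', () => recordHttpMetrics(req, res, Date.now() - start));
  next();
});

// Prometheus scrape endpoint.
app.get('/metrics', async (req, res) => {
  res.set('Content-Type', getMetricsContentType());
  res.send(await getMetrics());
});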
507
marketing-agent/services/api-gateway/src/services/scheduler.js
Normal file
507
marketing-agent/services/api-gateway/src/services/scheduler.js
Normal file
@@ -0,0 +1,507 @@
import cron from 'node-cron';
import { backupService } from './backup.js';
import { logger } from '../utils/logger.js';
import { cache } from '../utils/cache.js';

/**
 * Scheduler service for automated tasks
 */
class SchedulerService {
  constructor() {
    this.jobs = new Map();
    // Cron expressions by job name (node-cron tasks do not expose their schedule)
    this.schedules = new Map();
  }

  /**
   * Initialize scheduler
   */
  async initialize() {
    logger.info('Initializing scheduler service');

    // Load saved schedules
    await this.loadSchedules();

    // Start default schedules
    this.startDefaultSchedules();
  }

  /**
   * Load saved schedules from cache
   */
  async loadSchedules() {
    try {
      // Load backup schedule
      const backupConfig = await cache.get('backup:schedule:config');
      if (backupConfig) {
        const config = JSON.parse(backupConfig);
        if (config.enabled && config.schedule) {
          this.scheduleBackup(config);
        }
      }
    } catch (error) {
      logger.error('Failed to load schedules', error);
    }
  }

  /**
   * Start default schedules
   */
  startDefaultSchedules() {
    // Daily health check report at 8 AM
    this.scheduleJob('daily-health-report', '0 8 * * *', async () => {
      await this.generateHealthReport();
    });

    // Hourly metrics aggregation
    this.scheduleJob('metrics-aggregation', '0 * * * *', async () => {
      await this.aggregateMetrics();
    });

    // Clean up old logs daily at 2 AM
    this.scheduleJob('log-cleanup', '0 2 * * *', async () => {
      await this.cleanupOldLogs();
    });

    // Session cleanup every 6 hours
    this.scheduleJob('session-cleanup', '0 */6 * * *', async () => {
      await this.cleanupExpiredSessions();
    });
  }

  /**
   * Schedule a backup job
   */
  scheduleBackup(config) {
    const { schedule, encrypt, uploadToCloud, retentionDays } = config;

    this.scheduleJob('scheduled-backup', schedule, async () => {
      logger.info('Starting scheduled backup');

      try {
        // Check if backup is already running
        const isRunning = await cache.get('backup:running');
        if (isRunning) {
          logger.warn('Scheduled backup skipped - backup already running');
          return;
        }

        // Create backup
        const result = await backupService.createFullBackup({
          description: 'Scheduled backup',
          encrypt,
          uploadToCloud,
          initiatedBy: 'scheduler'
        });

        logger.info('Scheduled backup completed', result);

        // Clean up old backups if retention is set
        if (retentionDays) {
          const cleanupResult = await backupService.cleanupOldBackups(retentionDays);
          logger.info('Backup cleanup completed', cleanupResult);
        }
      } catch (error) {
        logger.error('Scheduled backup failed', error);

        // Send alert
        await this.sendAlert({
          type: 'backup_failure',
          severity: 'critical',
          message: 'Scheduled backup failed',
          error: error.message
        });
      }
    });
  }

  /**
   * Schedule a job
   */
  scheduleJob(name, schedule, task) {
    // Stop existing job if any
    if (this.jobs.has(name)) {
      this.jobs.get(name).stop();
    }

    // Validate cron expression
    if (!cron.validate(schedule)) {
      throw new Error(`Invalid cron expression: ${schedule}`);
    }

    // Create and start job
    const job = cron.schedule(schedule, async () => {
      const startTime = Date.now();

      try {
        logger.info(`Running scheduled job: ${name}`);
        await task();

        const duration = Date.now() - startTime;
        logger.info(`Scheduled job completed: ${name}`, { duration });

        // Record job execution
        await this.recordJobExecution(name, 'success', duration);
      } catch (error) {
        const duration = Date.now() - startTime;
        logger.error(`Scheduled job failed: ${name}`, error);

        // Record job failure
        await this.recordJobExecution(name, 'failure', duration, error.message);
      }
    });

    this.jobs.set(name, job);
    this.schedules.set(name, schedule);
    logger.info(`Scheduled job registered: ${name}`, { schedule });
  }

  /**
   * Stop a scheduled job
   */
  stopJob(name) {
    const job = this.jobs.get(name);
    if (job) {
      job.stop();
      this.jobs.delete(name);
      this.schedules.delete(name);
      logger.info(`Scheduled job stopped: ${name}`);
    }
  }

  /**
   * Record job execution
   */
  async recordJobExecution(jobName, status, duration, error = null) {
    const execution = {
      jobName,
      status,
      duration,
      error,
      timestamp: new Date().toISOString()
    };

    await cache.lpush('scheduler:executions', JSON.stringify(execution));
    await cache.ltrim('scheduler:executions', 0, 999); // Keep last 1000 executions
  }

  /**
   * Generate health report
   */
  async generateHealthReport() {
    logger.info('Generating daily health report');

    try {
      // Collect system metrics
      const metrics = await this.collectSystemMetrics();

      // Get service statuses
      const services = await this.checkAllServices();

      // Get recent errors
      const errors = await this.getRecentErrors();

      // Get backup status
      const backupStatus = await this.getBackupStatus();

      const report = {
        date: new Date().toISOString(),
        system: metrics,
        services,
        errors,
        backups: backupStatus,
        alerts: await this.getActiveAlerts()
      };

      // Store report (TTL is the third argument of cache.set, in seconds)
      await cache.set(
        `health-report:${new Date().toISOString().split('T')[0]}`,
        JSON.stringify(report),
        30 * 24 * 60 * 60 // Keep for 30 days
      );

      // Send report if configured
      if (process.env.HEALTH_REPORT_EMAIL) {
        await this.sendHealthReport(report);
      }

      logger.info('Health report generated');
    } catch (error) {
      logger.error('Failed to generate health report', error);
    }
  }

  /**
   * Aggregate metrics
   */
  async aggregateMetrics() {
    logger.info('Aggregating hourly metrics');

    try {
      // Get current hour
      const now = new Date();
      const hour = now.toISOString().substring(0, 13);

      // Aggregate different metric types
      const aggregations = await Promise.all([
        this.aggregateHttpMetrics(hour),
        this.aggregateBusinessMetrics(hour),
        this.aggregateQueueMetrics(hour),
        this.aggregateSystemMetrics(hour)
      ]);

      const [http, business, queue, system] = aggregations;

      const hourlyMetrics = {
        hour,
        http,
        business,
        queue,
        system,
        timestamp: now.toISOString()
      };

      // Store aggregated metrics (TTL in seconds)
      await cache.set(
        `metrics:hourly:${hour}`,
        JSON.stringify(hourlyMetrics),
        7 * 24 * 60 * 60 // Keep for 7 days
      );

      logger.info('Metrics aggregation completed', { hour });
    } catch (error) {
      logger.error('Failed to aggregate metrics', error);
    }
  }

  /**
   * Clean up old logs
   */
  async cleanupOldLogs() {
    logger.info('Cleaning up old logs');

    try {
      const cutoffDate = new Date();
      cutoffDate.setDate(cutoffDate.getDate() - 30); // 30 days retention

      // Clean up different log types
      const logKeys = [
        'app:logs',
        'system:alerts',
        'monitoring:events',
        'scheduler:executions'
      ];

      let totalCleaned = 0;

      for (const key of logKeys) {
        const logs = await cache.lrange(key, 0, -1);
        const toKeep = [];

        for (const log of logs) {
          try {
            const parsed = JSON.parse(log);
            const logDate = new Date(parsed.timestamp || parsed.date);

            if (logDate > cutoffDate) {
              toKeep.push(log);
            }
          } catch (error) {
            // Skip invalid entries
          }
        }

        if (toKeep.length < logs.length) {
          // Replace with filtered logs
          await cache.del(key);
          if (toKeep.length > 0) {
            await cache.rpush(key, ...toKeep);
          }

          totalCleaned += logs.length - toKeep.length;
        }
      }

      logger.info('Log cleanup completed', {
        totalCleaned,
        cutoffDate: cutoffDate.toISOString()
      });
    } catch (error) {
      logger.error('Failed to cleanup logs', error);
    }
  }

  /**
   * Clean up expired sessions
   */
  async cleanupExpiredSessions() {
    logger.info('Cleaning up expired sessions');

    try {
      // Get all session keys
      const sessionKeys = await cache.keys('sess:*');
      let cleaned = 0;

      for (const key of sessionKeys) {
        const ttl = await cache.ttl(key);

        // Remove sessions with no TTL or expired
        if (ttl === -2 || ttl === -1) {
          await cache.del(key);
          cleaned++;
        }
      }

      // Clean up orphaned user sessions
      const userSessionKeys = await cache.keys('user:*:sessions');

      for (const key of userSessionKeys) {
        const sessions = await cache.smembers(key);
        const validSessions = [];

        for (const sessionId of sessions) {
          const exists = await cache.exists(`sess:${sessionId}`);
          if (exists) {
            validSessions.push(sessionId);
          }
        }

        if (validSessions.length < sessions.length) {
          await cache.del(key);
          if (validSessions.length > 0) {
            await cache.sadd(key, ...validSessions);
          }
        }
      }

      logger.info('Session cleanup completed', { cleaned });
    } catch (error) {
      logger.error('Failed to cleanup sessions', error);
    }
  }

  /**
   * Send alert
   */
  async sendAlert(alert) {
    // Store alert
    await cache.lpush('system:alerts', JSON.stringify({
      ...alert,
      timestamp: new Date().toISOString()
    }));
    await cache.ltrim('system:alerts', 0, 99);

    // Send notifications based on severity
    // Implementation depends on notification services
    logger.error('ALERT', alert);
  }

  /**
   * Helper methods for health report
   */
  async collectSystemMetrics() {
    // Implement system metrics collection
    return {
      uptime: process.uptime(),
      memory: process.memoryUsage(),
      cpu: process.cpuUsage()
    };
  }

  async checkAllServices() {
    // Implement service health checks
    return {};
  }

  async getRecentErrors() {
    // Get recent errors from logs
    const errors = await cache.lrange('app:logs', 0, 99);
    return errors
      .map(e => {
        try {
          return JSON.parse(e);
        } catch {
          return null;
        }
      })
      .filter(e => e && e.level === 'error')
      .slice(0, 10);
  }

  async getBackupStatus() {
    const lastBackup = await cache.lindex('backup:history', 0);
    return lastBackup ? JSON.parse(lastBackup) : null;
  }

  async getActiveAlerts() {
    const alerts = await cache.lrange('system:alerts', 0, 19);
    return alerts.map(a => JSON.parse(a));
  }

  async sendHealthReport(report) {
    // Implement email sending
    logger.info('Health report would be sent', {
      to: process.env.HEALTH_REPORT_EMAIL
    });
  }

  /**
   * Metrics aggregation helpers
   */
  async aggregateHttpMetrics(hour) {
    // Implement HTTP metrics aggregation
    return {
      requests: 0,
      errors: 0,
      avgResponseTime: 0
    };
  }

  async aggregateBusinessMetrics(hour) {
    // Implement business metrics aggregation
    return {
      messagesSent: 0,
      campaignsActive: 0,
      successRate: 0
    };
  }

  async aggregateQueueMetrics(hour) {
    // Implement queue metrics aggregation
    return {
      processed: 0,
      failed: 0,
      avgProcessingTime: 0
    };
  }

  async aggregateSystemMetrics(hour) {
    // Implement system metrics aggregation
    return {
      avgCpu: 0,
      avgMemory: 0,
      avgLoad: 0
    };
  }

  /**
   * Get scheduler status
   */
  getStatus() {
    const jobs = [];

    for (const [name] of this.jobs) {
      jobs.push({
        name,
        // node-cron tasks expose no public running/next-run API,
        // so report the registered cron expression instead
        schedule: this.schedules.get(name)
      });
    }

    return {
      jobs,
      totalJobs: jobs.length
    };
  }
}

// Export singleton instance
export const schedulerService = new SchedulerService();
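For reference, loadSchedules expects the backup:schedule:config cache entry to be JSON with the fields destructured in scheduleBackup. A hedged example of registering such a schedule; the values, import paths, and one-year TTL are illustrative, and a durable store may be a better home for configuration than a TTL cache:

import { cache } from '../utils/cache.js';
import { schedulerService } from './scheduler.js'; // paths assumed

// Nightly encrypted backup at 03:00 with 14-day retention (illustrative values).
const backupScheduleConfig = {
  enabled: true,
  schedule: '0 3 * * *',
  encrypt: true,
  uploadToCloud: false,
  retentionDays: 14
};

// Persist it so loadSchedules() picks it up on the next initialize(),
// and register it immediately in the current process.
await cache.set('backup:schedule:config', JSON.stringify(backupScheduleConfig), 365 * 24 * 60 * 60);
schedulerService.scheduleBackup(backupScheduleConfig);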
@@ -0,0 +1,267 @@
import axios from 'axios';
import { config } from '../config/index.js';
import { logger } from '../utils/logger.js';
import CircuitBreaker from '../utils/circuitBreaker.js';

class ServiceDiscovery {
  constructor() {
    this.services = new Map();
    this.healthStatus = new Map();
    this.circuitBreakers = new Map();

    this.initializeServices();
    this.startHealthChecks();
  }

  /**
   * Initialize services registry
   */
  initializeServices() {
    Object.entries(config.services).forEach(([name, serviceConfig]) => {
      this.services.set(name, {
        name,
        url: serviceConfig.url,
        timeout: serviceConfig.timeout,
        healthy: true,
        lastCheck: null
      });

      // Create circuit breaker for each service
      const breaker = new CircuitBreaker({
        timeout: serviceConfig.timeout,
        errorThreshold: config.circuitBreaker.errorThreshold,
        resetTimeout: config.circuitBreaker.resetTimeout
      });

      this.circuitBreakers.set(name, breaker);
    });
  }

  /**
   * Get service info
   */
  getService(serviceName) {
    const service = this.services.get(serviceName);
    if (!service) {
      throw new Error(`Service ${serviceName} not found`);
    }

    if (!service.healthy) {
      throw new Error(`Service ${serviceName} is unhealthy`);
    }

    return service;
  }

  /**
   * Get all healthy services
   */
  getHealthyServices() {
    const healthy = [];
    this.services.forEach((service, name) => {
      if (service.healthy) {
        healthy.push({ name, ...service });
      }
    });
    return healthy;
  }

  /**
   * Get circuit breaker for service
   */
  getCircuitBreaker(serviceName) {
    return this.circuitBreakers.get(serviceName);
  }

  /**
   * Make request to service with circuit breaker
   */
  async makeRequest(serviceName, options) {
    const service = this.getService(serviceName);
    const breaker = this.getCircuitBreaker(serviceName);

    if (!breaker) {
      throw new Error(`Circuit breaker not found for service ${serviceName}`);
    }

    try {
      const response = await breaker.execute(async () => {
        return await axios({
          ...options,
          baseURL: service.url,
          timeout: options.timeout || service.timeout
        });
      });

      return response.data;
    } catch (error) {
      logger.error(`Request to ${serviceName} failed:`, error);

      // Mark service as unhealthy on repeated failures
      if (breaker.isOpen()) {
        this.markServiceUnhealthy(serviceName);
      }

      throw error;
    }
  }

  /**
   * Check health of a service
   */
  async checkServiceHealth(serviceName) {
    const service = this.services.get(serviceName);
    if (!service) return false;

    try {
      const response = await axios.get(`${service.url}/health`, {
        timeout: config.healthCheck.timeout
      });

      const isHealthy = response.status === 200 &&
        response.data.status === 'healthy';

      this.updateServiceHealth(serviceName, isHealthy);
      return isHealthy;
    } catch (error) {
      logger.warn(`Health check failed for ${serviceName}:`, error.message);
      this.updateServiceHealth(serviceName, false);
      return false;
    }
  }

  /**
   * Update service health status
   */
  updateServiceHealth(serviceName, isHealthy) {
    const service = this.services.get(serviceName);
    if (!service) return;

    const previousStatus = service.healthy;
    service.healthy = isHealthy;
    service.lastCheck = new Date();

    // Update health status tracking
    const healthHistory = this.healthStatus.get(serviceName) || [];
    healthHistory.push({
      healthy: isHealthy,
      timestamp: new Date()
    });

    // Keep only last 10 health checks
    if (healthHistory.length > 10) {
      healthHistory.shift();
    }
    this.healthStatus.set(serviceName, healthHistory);

    // Log status change
    if (previousStatus !== isHealthy) {
      if (isHealthy) {
        logger.info(`Service ${serviceName} is now healthy`);
      } else {
        logger.error(`Service ${serviceName} is now unhealthy`);
      }
    }
  }

  /**
   * Mark service as unhealthy
   */
  markServiceUnhealthy(serviceName) {
    this.updateServiceHealth(serviceName, false);
  }

  /**
   * Start periodic health checks
   */
  startHealthChecks() {
    setInterval(async () => {
      for (const [serviceName] of this.services) {
        try {
          await this.checkServiceHealth(serviceName);
        } catch (error) {
          logger.error(`Health check error for ${serviceName}:`, error);
        }
      }
    }, config.healthCheck.interval);

    // Initial health check
    this.checkAllServices();
  }

  /**
   * Check all services health
   */
  async checkAllServices() {
    const promises = [];
    for (const [serviceName] of this.services) {
      promises.push(this.checkServiceHealth(serviceName));
    }
    await Promise.allSettled(promises);
  }

  /**
   * Get service status summary
   */
  getServiceStatus() {
    const status = {};

    this.services.forEach((service, name) => {
      const healthHistory = this.healthStatus.get(name) || [];
      const breaker = this.circuitBreakers.get(name);

      status[name] = {
        healthy: service.healthy,
        lastCheck: service.lastCheck,
        circuitBreakerState: breaker ? breaker.getState() : 'unknown',
        healthHistory: healthHistory.slice(-5), // Last 5 checks
        url: service.url
      };
    });

    return status;
  }

  /**
   * Get aggregated health status
   */
  getAggregatedHealth() {
    let totalServices = 0;
    let healthyServices = 0;

    this.services.forEach(service => {
      totalServices++;
      if (service.healthy) {
        healthyServices++;
      }
    });

    const healthPercentage = totalServices > 0
      ? (healthyServices / totalServices) * 100
      : 0;

    return {
      status: healthPercentage >= 80 ? 'healthy' :
        healthPercentage >= 50 ? 'degraded' : 'unhealthy',
      healthyServices,
      totalServices,
      healthPercentage: Math.round(healthPercentage),
      services: this.getServiceStatus()
    };
  }

  /**
   * Force refresh service status
   */
  async refreshServiceStatus(serviceName) {
    if (serviceName) {
      return await this.checkServiceHealth(serviceName);
    } else {
      await this.checkAllServices();
      return this.getServiceStatus();
    }
  }
}

// Create singleton instance
export const serviceDiscovery = new ServiceDiscovery();
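A sketch of how a gateway route might call through this registry. The 'campaign' service name, route path, and import path are hypothetical, since the keys of config.services are not shown in this file:

import { Router } from 'express';
import { serviceDiscovery } from '../services/serviceDiscovery.js'; // path assumed

const router = Router();

router.get('/campaigns/:id', async (req, res, next) => {
  try {
    // Goes through the per-service circuit breaker and returns response.data.
    const campaign = await serviceDiscovery.makeRequest('campaign', {
      method: 'GET',
      url: `/campaigns/${req.params.id}`
    });
    res.json(campaign);
  } catch (error) {
    // CIRCUIT_OPEN and unhealthy-service errors surface here.
    next(error);
  }
});

export default router;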
294
marketing-agent/services/api-gateway/src/utils/cache.js
Normal file
294
marketing-agent/services/api-gateway/src/utils/cache.js
Normal file
@@ -0,0 +1,294 @@
import NodeCache from 'node-cache';
import Redis from 'ioredis';
import { config } from '../config/index.js';
import { logger } from './logger.js';

class CacheManager {
  constructor() {
    // Local in-memory cache for fast access
    this.localCache = new NodeCache({
      stdTTL: 600, // 10 minutes default
      checkperiod: 120, // Check for expired keys every 2 minutes
      useClones: false
    });

    // Redis for distributed cache
    this.redis = new Redis({
      host: config.redis.host,
      port: config.redis.port,
      password: config.redis.password,
      retryStrategy: (times) => {
        const delay = Math.min(times * 50, 2000);
        return delay;
      }
    });

    this.redis.on('connect', () => {
      logger.info('Connected to Redis cache');
    });

    this.redis.on('error', (err) => {
      logger.error('Redis cache error:', err);
    });

    // Cache statistics
    this.stats = {
      hits: 0,
      misses: 0,
      sets: 0,
      deletes: 0
    };
  }

  /**
   * Get value from cache
   */
  async get(key) {
    try {
      // Try local cache first
      const localValue = this.localCache.get(key);
      if (localValue !== undefined) {
        this.stats.hits++;
        return localValue;
      }

      // Try Redis
      const redisValue = await this.redis.get(key);
      if (redisValue) {
        this.stats.hits++;
        // Store in local cache for faster access
        this.localCache.set(key, redisValue, 300); // 5 minutes
        return redisValue;
      }

      this.stats.misses++;
      return null;
    } catch (error) {
      logger.error('Cache get error:', error);
      this.stats.misses++;
      return null;
    }
  }

  /**
   * Set value in cache
   */
  async set(key, value, ttl = config.redis.ttl) {
    try {
      // Store in both caches
      this.localCache.set(key, value, Math.min(ttl, 600)); // Max 10 minutes in local
      await this.redis.set(key, value, 'EX', ttl);
      this.stats.sets++;
      return true;
    } catch (error) {
      logger.error('Cache set error:', error);
      return false;
    }
  }

  /**
   * Delete value from cache
   */
  async del(key) {
    try {
      this.localCache.del(key);
      await this.redis.del(key);
      this.stats.deletes++;
      return true;
    } catch (error) {
      logger.error('Cache delete error:', error);
      return false;
    }
  }

  /**
   * Delete multiple keys by pattern
   */
  async delPattern(pattern) {
    try {
      // Clear matching keys from local cache
      const localKeys = this.localCache.keys();
      const regex = new RegExp(pattern.replace('*', '.*'));
      localKeys.forEach(key => {
        if (regex.test(key)) {
          this.localCache.del(key);
        }
      });

      // Clear from Redis
      const keys = await this.redis.keys(pattern);
      if (keys.length > 0) {
        await this.redis.del(...keys);
      }

      this.stats.deletes += keys.length;
      return keys.length;
    } catch (error) {
      logger.error('Cache pattern delete error:', error);
      return 0;
    }
  }

  /**
   * Check if key exists
   */
  async exists(key) {
    if (this.localCache.has(key)) {
      return true;
    }
    return await this.redis.exists(key) === 1;
  }

  /**
   * Get multiple values
   */
  async mget(keys) {
    try {
      const results = {};
      const missingKeys = [];

      // Check local cache first
      for (const key of keys) {
        const value = this.localCache.get(key);
        if (value !== undefined) {
          results[key] = value;
        } else {
          missingKeys.push(key);
        }
      }

      // Get missing from Redis
      if (missingKeys.length > 0) {
        const values = await this.redis.mget(...missingKeys);
        missingKeys.forEach((key, index) => {
          if (values[index]) {
            results[key] = values[index];
            // Cache locally
            this.localCache.set(key, values[index], 300);
          }
        });
      }

      return results;
    } catch (error) {
      logger.error('Cache mget error:', error);
      return {};
    }
  }

  /**
   * Set with expiration
   */
  async setex(key, seconds, value) {
    return this.set(key, value, seconds);
  }

  /**
   * Increment counter
   */
  async incr(key) {
    try {
      const value = await this.redis.incr(key);
      this.localCache.set(key, value, 60); // 1 minute
      return value;
    } catch (error) {
      logger.error('Cache incr error:', error);
      return null;
    }
  }

  /**
   * Add to set
   */
  async sadd(key, ...members) {
    try {
      return await this.redis.sadd(key, ...members);
    } catch (error) {
      logger.error('Cache sadd error:', error);
      return 0;
    }
  }

  /**
   * Get set members
   */
  async smembers(key) {
    try {
      return await this.redis.smembers(key);
    } catch (error) {
      logger.error('Cache smembers error:', error);
      return [];
    }
  }

  /**
   * Cache middleware for Express routes
   */
  middleware(keyGenerator, ttl = 300) {
    return async (req, res, next) => {
      const key = typeof keyGenerator === 'function'
        ? keyGenerator(req)
        : `cache:${req.method}:${req.originalUrl}`;

      const cached = await this.get(key);
      if (cached) {
        try {
          const data = JSON.parse(cached);
          return res.json(data);
        } catch {
          return res.send(cached);
        }
      }

      // Store original send
      const originalSend = res.send;
      res.send = function(body) {
        // Cache successful responses only
        if (res.statusCode === 200) {
          cache.set(key, body, ttl).catch(err => {
            logger.error('Failed to cache response:', err);
          });
        }
        originalSend.call(this, body);
      };

      next();
    };
  }

  /**
   * Get cache statistics
   */
  getStats() {
    const localStats = this.localCache.getStats();
    return {
      ...this.stats,
      hitRate: this.stats.hits / (this.stats.hits + this.stats.misses) || 0,
      local: {
        keys: localStats.keys,
        hits: localStats.hits,
        misses: localStats.misses
      }
    };
  }

  /**
   * Clear all cache
   */
  async flush() {
    this.localCache.flushAll();
    await this.redis.flushdb();
    logger.warn('Cache flushed');
  }

  /**
   * Close connections
   */
  async close() {
    this.localCache.close();
    await this.redis.quit();
  }
}

// Create singleton instance
export const cache = new CacheManager();
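Note that the monitoring and scheduler code above calls list and key helpers (lpush, ltrim, lrange, rpush, lindex, keys, ttl) that CacheManager does not define. A minimal sketch of the missing pass-through wrappers, written in the same style as sadd/smembers, if they were added inside the class (rpush and lindex would follow the same pattern):

  // Sketch only - straight pass-throughs to ioredis for the list/key calls
  // used by monitoring.js and scheduler.js.
  async lpush(key, ...values) {
    try {
      return await this.redis.lpush(key, ...values);
    } catch (error) {
      logger.error('Cache lpush error:', error);
      return 0;
    }
  }

  async ltrim(key, start, stop) {
    try {
      return await this.redis.ltrim(key, start, stop);
    } catch (error) {
      logger.error('Cache ltrim error:', error);
      return null;
    }
  }

  async lrange(key, start, stop) {
    try {
      return await this.redis.lrange(key, start, stop);
    } catch (error) {
      logger.error('Cache lrange error:', error);
      return [];
    }
  }

  async keys(pattern) {
    try {
      return await this.redis.keys(pattern);
    } catch (error) {
      logger.error('Cache keys error:', error);
      return [];
    }
  }

  async ttl(key) {
    try {
      return await this.redis.ttl(key);
    } catch (error) {
      logger.error('Cache ttl error:', error);
      return -2;
    }
  }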
203
marketing-agent/services/api-gateway/src/utils/circuitBreaker.js
Normal file
203
marketing-agent/services/api-gateway/src/utils/circuitBreaker.js
Normal file
@@ -0,0 +1,203 @@
import { EventEmitter } from 'events';
import { logger } from './logger.js';

export default class CircuitBreaker extends EventEmitter {
  constructor(options = {}) {
    super();

    this.timeout = options.timeout || 10000;
    this.errorThreshold = options.errorThreshold || 50;
    this.resetTimeout = options.resetTimeout || 30000;
    this.monitoringPeriod = options.monitoringPeriod || 10000;

    this.state = 'CLOSED'; // CLOSED, OPEN, HALF_OPEN
    this.failures = 0;
    this.successes = 0;
    this.requests = 0;
    this.lastFailureTime = null;
    this.nextAttempt = null;

    this.stats = {
      totalRequests: 0,
      totalFailures: 0,
      totalSuccesses: 0,
      totalTimeouts: 0,
      consecutiveFailures: 0,
      lastError: null
    };

    this.resetStats();
  }

  /**
   * Execute function with circuit breaker protection
   */
  async execute(fn) {
    if (this.state === 'OPEN') {
      if (Date.now() < this.nextAttempt) {
        const error = new Error('Circuit breaker is OPEN');
        error.code = 'CIRCUIT_OPEN';
        throw error;
      }

      // Try half-open state
      this.state = 'HALF_OPEN';
      logger.info('Circuit breaker entering HALF_OPEN state');
    }

    this.requests++;
    this.stats.totalRequests++;

    try {
      const result = await this.callWithTimeout(fn);
      this.onSuccess();
      return result;
    } catch (error) {
      this.onFailure(error);
      throw error;
    }
  }

  /**
   * Call function with timeout
   */
  async callWithTimeout(fn) {
    return new Promise(async (resolve, reject) => {
      const timer = setTimeout(() => {
        const error = new Error('Operation timeout');
        error.code = 'TIMEOUT';
        this.stats.totalTimeouts++;
        reject(error);
      }, this.timeout);

      try {
        const result = await fn();
        clearTimeout(timer);
        resolve(result);
      } catch (error) {
        clearTimeout(timer);
        reject(error);
      }
    });
  }

  /**
   * Handle successful request
   */
  onSuccess() {
    this.failures = 0;
    this.successes++;
    this.stats.totalSuccesses++;
    this.stats.consecutiveFailures = 0;

    if (this.state === 'HALF_OPEN') {
      this.state = 'CLOSED';
      this.emit('state-change', 'CLOSED');
      logger.info('Circuit breaker is now CLOSED');
    }
  }

  /**
   * Handle failed request
   */
  onFailure(error) {
    this.failures++;
    this.stats.totalFailures++;
    this.stats.consecutiveFailures++;
    this.stats.lastError = error;
    this.lastFailureTime = Date.now();

    if (this.state === 'HALF_OPEN') {
      this.open();
      return;
    }

    const errorRate = (this.failures / this.requests) * 100;

    if (errorRate >= this.errorThreshold && this.requests >= 5) {
      this.open();
    }
  }

  /**
   * Open circuit breaker
   */
  open() {
    this.state = 'OPEN';
    this.nextAttempt = Date.now() + this.resetTimeout;
    this.emit('state-change', 'OPEN');
    logger.error('Circuit breaker is now OPEN', {
      failures: this.failures,
      requests: this.requests,
      errorRate: `${(this.failures / this.requests * 100).toFixed(2)}%`
    });
  }

  /**
   * Force close circuit breaker
   */
  close() {
    this.state = 'CLOSED';
    this.failures = 0;
    this.successes = 0;
    this.requests = 0;
    this.emit('state-change', 'CLOSED');
    logger.info('Circuit breaker forcefully CLOSED');
  }

  /**
   * Reset statistics
   */
  resetStats() {
    // Reset monitoring window stats
    setTimeout(() => {
      this.failures = 0;
      this.successes = 0;
      this.requests = 0;
      this.resetStats();
    }, this.monitoringPeriod);
  }

  /**
   * Get current state
   */
  getState() {
    return this.state;
  }

  /**
   * Check if circuit is open
   */
  isOpen() {
    return this.state === 'OPEN';
  }

  /**
   * Get statistics
   */
  getStats() {
    const currentErrorRate = this.requests > 0
      ? (this.failures / this.requests * 100).toFixed(2)
      : 0;

    return {
      state: this.state,
      currentErrorRate: `${currentErrorRate}%`,
      threshold: `${this.errorThreshold}%`,
      ...this.stats,
      monitoring: {
        requests: this.requests,
        failures: this.failures,
        successes: this.successes
      }
    };
  }

  /**
   * Health check
   */
  isHealthy() {
    return this.state === 'CLOSED' ||
      (this.state === 'HALF_OPEN' && this.stats.consecutiveFailures === 0);
  }
}
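A small standalone usage sketch of this class (the target URL and import path are illustrative); serviceDiscovery above wires it up the same way through breaker.execute:

import axios from 'axios';
import CircuitBreaker from './circuitBreaker.js'; // path assumed

// Trip at a 50% error rate within the monitoring window, cool down for 30s.
const breaker = new CircuitBreaker({ timeout: 5000, errorThreshold: 50, resetTimeout: 30000 });

breaker.on('state-change', (state) => {
  console.log(`breaker state: ${state}`);
});

export async function fetchUpstreamHealth() {
  try {
    const response = await breaker.execute(() => axios.get('http://upstream.internal/health'));
    return response.data;
  } catch (error) {
    if (error.code === 'CIRCUIT_OPEN') {
      // Fail fast while the breaker is open instead of hammering the upstream.
      return { status: 'unavailable' };
    }
    throw error;
  }
}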
137
marketing-agent/services/api-gateway/src/utils/logger.js
Normal file
137
marketing-agent/services/api-gateway/src/utils/logger.js
Normal file
@@ -0,0 +1,137 @@
import winston from 'winston';
import DailyRotateFile from 'winston-daily-rotate-file';
import { config } from '../config/index.js';

const { combine, timestamp, json, printf, colorize, errors } = winston.format;

// Custom format for console output
const consoleFormat = printf(({ level, message, timestamp, ...metadata }) => {
  let msg = `${timestamp} [${level}]: ${message}`;
  if (Object.keys(metadata).length > 0) {
    msg += ` ${JSON.stringify(metadata)}`;
  }
  return msg;
});

// Create transport for daily rotate file
const fileRotateTransport = new DailyRotateFile({
  filename: 'logs/api-gateway-%DATE%.log',
  datePattern: 'YYYY-MM-DD',
  maxSize: '20m',
  maxFiles: '14d',
  format: combine(
    timestamp(),
    errors({ stack: true }),
    json()
  )
});

// Create transport for error logs
const errorFileTransport = new DailyRotateFile({
  level: 'error',
  filename: 'logs/api-gateway-error-%DATE%.log',
  datePattern: 'YYYY-MM-DD',
  maxSize: '20m',
  maxFiles: '30d',
  format: combine(
    timestamp(),
    errors({ stack: true }),
    json()
  )
});

// Create the logger
export const logger = winston.createLogger({
  level: config.logging.level || 'info',
  format: combine(
    timestamp(),
    errors({ stack: true }),
    json()
  ),
  defaultMeta: { service: 'api-gateway' },
  transports: [
    fileRotateTransport,
    errorFileTransport
  ]
});

// Add console transport in non-production environments
if (process.env.NODE_ENV !== 'production') {
  logger.add(new winston.transports.Console({
    format: combine(
      colorize(),
      timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
      consoleFormat
    )
  }));
}

// Create a stream object for Morgan HTTP logger
logger.stream = {
  write: (message) => {
    logger.info(message.trim());
  }
};

// Request logging helper
export const logRequest = (req, res, responseTime) => {
  const logData = {
    method: req.method,
    url: req.originalUrl,
    status: res.statusCode,
    responseTime: `${responseTime}ms`,
    ip: req.ip,
    userAgent: req.get('user-agent'),
    requestId: req.id
  };

  if (res.statusCode >= 400) {
    logger.error('Request failed', logData);
  } else {
    logger.info('Request completed', logData);
  }
};

// Service call logging
export const logServiceCall = (service, method, url, status, duration) => {
  const logData = {
    service,
    method,
    url,
    status,
    duration: `${duration}ms`
  };

  if (status >= 400) {
    logger.error('Service call failed', logData);
  } else {
    logger.info('Service call completed', logData);
  }
};

// Error logging helper
export const logError = (message, error, metadata = {}) => {
  logger.error(message, {
    error: {
      message: error.message,
      stack: error.stack,
      code: error.code
    },
    ...metadata
  });
};

// Warning logging helper
export const logWarning = (message, metadata = {}) => {
  logger.warn(message, metadata);
};

// Info logging helper
export const logInfo = (message, metadata = {}) => {
  logger.info(message, metadata);
};

// Debug logging helper
export const logDebug = (message, metadata = {}) => {
  logger.debug(message, metadata);
};
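The stream object above is what morgan (already listed as a dependency of this package) expects; a minimal wiring sketch, with the logError helper shown alongside (import path assumed):

import express from 'express';
import morgan from 'morgan';
import { logger, logError } from './utils/logger.js'; // path assumed

const app = express();

// Route morgan's access log lines through winston instead of stdout.
app.use(morgan('combined', { stream: logger.stream }));

// Structured error logging that preserves message, stack, and code.
try {
  JSON.parse('not json');
} catch (err) {
  logError('Failed to parse payload', err, { requestId: 'example' });
}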