update
This commit is contained in:
256
config/bullmq.js
Normal file
256
config/bullmq.js
Normal file
@@ -0,0 +1,256 @@
|
||||
const { Queue, Worker } = require('bullmq'); // NOTE(review): Worker appears unused in this file — confirm before removing
const Redis = require('ioredis');
const config = require('./config.json');

/**
 * BullMQ Connection Configuration - Direct Redis connection
 *
 * Builds shared connection options from config.redis and opens one ioredis
 * connection used for direct Redis operations.
 */
const redisConfig = config.redis;

// Connection options for BullMQ (BullMQ will create connections using these options)
// Only cluster[0] is used — presumably the master node; failover to
// cluster[1] is not handled here (TODO confirm this is intended).
const connectionOptions = {
  host: redisConfig.cluster[0].host,
  port: redisConfig.cluster[0].port,
  password: redisConfig.password,
  db: redisConfig.db,
  // BullMQ requires maxRetriesPerRequest: null so its blocking commands never abort.
  maxRetriesPerRequest: null,
  enableReadyCheck: false,
};

// Create a shared Redis connection for direct Redis operations
// (exported below; the BullMQ queues receive the plain options object instead).
const bullMQConnection = new Redis(connectionOptions);

console.log('📊 BullMQ Redis Connection:', {
  host: connectionOptions.host,
  port: connectionOptions.port,
  db: connectionOptions.db,
});

bullMQConnection.on('connect', () => {
  console.log('✅ BullMQ Redis connected');
});

bullMQConnection.on('error', (err) => {
  // Log only; ioredis reconnects on its own with its default retry strategy.
  console.error('❌ BullMQ Redis error:', err.message);
});
|
||||
|
||||
/**
 * Default job options applied to every queue created in this module.
 * Jobs retry three times with exponential backoff; completed jobs are
 * pruned after a day (or beyond 1000 entries), failed jobs after a week.
 */
const ONE_HOUR_SECONDS = 60 * 60;

const defaultJobOptions = {
  attempts: 3,
  backoff: {
    type: 'exponential',
    delay: 2000,
  },
  removeOnComplete: {
    age: 24 * ONE_HOUR_SECONDS, // Keep completed jobs for 24 hours
    count: 1000,
  },
  removeOnFail: {
    age: 7 * 24 * ONE_HOUR_SECONDS, // Keep failed jobs for 7 days
  },
};
|
||||
|
||||
/**
 * Queue definitions for different operations.
 * Frozen so queue names cannot be mutated at runtime — these strings are
 * used both as BullMQ queue identifiers and as keys into the `queues` registry.
 */
const QueueNames = Object.freeze({
  DATABASE_WRITE: 'database-write',
  NOTIFICATION: 'notification',
  ATTENDANCE_PROCESS: 'attendance-process',
  GRADE_CALCULATION: 'grade-calculation',
  REPORT_GENERATION: 'report-generation',
});
|
||||
|
||||
/**
 * Create one Queue per entry in QueueNames and register it in the `queues`
 * map, keyed by queue name. Each queue logs its own errors.
 */
const queues = {};

for (const name of Object.values(QueueNames)) {
  const queue = new Queue(name, {
    connection: connectionOptions, // Pass options, not a shared connection instance
    prefix: process.env.BULLMQ_PREFIX || 'vcb',
    defaultJobOptions,
  });

  queue.on('error', (error) => {
    console.error(`❌ Queue ${name} error:`, error.message);
  });

  console.log(`✅ Queue ${name} initialized`);
  queues[name] = queue;
}
|
||||
|
||||
/**
 * Add job to database write queue
 * @param {string} operation - Operation type: 'create', 'update', 'delete'
 * @param {string} model - Model name
 * @param {object} data - Data to be written
 * @param {object} options - Additional options (priority, delay, userId, ...)
 * @returns {Promise<object>} The enqueued BullMQ job
 * @throws Re-throws any error raised while enqueueing
 */
const addDatabaseWriteJob = async (operation, model, data, options = {}) => {
  try {
    // Payload carries the write plus any extra option fields for the worker.
    const payload = {
      operation,
      model,
      data,
      timestamp: new Date().toISOString(),
      userId: options.userId,
      ...options,
    };
    const jobOptions = {
      priority: options.priority || 5,
      delay: options.delay || 0,
    };

    const job = await queues[QueueNames.DATABASE_WRITE].add(`${operation}-${model}`, payload, jobOptions);

    console.log(`✅ Database write job added: ${job.id} (${operation} ${model})`);
    return job;
  } catch (error) {
    console.error(`❌ Error adding database write job:`, error.message);
    throw error;
  }
};
|
||||
|
||||
/**
 * Add notification job
 * @param {string} type - Notification type; job is named `send-<type>`
 * @param {*} recipients - Recipients, forwarded in the payload as-is
 * @param {*} content - Notification content, forwarded as-is
 * @param {object} options - Extra payload fields; `priority` defaults to 5
 * @returns {Promise<object>} The enqueued BullMQ job
 * @throws Re-throws any error raised while enqueueing
 */
const addNotificationJob = async (type, recipients, content, options = {}) => {
  try {
    const payload = {
      type,
      recipients,
      content,
      timestamp: new Date().toISOString(),
      ...options,
    };

    const job = await queues[QueueNames.NOTIFICATION].add(`send-${type}`, payload, {
      priority: options.priority || 5,
    });

    console.log(`✅ Notification job added: ${job.id}`);
    return job;
  } catch (error) {
    console.error(`❌ Error adding notification job:`, error.message);
    throw error;
  }
};
|
||||
|
||||
/**
 * Add attendance processing job
 * @param {*} schoolId - School whose attendance should be processed
 * @param {*} date - Attendance date, forwarded as-is
 * @param {object} options - Extra payload fields; `priority` defaults to 3
 * @returns {Promise<object>} The enqueued BullMQ job
 * @throws Re-throws any error raised while enqueueing
 */
const addAttendanceProcessJob = async (schoolId, date, options = {}) => {
  try {
    const payload = {
      schoolId,
      date,
      timestamp: new Date().toISOString(),
      ...options,
    };

    const job = await queues[QueueNames.ATTENDANCE_PROCESS].add('process-attendance', payload, {
      priority: options.priority || 3,
    });

    console.log(`✅ Attendance process job added: ${job.id}`);
    return job;
  } catch (error) {
    console.error(`❌ Error adding attendance process job:`, error.message);
    throw error;
  }
};
|
||||
|
||||
/**
 * Add grade calculation job
 * @param {*} studentId - Student whose grades should be calculated
 * @param {*} academicYearId - Academic year scope, forwarded as-is
 * @param {object} options - Extra payload fields; `priority` defaults to 4
 * @returns {Promise<object>} The enqueued BullMQ job
 * @throws Re-throws any error raised while enqueueing
 */
const addGradeCalculationJob = async (studentId, academicYearId, options = {}) => {
  try {
    const payload = {
      studentId,
      academicYearId,
      timestamp: new Date().toISOString(),
      ...options,
    };

    const job = await queues[QueueNames.GRADE_CALCULATION].add('calculate-grades', payload, {
      priority: options.priority || 4,
    });

    console.log(`✅ Grade calculation job added: ${job.id}`);
    return job;
  } catch (error) {
    console.error(`❌ Error adding grade calculation job:`, error.message);
    throw error;
  }
};
|
||||
|
||||
/**
 * Get queue metrics: per-state job counts for one queue plus their total.
 * @param {string} queueName - One of the QueueNames values
 * @returns {Promise<object>} { queueName, waiting, active, completed, failed, delayed, total }
 * @throws If the queue name is unknown or a count call fails
 */
const getQueueMetrics = async (queueName) => {
  try {
    const queue = queues[queueName];
    if (!queue) {
      throw new Error(`Queue ${queueName} not found`);
    }

    // Fetch all five state counters in parallel.
    const counts = await Promise.all([
      queue.getWaitingCount(),
      queue.getActiveCount(),
      queue.getCompletedCount(),
      queue.getFailedCount(),
      queue.getDelayedCount(),
    ]);
    const [waiting, active, completed, failed, delayed] = counts;

    return {
      queueName,
      waiting,
      active,
      completed,
      failed,
      delayed,
      total: counts.reduce((sum, count) => sum + count, 0),
    };
  } catch (error) {
    console.error(`❌ Error getting queue metrics:`, error.message);
    throw error;
  }
};
|
||||
|
||||
/**
 * Close all queues in parallel. Errors are logged, never thrown, so this
 * is safe to call from shutdown hooks.
 */
const closeQueues = async () => {
  try {
    const closing = Object.values(queues).map((queue) => queue.close());
    await Promise.all(closing);
    console.log('✅ All BullMQ queues closed');
  } catch (error) {
    console.error('❌ Error closing queues:', error.message);
  }
};
|
||||
|
||||
// Public API: queue registry, enqueue helpers, metrics, shutdown, plus the
// shared connection/options so worker processes can reuse the same config.
module.exports = {
  queues,
  QueueNames,
  addDatabaseWriteJob,
  addNotificationJob,
  addAttendanceProcessJob,
  addGradeCalculationJob,
  getQueueMetrics,
  closeQueues,
  bullMQConnection,
  connectionOptions,
  defaultJobOptions,
};
|
||||
40
config/config.json
Normal file
40
config/config.json
Normal file
@@ -0,0 +1,40 @@
|
||||
{
|
||||
"database": {
|
||||
"host": "senaai.tech",
|
||||
"port": 11001,
|
||||
"username": "root",
|
||||
"password": "Sena@2026!",
|
||||
"database": "sena_school_db",
|
||||
"dialect": "mysql",
|
||||
"pool": {
|
||||
"max": 20,
|
||||
"min": 5,
|
||||
"acquire": 60000,
|
||||
"idle": 10000
|
||||
},
|
||||
"logging": false,
|
||||
"timezone": "+07:00"
|
||||
},
|
||||
"redis": {
|
||||
"cluster": [
|
||||
{
|
||||
"host": "senaai.tech",
|
||||
"port": 11010
|
||||
},
|
||||
{
|
||||
"host": "senaai.tech",
|
||||
"port": 11011
|
||||
}
|
||||
],
|
||||
"password": "Sena@2026!",
|
||||
"db": 0,
|
||||
"keyPrefix": "sena:"
|
||||
},
|
||||
"server": {
|
||||
"port": 4000,
|
||||
"env": "production"
|
||||
},
|
||||
"cors": {
|
||||
"origin": "*"
|
||||
}
|
||||
}
|
||||
98
config/database.js
Normal file
98
config/database.js
Normal file
@@ -0,0 +1,98 @@
|
||||
const { Sequelize } = require('sequelize');
const config = require('./config.json');

/**
 * MySQL Connection Configuration via senaai.tech
 * Direct connection to MySQL server (port 11001)
 *
 * NOTE(review): credentials are read from config.json committed to the
 * repository — consider moving secrets to environment variables.
 */
const dbConfig = config.database;

const sequelize = new Sequelize(
  dbConfig.database,
  dbConfig.username,
  dbConfig.password,
  {
    host: dbConfig.host,
    port: dbConfig.port,
    dialect: dbConfig.dialect,
    dialectOptions: {
      connectTimeout: 60000, // 60 seconds for slow networks
    },

    // Connection Pool Configuration (max/min/acquire/idle from config.json)
    pool: dbConfig.pool,

    // Logging (false in config.json — SQL logging disabled)
    logging: dbConfig.logging,

    // Query options applied to every model definition
    define: {
      timestamps: true,       // created_at / updated_at columns
      underscored: true,      // snake_case column names
      freezeTableName: true,  // table name === model name, no pluralization
    },

    // Timezone used when writing dates (e.g. "+07:00" from config.json)
    timezone: dbConfig.timezone,

    // Retry configuration
    // NOTE(review): Sequelize documents `retry.max`/`retry.match`;
    // confirm `timeout` is honored by the installed version.
    retry: {
      max: 3,
      timeout: 3000,
    },
  }
);
|
||||
|
||||
/**
 * Test database connection by authenticating once.
 * @returns {Promise<boolean>} true when authentication succeeds, false otherwise
 */
const testConnection = async () => {
  try {
    await sequelize.authenticate();
  } catch (error) {
    console.error('❌ Unable to connect to MySQL:', error.message);
    return false;
  }
  console.log(`✅ MySQL connection established successfully to ${dbConfig.host}:${dbConfig.port}`);
  return true;
};
|
||||
|
||||
/**
 * Initialize database models: verifies connectivity and returns the shared
 * Sequelize instance. Auto-sync is intentionally not performed here
 * (disabled in production for safety).
 * @returns {Promise<Sequelize>} the connected Sequelize instance
 * @throws When the connection test fails
 */
const initializeDatabase = async () => {
  try {
    // Test connection first
    if (!(await testConnection())) {
      throw new Error('Failed to connect to database');
    }

    console.log('✅ Database connection ready');
    return sequelize;
  } catch (error) {
    console.error('❌ Database initialization failed:', error.message);
    throw error;
  }
};
|
||||
|
||||
/**
 * Close database connection. Errors are logged, never thrown, so this is
 * safe to call from shutdown hooks.
 */
const closeConnection = async () => {
  try {
    await sequelize.close();
  } catch (error) {
    console.error('❌ Error closing database connection:', error.message);
    return;
  }
  console.log('✅ Database connection closed');
};
|
||||
|
||||
// Public API: the shared Sequelize instance plus connection lifecycle helpers.
module.exports = {
  sequelize,
  testConnection,
  initializeDatabase,
  closeConnection,
};
|
||||
169
config/redis.js
Normal file
169
config/redis.js
Normal file
@@ -0,0 +1,169 @@
|
||||
const Redis = require('ioredis');
const config = require('./config.json');

/**
 * Redis Connection - Direct connection to master
 * For Sentinel HA from external clients, we connect directly to the master port
 * and rely on manual failover by trying both ports
 */
const redisConfig = config.redis;

// Direct connection to master (port 11010)
const redisClient = new Redis({
  host: redisConfig.cluster[0].host,
  port: redisConfig.cluster[0].port,
  password: redisConfig.password,
  db: redisConfig.db,
  // Every key argument to prefixed commands gets this prepended (e.g. "sena:").
  keyPrefix: redisConfig.keyPrefix,
  connectTimeout: 10000,

  // Linear backoff (100ms per attempt) capped at 3s; give up after 10 attempts
  // by returning null, which stops ioredis from reconnecting.
  retryStrategy: (times) => {
    if (times > 10) {
      console.log(`⚠️ Redis retry exhausted after ${times} attempts`);
      return null;
    }
    const delay = Math.min(times * 100, 3000);
    console.log(`🔄 Redis retry attempt ${times}, delay ${delay}ms`);
    return delay;
  },

  // Reconnect on READONLY error (slave promoted)
  reconnectOnError: (err) => {
    if (err.message.includes('READONLY')) {
      console.log('⚠️ READONLY error detected - slave may have been promoted');
      return true;
    }
    return false;
  },

  // Queue commands while disconnected and replay them once reconnected.
  enableOfflineQueue: true,
  maxRetriesPerRequest: null,
  enableReadyCheck: true,
});
|
||||
|
||||
/**
 * Redis Event Handlers — lifecycle logging, one handler per connection event.
 */
redisClient.on('connect', () => console.log('✅ Redis client connected'));
redisClient.on('ready', () => console.log('✅ Redis client ready'));
redisClient.on('error', (err) => console.error('❌ Redis error:', err.message));
redisClient.on('close', () => console.log('⚠️ Redis client closed'));
redisClient.on('reconnecting', () => console.log('🔄 Redis client reconnecting...'));
|
||||
|
||||
/**
 * Cache utility functions.
 * Values are JSON-serialized; every helper swallows Redis errors and returns
 * a safe fallback so cache failures never break request handling.
 */
const cacheUtils = {
  /**
   * Get value from cache.
   * @param {string} key
   * @returns {Promise<*>} parsed value, or null when missing or on error
   */
  get: async (key) => {
    try {
      const value = await redisClient.get(key);
      return value ? JSON.parse(value) : null;
    } catch (error) {
      console.error(`Error getting cache for key ${key}:`, error.message);
      return null;
    }
  },

  /**
   * Set value to cache with TTL (seconds, default 1 hour).
   * @returns {Promise<boolean>} true on success
   */
  set: async (key, value, ttl = 3600) => {
    try {
      const serialized = JSON.stringify(value);
      await redisClient.setex(key, ttl, serialized);
      return true;
    } catch (error) {
      console.error(`Error setting cache for key ${key}:`, error.message);
      return false;
    }
  },

  /**
   * Delete cache by key.
   * @returns {Promise<boolean>} true on success
   */
  delete: async (key) => {
    try {
      await redisClient.del(key);
      return true;
    } catch (error) {
      console.error(`Error deleting cache for key ${key}:`, error.message);
      return false;
    }
  },

  /**
   * Delete cache by pattern (glob syntax).
   *
   * Uses SCAN instead of KEYS so a large keyspace does not block Redis.
   * The client is configured with a keyPrefix: ioredis prefixes key
   * arguments of commands like DEL, but NOT the MATCH pattern, and SCAN
   * returns fully-prefixed key names. So we prepend the prefix to the
   * pattern and strip it again before DEL — otherwise the prefix would be
   * applied twice and the wrong keys would be targeted.
   * @returns {Promise<number>} number of keys removed (0 on error)
   */
  deletePattern: async (pattern) => {
    try {
      const prefix = redisClient.options.keyPrefix || '';
      let cursor = '0';
      let removed = 0;
      do {
        const [nextCursor, batch] = await redisClient.scan(
          cursor,
          'MATCH',
          `${prefix}${pattern}`,
          'COUNT',
          100
        );
        cursor = nextCursor;
        if (batch.length > 0) {
          const bareKeys = batch.map((k) =>
            k.startsWith(prefix) ? k.slice(prefix.length) : k
          );
          await redisClient.del(...bareKeys);
          removed += batch.length;
        }
      } while (cursor !== '0');
      return removed;
    } catch (error) {
      console.error(`Error deleting cache pattern ${pattern}:`, error.message);
      return 0;
    }
  },

  /**
   * Check if key exists.
   * @returns {Promise<boolean>} false on error or when missing
   */
  exists: async (key) => {
    try {
      const result = await redisClient.exists(key);
      return result === 1;
    } catch (error) {
      console.error(`Error checking cache existence for key ${key}:`, error.message);
      return false;
    }
  },

  /**
   * Get TTL for key in seconds.
   * NOTE(review): returns -1 on error, which collides with Redis's
   * "key exists but has no expiry" sentinel — callers cannot distinguish.
   */
  ttl: async (key) => {
    try {
      return await redisClient.ttl(key);
    } catch (error) {
      console.error(`Error getting TTL for key ${key}:`, error.message);
      return -1;
    }
  },
};
|
||||
|
||||
/**
 * Close Redis connection gracefully (QUIT). Errors are logged, never thrown.
 */
const closeRedisConnection = async () => {
  try {
    await redisClient.quit();
  } catch (error) {
    console.error('❌ Error closing Redis connection:', error.message);
    return;
  }
  console.log('✅ Redis connection closed gracefully');
};
|
||||
|
||||
// Public API: shared client, JSON cache helpers, and graceful shutdown.
module.exports = {
  redisClient,
  cacheUtils,
  closeRedisConnection,
};
|
||||
Reference in New Issue
Block a user