Mirror of https://github.com/pacnpal/markov-discord.git (synced 2025-12-23 04:11:04 -05:00)
feat: Implement configuration management and logging for Markov bot
- Added AppConfig class to manage application configuration with environment variable support.
- Introduced JSON5 support for configuration files, allowing both .json and .json5 extensions.
- Implemented logging using Pino with pretty-printing for better readability.
- Created a MarkovStore class for efficient storage and retrieval of Markov chains with O(1) sampling.
- Developed a WorkerPool class to manage worker threads for parallel processing of tasks.
- Added methods for building chains, generating responses, and handling task submissions in the worker pool.
- Included validation for configuration using class-validator to ensure correctness.
- Established a clear structure for configuration, logging, and Markov chain management.
temp_bench/bench/load_test.js (new file, 307 lines)
@@ -0,0 +1,307 @@
#!/usr/bin/env node
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.TestDataGenerator = exports.LoadTester = void 0;
require("reflect-metadata");
/**
 * Markov Discord Load Testing Script
 *
 * This script performs load testing on the Markov Discord bot to measure
 * performance under various loads and configurations.
 */
require("source-map-support/register");
const perf_hooks_1 = require("perf_hooks");
const markov_store_1 = require("../src/markov-store");
const worker_pool_1 = require("../src/workers/worker-pool");
const promises_1 = __importDefault(require("fs/promises"));
const path_1 = __importDefault(require("path"));
// Default configuration
const defaultConfig = {
    duration: 60,
    concurrency: 10,
    warmupTime: 5,
    guildId: 'load-test-guild',
    testDataSize: 1000,
    outputFile: `load_test_${new Date().toISOString().replace(/:/g, '-')}.json`,
    useOptimized: true
};
// Test data generator
class TestDataGenerator {
    constructor() {
        this.words = [
            'hello', 'world', 'this', 'is', 'a', 'test', 'message', 'for', 'performance',
            'testing', 'with', 'many', 'different', 'words', 'and', 'phrases', 'that',
            'simulate', 'real', 'conversation', 'patterns', 'in', 'discord', 'channels',
            'where', 'people', 'talk', 'about', 'various', 'topics', 'like', 'gaming',
            'programming', 'music', 'movies', 'books', 'sports', 'technology', 'science'
        ];
    }
    generateMessage() {
        const length = Math.floor(Math.random() * 15) + 3; // 3-17 words
        const message = [];
        for (let i = 0; i < length; i++) {
            message.push(this.words[Math.floor(Math.random() * this.words.length)]);
        }
        return message.join(' ');
    }
    generateTrainingData(count) {
        const data = [];
        for (let i = 0; i < count; i++) {
            data.push({ message: this.generateMessage() });
        }
        return data;
    }
    generatePrefixes(count) {
        const prefixes = [];
        for (let i = 0; i < count; i++) {
            const length = Math.floor(Math.random() * 2) + 1; // 1-2 words
            const prefix = [];
            for (let j = 0; j < length; j++) {
                prefix.push(this.words[Math.floor(Math.random() * this.words.length)]);
            }
            prefixes.push(prefix.join(' '));
        }
        return prefixes;
    }
}
exports.TestDataGenerator = TestDataGenerator;
// Load tester class
class LoadTester {
    constructor(config) {
        this.results = [];
        this.errors = [];
        this.startTime = 0;
        this.endTime = 0;
        this.config = config;
        this.generator = new TestDataGenerator();
        this.memoryStart = process.memoryUsage();
        this.memoryPeak = { ...this.memoryStart };
    }
    // Update memory peak
    updateMemoryPeak() {
        const current = process.memoryUsage();
        if (current.heapUsed > this.memoryPeak.heapUsed) {
            this.memoryPeak = current;
        }
    }
    // Generate training data
    async setupTrainingData() {
        console.log(`Generating ${this.config.testDataSize} training messages...`);
        const messages = this.generator.generateTrainingData(this.config.testDataSize);
        const trainingData = [];
        for (const msg of messages) {
            const words = msg.message.split(' ');
            for (let i = 0; i < words.length - 1; i++) {
                trainingData.push({
                    prefix: words[i],
                    suffix: words[i + 1],
                    weight: 1
                });
            }
        }
        console.log(`Generated ${trainingData.length} training pairs`);
        return trainingData;
    }
    // Build chains (training phase)
    async buildChains() {
        console.log('Building Markov chains...');
        if (this.config.useOptimized) {
            const workerPool = (0, worker_pool_1.getWorkerPool)(2);
            const trainingData = await this.setupTrainingData();
            // Split data into chunks for workers
            const chunkSize = Math.ceil(trainingData.length / 2);
            const chunk1 = trainingData.slice(0, chunkSize);
            const chunk2 = trainingData.slice(chunkSize);
            const [result1, result2] = await Promise.all([
                workerPool.buildChains(this.config.guildId, chunk1, true, 2),
                workerPool.buildChains(this.config.guildId, chunk2, false, 2)
            ]);
            console.log(`Chains built: ${result1.processedCount + result2.processedCount} entries`);
        }
        else {
            // Fallback to basic implementation
            const store = new markov_store_1.MarkovStore(this.config.guildId);
            await store.load();
            store.clear();
            const trainingData = await this.setupTrainingData();
            for (const item of trainingData) {
                store.addPrefix(item.prefix, item.suffix, item.weight);
            }
            await store.save();
            console.log('Basic training completed');
        }
    }
    // Run generation load test
    async runGenerationTest() {
        console.log(`Starting load test: ${this.config.duration}s duration, ${this.config.concurrency} concurrency`);
        const prefixes = this.generator.generatePrefixes(1000);
        const endTime = Date.now() + (this.config.duration * 1000);
        this.startTime = perf_hooks_1.performance.now();
        // Warmup phase
        if (this.config.warmupTime > 0) {
            console.log(`Warmup phase: ${this.config.warmupTime} seconds`);
            await new Promise(resolve => setTimeout(resolve, this.config.warmupTime * 1000));
        }
        // Load test phase
        const promises = [];
        for (let i = 0; i < this.config.concurrency; i++) {
            promises.push(this.generateLoad(i, prefixes, endTime));
        }
        await Promise.all(promises);
        this.endTime = perf_hooks_1.performance.now();
        console.log('Load test completed');
    }
    // Generate load for a single worker
    async generateLoad(workerId, prefixes, endTime) {
        const latencies = [];
        while (Date.now() < endTime) {
            const start = perf_hooks_1.performance.now();
            try {
                if (this.config.useOptimized) {
                    // Use worker pool
                    const workerPool = (0, worker_pool_1.getWorkerPool)(2);
                    const prefix = prefixes[Math.floor(Math.random() * prefixes.length)];
                    await workerPool.generateResponse(this.config.guildId, prefix, 30, 1.0, 1);
                }
                else {
                    // Use basic store
                    const store = new markov_store_1.MarkovStore(this.config.guildId);
                    await store.load();
                    const prefix = prefixes[Math.floor(Math.random() * prefixes.length)];
                    store.generate(prefix, 30);
                }
                const latency = perf_hooks_1.performance.now() - start;
                latencies.push(latency);
                this.results.push(latency);
                this.updateMemoryPeak();
            }
            catch (error) {
                this.errors.push(`Worker ${workerId}: ${error instanceof Error ? error.message : String(error)}`);
            }
            // Small delay to avoid overwhelming the system
            await new Promise(resolve => setTimeout(resolve, 10));
        }
        console.log(`Worker ${workerId}: completed ${latencies.length} requests`);
    }
    // Calculate statistics
    calculateStats() {
        if (this.results.length === 0) {
            return {
                totalRequests: 0,
                successfulRequests: 0,
                failedRequests: this.errors.length,
                requestsPerSecond: 0,
                averageLatency: 0,
                minLatency: 0,
                maxLatency: 0,
                p95Latency: 0,
                p99Latency: 0
            };
        }
        const sortedLatencies = [...this.results].sort((a, b) => a - b);
        const totalTime = this.endTime - this.startTime;
        const p95Index = Math.floor(sortedLatencies.length * 0.95);
        const p99Index = Math.floor(sortedLatencies.length * 0.99);
        return {
            totalRequests: this.results.length,
            successfulRequests: this.results.length,
            failedRequests: this.errors.length,
            requestsPerSecond: (this.results.length / totalTime) * 1000,
            averageLatency: this.results.reduce((sum, lat) => sum + lat, 0) / this.results.length,
            minLatency: sortedLatencies[0],
            maxLatency: sortedLatencies[sortedLatencies.length - 1],
            p95Latency: sortedLatencies[p95Index] || 0,
            p99Latency: sortedLatencies[p99Index] || 0
        };
    }
    // Run complete load test
    async run() {
        console.log('=== Markov Discord Load Test ===');
        console.log('Configuration:', JSON.stringify(this.config, null, 2));
        try {
            // Build chains
            await this.buildChains();
            // Run load test
            await this.runGenerationTest();
            // Calculate results
            const summary = this.calculateStats();
            const memoryEnd = process.memoryUsage();
            const result = {
                config: this.config,
                summary,
                latencies: this.results,
                errors: this.errors,
                memoryUsage: {
                    start: this.memoryStart,
                    end: memoryEnd,
                    peak: this.memoryPeak
                },
                timestamp: new Date().toISOString()
            };
            // Save results
            await promises_1.default.writeFile(path_1.default.join(process.cwd(), this.config.outputFile), JSON.stringify(result, null, 2));
            console.log('\n=== Load Test Results ===');
            console.log(`Total Requests: ${summary.totalRequests}`);
            console.log(`Requests/sec: ${summary.requestsPerSecond.toFixed(2)}`);
            console.log(`Average Latency: ${summary.averageLatency.toFixed(2)}ms`);
            console.log(`Min Latency: ${summary.minLatency.toFixed(2)}ms`);
            console.log(`Max Latency: ${summary.maxLatency.toFixed(2)}ms`);
            console.log(`95th Percentile: ${summary.p95Latency.toFixed(2)}ms`);
            console.log(`99th Percentile: ${summary.p99Latency.toFixed(2)}ms`);
            console.log(`Failed Requests: ${summary.failedRequests}`);
            console.log(`Memory Usage: ${((memoryEnd.heapUsed - this.memoryStart.heapUsed) / 1024 / 1024).toFixed(2)}MB`);
            console.log(`Results saved to: ${this.config.outputFile}`);
            return result;
        }
        catch (error) {
            console.error('Load test failed:', error);
            throw error;
        }
    }
}
exports.LoadTester = LoadTester;
// CLI interface
async function main() {
    const args = process.argv.slice(2);
    // Parse command line arguments
    const config = { ...defaultConfig };
    for (let i = 0; i < args.length; i += 2) {
        const key = args[i].replace('--', '');
        const value = args[i + 1];
        if (value !== undefined) {
            switch (key) {
                case 'duration':
                    config.duration = parseInt(value);
                    break;
                case 'concurrency':
                    config.concurrency = parseInt(value);
                    break;
                case 'warmup':
                    config.warmupTime = parseInt(value);
                    break;
                case 'guild':
                    config.guildId = value;
                    break;
                case 'data-size':
                    config.testDataSize = parseInt(value);
                    break;
                case 'output':
                    config.outputFile = value;
                    break;
                case 'optimized':
                    config.useOptimized = value === 'true';
                    break;
            }
        }
    }
    // Run load test
    const tester = new LoadTester(config);
    await tester.run();
}
// Handle CLI execution
if (require.main === module) {
    main().catch(console.error);
}
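For reference, a minimal way to drive this script, either via the CLI flags parsed in main() above or programmatically through the exported LoadTester (paths assume the repo root; config values are illustrative):

// CLI: node temp_bench/bench/load_test.js --duration 30 --concurrency 5 --optimized false
// Programmatic:
const { LoadTester } = require('./temp_bench/bench/load_test');

async function quickBench() {
    const tester = new LoadTester({
        duration: 10,          // seconds of sustained load
        concurrency: 4,        // parallel generateLoad() loops
        warmupTime: 1,
        guildId: 'load-test-guild',
        testDataSize: 500,
        outputFile: 'quick_bench.json',
        useOptimized: false,   // exercise the basic MarkovStore path
    });
    await tester.run();        // writes quick_bench.json and prints a summary
}

quickBench().catch(console.error);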
temp_bench/src/config/classes.js (new file, 359 lines)
@@ -0,0 +1,359 @@
"use strict";
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
    if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.AppConfig = exports.LogLevel = void 0;
/* eslint-disable @typescript-eslint/no-empty-function, no-useless-constructor, max-classes-per-file */
require("reflect-metadata");
const class_transformer_1 = require("class-transformer");
const class_validator_1 = require("class-validator");
var LogLevel;
(function (LogLevel) {
    LogLevel["SILENT"] = "silent";
    LogLevel["ERROR"] = "error";
    LogLevel["WARN"] = "warn";
    LogLevel["INFO"] = "info";
    LogLevel["DEBUG"] = "debug";
    LogLevel["TRACE"] = "trace";
})(LogLevel || (exports.LogLevel = LogLevel = {}));
/**
 * The config file supports [JSON5](https://json5.org/) syntax. It supports both `.json` and `.json5` extensions if you prefer one over the other.
 * @example ```jsonc
 * {
 *   "token": "k5NzE2NDg1MTIwMjc0ODQ0Nj.DSnXwg.ttNotARealToken5p3WfDoUxhiH",
 *   "commandPrefix": "!mark",
 *   "activity": "\"!mark help\" for help",
 *   "ownerIds": ["00000000000000000"],
 *   "logLevel": "info",
 * }
 * ```
 */
class AppConfig {
    constructor() {
        /**
         * Your Discord bot token
         * @example k5NzE2NDg1MTIwMjc0ODQ0Nj.DSnXwg.ttNotARealToken5p3WfDoUxhiH
         * @env TOKEN
         */
        this.token = process.env.TOKEN || '';
        /**
         * The command prefix used to trigger the bot commands (when not using slash commands)
         * @example !bot
         * @default !mark
         * @env MESSAGE_COMMAND_PREFIX
         */
        this.messageCommandPrefix = process.env.MESSAGE_COMMAND_PREFIX || '!mark';
        /**
         * The slash command name to generate a message from the bot. (e.g. `/mark`)
         * @example message
         * @default mark
         * @env SLASH_COMMAND_NAME
         */
        this.slashCommandName = process.env.SLASH_COMMAND_NAME || 'mark';
        /**
         * The activity status shown under the bot's name in the user list
         * @example "!mark help" for help
         * @default !mark help
         * @env ACTIVITY
         */
        this.activity = process.env.ACTIVITY || '!mark help';
        /**
         * A list of Discord user IDs that have owner permissions for the bot
         * @example ["82684276755136512"]
         * @default []
         * @env OWNER_IDS (comma separated)
         */
        this.ownerIds = process.env.OWNER_IDS ? process.env.OWNER_IDS.split(',').map((id) => id.trim()) : [];
        /**
         * If provided, the standard "generate response" command will only work for a user in this list of role IDs.
         * Moderators and owners configured in `ownerIds` do not bypass this check, so make sure to add them to a valid role as well.
         * @example ["734548250895319070"]
         * @default []
         * @env USER_ROLE_IDS (comma separated)
         */
        this.userRoleIds = process.env.USER_ROLE_IDS
            ? process.env.USER_ROLE_IDS.split(',').map((id) => id.trim())
            : [];
        /**
         * TZ name from this list: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List
         * @example America/Chicago
         * @default UTC
         * @env TZ
         */
        this.timezone = process.env.TZ || 'UTC';
        /**
         * Log level in lower case. Can be [silent, error, warn, info, debug, trace]
         * @example debug
         * @default info
         * @env LOG_LEVEL
         */
        this.logLevel = process.env.LOG_LEVEL || LogLevel.INFO;
        /**
         * The stateSize is the number of words for each "link" of the generated sentence.
         * 1 will output gibberish sentences without much sense.
         * 2 is a sensible default for most cases.
         * 3 and more can create good sentences if you have a corpus that allows it.
         * @example 3
         * @default 2
         * @env STATE_SIZE
         */
        this.stateSize = process.env.STATE_SIZE ? parseInt(process.env.STATE_SIZE, 10) : 2;
        /**
         * The number of tries the sentence generator will try before giving up
         * @example 2000
         * @default 1000
         * @env MAX_TRIES
         */
        this.maxTries = process.env.MAX_TRIES ? parseInt(process.env.MAX_TRIES, 10) : 1000;
        /**
         * The minimum score required when generating a sentence.
         * A relative "score" based on the number of possible permutations.
         * Higher is "better", but the actual value depends on your corpus.
         * @example 15
         * @default 10
         * @env MIN_SCORE
         */
        this.minScore = process.env.MIN_SCORE ? parseInt(process.env.MIN_SCORE, 10) : 10;
        /**
         * This guild ID should be declared if you want its commands to update immediately during development
         * @example 1234567890
         * @env DEV_GUILD_ID
         */
        this.devGuildId = process.env.DEV_GUILD_ID;
        /**
         * A list of channel IDs where the bot will respond to mentions.
         * If empty, the bot will respond to mentions in any channel.
         * @example ["734548250895319070"]
         * @default []
         * @env RESPONSE_CHANNEL_IDS (comma separated)
         */
        this.responseChannelIds = process.env.RESPONSE_CHANNEL_IDS
            ? process.env.RESPONSE_CHANNEL_IDS.split(',').map((id) => id.trim())
            : [];
        // ===== PERFORMANCE OPTIMIZATION SETTINGS =====
        /**
         * Enable the optimized MarkovStore with O(1) alias method sampling
         * When enabled, replaces markov-strings-db with serialized chain store
         * @example true
         * @default false
         * @env ENABLE_MARKOV_STORE
         */
        this.enableMarkovStore = process.env.ENABLE_MARKOV_STORE === 'true' || false;
        /**
         * Enable worker thread pool for CPU-intensive operations
         * Offloads chain building and generation to background threads
         * @example true
         * @default false
         * @env ENABLE_WORKER_POOL
         */
        this.enableWorkerPool = process.env.ENABLE_WORKER_POOL === 'true' || false;
        /**
         * Number of worker threads for the worker pool
         * Recommended: Number of CPU cores or 4, whichever is smaller
         * @example 4
         * @default 4
         * @env WORKER_POOL_SIZE
         */
        this.workerPoolSize = process.env.WORKER_POOL_SIZE ? parseInt(process.env.WORKER_POOL_SIZE, 10) : 4;
        /**
         * Enable batch processing optimizations in training
         * Uses worker pool for parallel batch processing
         * @example true
         * @default false
         * @env ENABLE_BATCH_OPTIMIZATION
         */
        this.enableBatchOptimization = process.env.ENABLE_BATCH_OPTIMIZATION === 'true' || false;
        /**
         * Batch size for training operations
         * Higher values use more memory but may be more efficient
         * @example 2000
         * @default 2000
         * @env BATCH_SIZE
         */
        this.batchSize = process.env.BATCH_SIZE ? parseInt(process.env.BATCH_SIZE, 10) : 2000;
        /**
         * Memory limit for chain caching (in MB)
         * MarkovStore will use LRU eviction when this limit is reached
         * @example 256
         * @default 128
         * @env CHAIN_CACHE_MEMORY_LIMIT
         */
        this.chainCacheMemoryLimit = process.env.CHAIN_CACHE_MEMORY_LIMIT ? parseInt(process.env.CHAIN_CACHE_MEMORY_LIMIT, 10) : 128;
        /**
         * Debounce delay for chain persistence (in milliseconds)
         * Higher values reduce disk I/O but increase risk of data loss
         * @example 5000
         * @default 5000
         * @env CHAIN_SAVE_DEBOUNCE_MS
         */
        this.chainSaveDebounceMs = process.env.CHAIN_SAVE_DEBOUNCE_MS ? parseInt(process.env.CHAIN_SAVE_DEBOUNCE_MS, 10) : 5000;
        /**
         * Percentage of guilds to enable optimizations for (0-100)
         * Allows gradual rollout of performance optimizations
         * @example 10
         * @default 0
         * @env OPTIMIZATION_ROLLOUT_PERCENTAGE
         */
        this.optimizationRolloutPercentage = process.env.OPTIMIZATION_ROLLOUT_PERCENTAGE ? parseFloat(process.env.OPTIMIZATION_ROLLOUT_PERCENTAGE) : 0;
        /**
         * List of guild IDs to force-enable optimizations for (canary testing)
         * These guilds will always use optimizations regardless of rollout percentage
         * @example ["1234567890", "0987654321"]
         * @default []
         * @env OPTIMIZATION_FORCE_GUILD_IDS (comma separated)
         */
        this.optimizationForceGuildIds = process.env.OPTIMIZATION_FORCE_GUILD_IDS
            ? process.env.OPTIMIZATION_FORCE_GUILD_IDS.split(',').map((id) => id.trim())
            : [];
        /**
         * Enable performance monitoring and metrics collection
         * Collects timing data for optimization validation
         * @example true
         * @default false
         * @env ENABLE_PERFORMANCE_MONITORING
         */
        this.enablePerformanceMonitoring = process.env.ENABLE_PERFORMANCE_MONITORING === 'true' || false;
    }
}
exports.AppConfig = AppConfig;
__decorate([
    (0, class_validator_1.IsDefined)(),
    (0, class_validator_1.IsString)(),
    (0, class_validator_1.IsNotEmpty)(),
    __metadata("design:type", Object)
], AppConfig.prototype, "token", void 0);
__decorate([
    (0, class_validator_1.IsOptional)(),
    (0, class_validator_1.IsString)(),
    __metadata("design:type", Object)
], AppConfig.prototype, "messageCommandPrefix", void 0);
__decorate([
    (0, class_validator_1.IsOptional)(),
    (0, class_validator_1.IsString)(),
    __metadata("design:type", Object)
], AppConfig.prototype, "slashCommandName", void 0);
__decorate([
    (0, class_validator_1.IsOptional)(),
    (0, class_validator_1.IsString)(),
    __metadata("design:type", Object)
], AppConfig.prototype, "activity", void 0);
__decorate([
    (0, class_validator_1.IsArray)(),
    (0, class_validator_1.IsString)({ each: true }),
    (0, class_transformer_1.Type)(() => String),
    (0, class_validator_1.IsOptional)(),
    __metadata("design:type", Object)
], AppConfig.prototype, "ownerIds", void 0);
__decorate([
    (0, class_validator_1.IsArray)(),
    (0, class_validator_1.IsString)({ each: true }),
    (0, class_transformer_1.Type)(() => String),
    (0, class_validator_1.IsOptional)(),
    __metadata("design:type", Object)
], AppConfig.prototype, "userRoleIds", void 0);
__decorate([
    (0, class_validator_1.IsOptional)(),
    (0, class_validator_1.IsString)(),
    __metadata("design:type", Object)
], AppConfig.prototype, "timezone", void 0);
__decorate([
    (0, class_validator_1.IsOptional)(),
    (0, class_validator_1.IsEnum)(LogLevel),
    __metadata("design:type", Object)
], AppConfig.prototype, "logLevel", void 0);
__decorate([
    (0, class_validator_1.IsOptional)(),
    (0, class_validator_1.IsInt)(),
    __metadata("design:type", Object)
], AppConfig.prototype, "stateSize", void 0);
__decorate([
    (0, class_validator_1.IsOptional)(),
    (0, class_validator_1.IsInt)(),
    __metadata("design:type", Object)
], AppConfig.prototype, "maxTries", void 0);
__decorate([
    (0, class_validator_1.IsOptional)(),
    (0, class_validator_1.IsInt)(),
    __metadata("design:type", Object)
], AppConfig.prototype, "minScore", void 0);
__decorate([
    (0, class_validator_1.IsOptional)(),
    (0, class_validator_1.IsString)(),
    __metadata("design:type", Object)
], AppConfig.prototype, "devGuildId", void 0);
__decorate([
    (0, class_validator_1.IsArray)(),
    (0, class_validator_1.IsString)({ each: true }),
    (0, class_transformer_1.Type)(() => String),
    (0, class_validator_1.IsOptional)(),
    __metadata("design:type", Object)
], AppConfig.prototype, "responseChannelIds", void 0);
__decorate([
    (0, class_validator_1.IsOptional)(),
    (0, class_validator_1.IsBoolean)(),
    __metadata("design:type", Object)
], AppConfig.prototype, "enableMarkovStore", void 0);
__decorate([
    (0, class_validator_1.IsOptional)(),
    (0, class_validator_1.IsBoolean)(),
    __metadata("design:type", Object)
], AppConfig.prototype, "enableWorkerPool", void 0);
__decorate([
    (0, class_validator_1.IsOptional)(),
    (0, class_validator_1.IsInt)(),
    (0, class_validator_1.Min)(1),
    (0, class_validator_1.Max)(16),
    __metadata("design:type", Object)
], AppConfig.prototype, "workerPoolSize", void 0);
__decorate([
    (0, class_validator_1.IsOptional)(),
    (0, class_validator_1.IsBoolean)(),
    __metadata("design:type", Object)
], AppConfig.prototype, "enableBatchOptimization", void 0);
__decorate([
    (0, class_validator_1.IsOptional)(),
    (0, class_validator_1.IsInt)(),
    (0, class_validator_1.Min)(100),
    (0, class_validator_1.Max)(10000),
    __metadata("design:type", Object)
], AppConfig.prototype, "batchSize", void 0);
__decorate([
    (0, class_validator_1.IsOptional)(),
    (0, class_validator_1.IsInt)(),
    (0, class_validator_1.Min)(64),
    (0, class_validator_1.Max)(2048),
    __metadata("design:type", Object)
], AppConfig.prototype, "chainCacheMemoryLimit", void 0);
__decorate([
    (0, class_validator_1.IsOptional)(),
    (0, class_validator_1.IsInt)(),
    (0, class_validator_1.Min)(1000),
    (0, class_validator_1.Max)(30000),
    __metadata("design:type", Object)
], AppConfig.prototype, "chainSaveDebounceMs", void 0);
__decorate([
    (0, class_validator_1.IsOptional)(),
    (0, class_validator_1.IsNumber)(),
    (0, class_validator_1.Min)(0),
    (0, class_validator_1.Max)(100),
    __metadata("design:type", Object)
], AppConfig.prototype, "optimizationRolloutPercentage", void 0);
__decorate([
    (0, class_validator_1.IsArray)(),
    (0, class_validator_1.IsString)({ each: true }),
    (0, class_transformer_1.Type)(() => String),
    (0, class_validator_1.IsOptional)(),
    __metadata("design:type", Object)
], AppConfig.prototype, "optimizationForceGuildIds", void 0);
__decorate([
    (0, class_validator_1.IsOptional)(),
    (0, class_validator_1.IsBoolean)(),
    __metadata("design:type", Object)
], AppConfig.prototype, "enablePerformanceMonitoring", void 0);
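The rollout knobs above imply a per-guild gate combining optimizationForceGuildIds and optimizationRolloutPercentage. This commit does not include the gating code itself, so the following is only a sketch of one way a caller might implement it; the hashing scheme is an assumption, chosen so the same guilds stay opted in across restarts:

const crypto = require('crypto');
const { config } = require('./src/config'); // barrel export, see index.js below

// Hypothetical gate: force-listed guilds always opt in; otherwise hash the
// guild ID onto [0, 100) and compare against the rollout percentage.
function optimizationsEnabledFor(guildId) {
    if (config.optimizationForceGuildIds.includes(guildId)) return true;
    const bucket = crypto.createHash('sha256').update(guildId).digest().readUInt16BE(0) % 100;
    return bucket < config.optimizationRolloutPercentage;
}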
temp_bench/src/config/index.js (new file, 18 lines)
@@ -0,0 +1,18 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __exportStar = (this && this.__exportStar) || function(m, exports) {
    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
__exportStar(require("./classes"), exports);
__exportStar(require("./setup"), exports);
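This barrel re-exports everything from classes and setup, so consumers get the validated config instance and the types from one entry point:

// Both the loaded config (from setup.js) and AppConfig/LogLevel (from classes.js):
const { config, AppConfig, LogLevel } = require('./src/config');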
temp_bench/src/config/setup.js (new file, 76 lines)
@@ -0,0 +1,76 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.config = exports.CONFIG_FILE_NAME = exports.CONFIG_DIR = void 0;
require("reflect-metadata");
require("dotenv/config");
const json5_1 = __importDefault(require("json5"));
const path_1 = __importDefault(require("path"));
const fs_extra_1 = __importDefault(require("fs-extra"));
const class_validator_1 = require("class-validator");
const class_transformer_1 = require("class-transformer");
const pino_1 = __importDefault(require("pino"));
const classes_1 = require("./classes");
// Declare pino logger as importing would cause dependency cycle
const L = (0, pino_1.default)({
    transport: {
        target: 'pino-pretty',
        options: {
            translateTime: `SYS:standard`,
        },
    },
    formatters: {
        level: (label) => {
            return { level: label };
        },
    },
    level: process.env.LOG_LEVEL || 'info',
    base: undefined,
});
// TODO: Add YAML parser
const EXTENSIONS = ['.json', '.json5']; // Allow .json or .json5 extension
const removeFileExtension = (filename) => {
    const ext = path_1.default.extname(filename);
    if (EXTENSIONS.includes(ext)) {
        return path_1.default.basename(filename, ext);
    }
    return path_1.default.basename(filename);
};
exports.CONFIG_DIR = process.env.CONFIG_DIR || 'config';
exports.CONFIG_FILE_NAME = process.env.CONFIG_FILE_NAME
    ? removeFileExtension(process.env.CONFIG_FILE_NAME)
    : 'config';
const configPaths = EXTENSIONS.map((ext) => path_1.default.resolve(exports.CONFIG_DIR, `${exports.CONFIG_FILE_NAME}${ext}`));
const configPath = configPaths.find((p) => fs_extra_1.default.existsSync(p));
// eslint-disable-next-line import/no-mutable-exports
let config;
if (!configPath) {
    L.warn('No config file detected');
    const newConfigPath = path_1.default.resolve(exports.CONFIG_DIR, `${exports.CONFIG_FILE_NAME}.json`);
    exports.config = config = new classes_1.AppConfig();
    try {
        L.info({ newConfigPath }, 'Creating new config file');
        fs_extra_1.default.writeJSONSync(newConfigPath, (0, class_transformer_1.instanceToPlain)(config), { spaces: 2 });
        L.info({ newConfigPath }, 'Wrote new default config file');
    }
    catch (err) {
        L.info(err, 'Not allowed to create new config. Continuing...');
    }
}
else {
    L.debug({ configPath });
    const parsedConfig = json5_1.default.parse(fs_extra_1.default.readFileSync(configPath, 'utf8'));
    exports.config = config = (0, class_transformer_1.plainToInstance)(classes_1.AppConfig, parsedConfig);
}
const errors = (0, class_validator_1.validateSync)(config, {
    validationError: {
        target: false,
    },
});
if (errors.length > 0) {
    L.error({ errors }, 'Validation error(s)');
    throw new Error('Invalid config');
}
L.debug({ config: (0, class_transformer_1.instanceToPlain)(config) });
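As implemented above, the loader probes config/config.json and then config/config.json5 with the defaults, and uses the first path that exists; if neither exists it writes a default config.json. A minimal override sketch (the env values are illustrative, and they must be set before this module is first required, since resolution happens at import time):

process.env.CONFIG_DIR = 'deploy/config';     // illustrative directory
process.env.CONFIG_FILE_NAME = 'bot.json5';   // extension is stripped, then .json/.json5 are probed
const { config } = require('./src/config/setup');
console.log(config.slashCommandName);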
temp_bench/src/logger.js (new file, 21 lines)
@@ -0,0 +1,21 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
require("dotenv/config");
const pino_1 = __importDefault(require("pino"));
const pino_pretty_1 = __importDefault(require("pino-pretty"));
const config_1 = require("./config");
const logger = (0, pino_1.default)({
    formatters: {
        level: (label) => {
            return { level: label };
        },
    },
    level: config_1.config.logLevel,
    base: undefined,
}, (0, pino_pretty_1.default)({
    translateTime: `SYS:standard`,
}));
exports.default = logger;
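Callers use the default export with pino's (mergeObject, message) call signature, as the other files in this commit do:

const logger = require('./src/logger').default;
logger.info({ guildId: 'load-test-guild' }, 'Example structured log line');
logger.debug('Only emitted when config.logLevel is debug or trace');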
temp_bench/src/markov-store.js (new file, 293 lines)
@@ -0,0 +1,293 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.MarkovStore = void 0;
exports.getMarkovStore = getMarkovStore;
exports.clearAllStores = clearAllStores;
require("source-map-support/register");
const promises_1 = __importDefault(require("fs/promises"));
const path_1 = __importDefault(require("path"));
const setup_1 = require("./config/setup");
const logger_1 = __importDefault(require("./logger"));
/**
 * Markov Store - High-performance serialized chain storage with alias method sampling
 *
 * This replaces database queries with O(1) serialized lookups and uses the alias method
 * for constant-time weighted random sampling instead of O(n) weighted selection.
 */
class MarkovStore {
    constructor(guildId) {
        this.chains = new Map();
        this.dirty = false;
        this.saveTimer = null;
        this.SAVE_DEBOUNCE_MS = 5000;
        this.storePath = path_1.default.join(setup_1.CONFIG_DIR, `markov_${guildId}.json`);
    }
    /**
     * Load chains from serialized storage
     */
    async load() {
        try {
            const data = await promises_1.default.readFile(this.storePath, 'utf-8');
            const parsed = JSON.parse(data);
            this.chains.clear();
            for (const [key, value] of Object.entries(parsed)) {
                this.chains.set(key, value);
            }
            logger_1.default.info({ chainCount: this.chains.size }, 'Loaded Markov chains from store');
        }
        catch (err) {
            if (err.code === 'ENOENT') {
                logger_1.default.info('No existing chain store found, starting fresh');
            }
            else {
                logger_1.default.error({ err }, 'Error loading Markov store');
            }
        }
    }
    /**
     * Save chains to serialized storage with debouncing
     */
    async save() {
        if (!this.dirty)
            return;
        try {
            // Cancel existing timer
            if (this.saveTimer) {
                clearTimeout(this.saveTimer);
            }
            // Debounce saves
            this.saveTimer = setTimeout(async () => {
                const data = Object.fromEntries(this.chains);
                await promises_1.default.writeFile(this.storePath, JSON.stringify(data, null, 0));
                this.dirty = false;
                logger_1.default.trace({ chainCount: this.chains.size }, 'Saved Markov chains to store');
            }, this.SAVE_DEBOUNCE_MS);
        }
        catch (err) {
            logger_1.default.error({ err }, 'Error saving Markov store');
        }
    }
    /**
     * Build alias table for O(1) weighted sampling
     * Implements the alias method: https://en.wikipedia.org/wiki/Alias_method
     */
    buildAliasTable(suffixes) {
        const n = suffixes.length;
        if (n === 0)
            return [];
        const aliasTable = new Array(n);
        const scaledWeights = new Array(n);
        const small = [];
        const large = [];
        // Scale weights to probabilities
        const totalWeight = suffixes.reduce((sum, s) => sum + s.weight, 0);
        for (let i = 0; i < n; i++) {
            scaledWeights[i] = (suffixes[i].weight / totalWeight) * n;
            if (scaledWeights[i] < 1) {
                small.push(i);
            }
            else {
                large.push(i);
            }
        }
        // Build alias table
        for (let i = 0; i < n; i++) {
            aliasTable[i] = {
                word: suffixes[i].word,
                alias: i, // Default to self
                weight: scaledWeights[i]
            };
        }
        while (small.length > 0 && large.length > 0) {
            const l = small.pop();
            const g = large.pop();
            aliasTable[l].alias = g;
            // Freeze slot l's acceptance probability at its current scaled weight;
            // scaledWeights[l] may have been reduced since the table was built, so
            // the value copied in above can be stale.
            aliasTable[l].weight = scaledWeights[l];
            scaledWeights[g] = scaledWeights[g] + scaledWeights[l] - 1;
            if (scaledWeights[g] < 1) {
                small.push(g);
            }
            else {
                large.push(g);
            }
        }
        // Remaining entries keep their own word with probability 1
        while (large.length > 0) {
            const g = large.pop();
            aliasTable[g].weight = 1;
        }
        while (small.length > 0) {
            const l = small.pop();
            aliasTable[l].weight = 1;
        }
        return aliasTable;
    }
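    // Worked example of the alias method above (weights are illustrative):
    //   suffixes = [{ word: 'a', weight: 3 }, { word: 'b', weight: 1 }]
    //   n = 2, totalWeight = 4, scaled = [1.5, 0.5] -> small = [1], large = [0]
    //   pairing: alias[1] = 0 with acceptance probability 0.5;
    //   scaled[0] becomes 1.5 + 0.5 - 1 = 1, so slot 0 ends with probability 1.
    //   Sampling picks a slot uniformly: slot 0 always yields 'a'; slot 1 yields
    //   'b' with p = 0.5, else its alias 'a'. So P('a') = 0.75 and P('b') = 0.25,
    //   matching the original 3:1 weights.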
    /**
     * Sample from alias table in O(1) time
     */
    sampleFromAliasTable(aliasTable) {
        if (aliasTable.length === 0)
            throw new Error('Empty alias table');
        const n = aliasTable.length;
        const i = Math.floor(Math.random() * n);
        const coinToss = Math.random();
        const entry = aliasTable[i];
        return coinToss < entry.weight ? entry.word : aliasTable[entry.alias].word;
    }
    /**
     * Add or update a prefix entry
     */
    addPrefix(prefix, suffix, weight = 1) {
        let entry = this.chains.get(prefix);
        if (!entry) {
            entry = {
                prefix,
                suffixes: [],
                totalWeight: 0
            };
            this.chains.set(prefix, entry);
        }
        // Find existing suffix or add new one
        const existingSuffix = entry.suffixes.find(s => s.word === suffix);
        if (existingSuffix) {
            existingSuffix.weight += weight;
        }
        else {
            entry.suffixes.push({ word: suffix, weight });
        }
        entry.totalWeight += weight;
        // Rebuild alias table for optimization
        if (entry.suffixes.length > 1) {
            entry.aliasTable = this.buildAliasTable(entry.suffixes);
        }
        this.dirty = true;
        this.save(); // Trigger debounced save
    }
    /**
     * Get next word for a prefix using alias method (O(1))
     */
    getNext(prefix) {
        const entry = this.chains.get(prefix);
        if (!entry || entry.suffixes.length === 0) {
            return null;
        }
        // Use alias table for O(1) sampling if available
        if (entry.aliasTable) {
            return this.sampleFromAliasTable(entry.aliasTable);
        }
        // Fallback to weighted random selection
        const totalWeight = entry.totalWeight;
        let random = Math.random() * totalWeight;
        for (const suffix of entry.suffixes) {
            random -= suffix.weight;
            if (random <= 0) {
                return suffix.word;
            }
        }
        // Fallback to first suffix (shouldn't happen with proper weights)
        return entry.suffixes[0].word;
    }
    /**
     * Generate a sequence of words from a starting prefix
     */
    generate(prefix, maxLength = 50) {
        const result = prefix.split(' ');
        let currentPrefix = prefix;
        for (let i = 0; i < maxLength; i++) {
            const nextWord = this.getNext(currentPrefix);
            if (!nextWord)
                break;
            result.push(nextWord);
            // Update prefix for next iteration (sliding window)
            const words = result.slice(-2); // Keep last 2 words for state
            currentPrefix = words.join(' ');
        }
        return result;
    }
    /**
     * Get all prefixes (for debugging/analysis)
     */
    getAllPrefixes() {
        return Array.from(this.chains.keys());
    }
    /**
     * Get chain statistics
     */
    getStats() {
        return {
            prefixCount: this.chains.size,
            totalSuffixes: Array.from(this.chains.values())
                .reduce((sum, entry) => sum + entry.suffixes.length, 0),
            memoryUsage: process.memoryUsage().heapUsed
        };
    }
    /**
     * Clear all chains
     */
    clear() {
        this.chains.clear();
        this.dirty = true;
        this.save();
    }
    /**
     * Remove a specific prefix
     */
    removePrefix(prefix) {
        if (this.chains.delete(prefix)) {
            this.dirty = true;
            this.save();
        }
    }
    /**
     * Import chains from database format (for migration)
     */
    async importFromDatabase(chains) {
        logger_1.default.info({ chainCount: chains.length }, 'Importing chains from database');
        for (const chain of chains) {
            this.addPrefix(chain.prefix, chain.suffix, chain.weight);
        }
        this.dirty = true;
        await this.save();
        logger_1.default.info('Chain import completed');
    }
    /**
     * Export chains to database format (for fallback)
     */
    exportToDatabase() {
        const result = [];
        for (const [prefix, entry] of this.chains) {
            for (const suffix of entry.suffixes) {
                result.push({
                    prefix,
                    suffix: suffix.word,
                    weight: suffix.weight
                });
            }
        }
        return result;
    }
}
exports.MarkovStore = MarkovStore;
/**
 * Global store cache for performance
 */
const storeCache = new Map();
/**
 * Get or create a Markov store for a guild
 */
async function getMarkovStore(guildId) {
    if (!storeCache.has(guildId)) {
        const store = new MarkovStore(guildId);
        await store.load();
        storeCache.set(guildId, store);
    }
    return storeCache.get(guildId);
}
/**
 * Clear all cached stores
 */
function clearAllStores() {
    storeCache.clear();
}
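A minimal end-to-end sketch of the store API above; the training pairs are illustrative, and getMarkovStore caches one store per guild:

const { getMarkovStore } = require('./src/markov-store');

async function demo() {
    const store = await getMarkovStore('demo-guild');
    // Single-word prefixes, matching how load_test.js builds its training pairs.
    store.addPrefix('hello', 'world', 2);
    store.addPrefix('hello', 'there', 1);
    console.log(store.getNext('hello'));      // 'world' ~2/3 of the time, 'there' ~1/3
    console.log(store.generate('hello', 10)); // e.g. ['hello', 'world']; note that
    // generate() widens the lookup key to the last two words after the first step,
    // so single-word training data usually terminates after one generated word.
    console.log(store.getStats());
}

demo().catch(console.error);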
temp_bench/src/workers/worker-pool.js (new file, 299 lines)
@@ -0,0 +1,299 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.WorkerPool = void 0;
exports.getWorkerPool = getWorkerPool;
exports.shutdownWorkerPool = shutdownWorkerPool;
const worker_threads_1 = require("worker_threads");
const events_1 = require("events");
const path_1 = __importDefault(require("path"));
const logger_1 = __importDefault(require("../logger"));
/**
 * Worker pool for managing Markov worker threads
 */
class WorkerPool extends events_1.EventEmitter {
    constructor(maxWorkers = 4) {
        super();
        this.workers = [];
        this.taskQueue = [];
        this.activeTasks = new Map();
        this.maxWorkers = maxWorkers;
        this.workerPath = path_1.default.join(__dirname, 'markov-worker.js');
        this.initializeWorkers();
    }
    /**
     * Initialize worker threads
     */
    async initializeWorkers() {
        logger_1.default.info({ maxWorkers: this.maxWorkers }, 'Initializing worker pool');
        for (let i = 0; i < this.maxWorkers; i++) {
            await this.createWorker(i);
        }
        logger_1.default.info({ workerCount: this.workers.length }, 'Worker pool initialized');
    }
    /**
     * Create a single worker
     */
    async createWorker(workerId) {
        return new Promise((resolve, reject) => {
            const worker = new worker_threads_1.Worker(this.workerPath, {
                workerData: { workerId },
            });
            // Handle worker ready message
            worker.once('message', (message) => {
                if (message.success && message.result?.status === 'ready') {
                    logger_1.default.info({ workerId }, 'Worker ready');
                    resolve();
                }
                else {
                    reject(new Error(message.error || 'Worker failed to initialize'));
                }
            });
            // Handle worker errors
            worker.on('error', (error) => {
                logger_1.default.error({ workerId, error: error.message }, 'Worker error');
                this.handleWorkerError(workerId, error);
            });
            worker.on('exit', (code) => {
                logger_1.default.warn({ workerId, code }, 'Worker exited');
                this.handleWorkerExit(workerId, code);
            });
            // Handle task results
            worker.on('message', (message) => {
                if (message.success === false || message.success === true) {
                    this.handleTaskResult(message);
                }
            });
            this.workers[workerId] = worker;
            this.emit('workerCreated', workerId);
        });
    }
    /**
     * Handle worker errors
     */
    handleWorkerError(workerId, error) {
        logger_1.default.error({ workerId, error: error.message }, 'Worker error, restarting');
        // Remove failed worker
        const worker = this.workers[workerId];
        if (worker) {
            worker.terminate();
            delete this.workers[workerId];
        }
        // Restart worker
        setTimeout(() => {
            this.createWorker(workerId).catch((err) => {
                logger_1.default.error({ workerId, error: err }, 'Failed to restart worker');
            });
        }, 1000);
    }
    /**
     * Handle worker exit
     */
    handleWorkerExit(workerId, code) {
        if (code !== 0) {
            logger_1.default.warn({ workerId, code }, 'Worker exited with non-zero code, restarting');
            setTimeout(() => {
                this.createWorker(workerId).catch((err) => {
                    logger_1.default.error({ workerId, error: err }, 'Failed to restart worker');
                });
            }, 1000);
        }
    }
    /**
     * Handle task completion
     */
    handleTaskResult(message) {
        // activeTasks is keyed by String(workerId) in processNextTask, so
        // normalize here in case the worker reports its id as a number.
        const workerKey = String(message.workerId);
        const task = this.activeTasks.get(workerKey);
        if (!task) {
            logger_1.default.warn({ workerId: message.workerId }, 'Received result for unknown task');
            return;
        }
        this.activeTasks.delete(workerKey);
        if (message.success) {
            task.resolve(message.result);
        }
        else {
            task.reject(new Error(message.error || 'Worker task failed'));
        }
        // Process next task
        this.processNextTask();
    }
    /**
     * Process next task from queue
     */
    processNextTask() {
        if (this.taskQueue.length === 0)
            return;
        // Find available worker
        const availableWorkerId = this.findAvailableWorker();
        if (availableWorkerId === -1)
            return;
        // Get highest priority task
        const sortedTasks = this.taskQueue.sort((a, b) => b.priority - a.priority);
        const task = sortedTasks.shift();
        this.taskQueue = sortedTasks;
        this.activeTasks.set(String(availableWorkerId), task);
        // Send task to worker
        const worker = this.workers[availableWorkerId];
        if (worker) {
            worker.postMessage({
                type: task.type,
                data: task.data,
                taskId: task.id
            });
        }
    }
    /**
     * Find available worker
     */
    findAvailableWorker() {
        for (let i = 0; i < this.maxWorkers; i++) {
            if (this.workers[i] && !this.activeTasks.has(String(i))) {
                return i;
            }
        }
        return -1;
    }
    /**
     * Submit a task to the worker pool
     */
    async submitTask(type, data, priority = 1) {
        return new Promise((resolve, reject) => {
            const task = {
                id: `${type}-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`,
                type,
                data,
                resolve,
                reject,
                priority,
                timestamp: Date.now()
            };
            this.taskQueue.push(task);
            this.processNextTask();
        });
    }
    /**
     * Build chains from training data
     */
    async buildChains(guildId, messages, clearExisting = false, priority = 1) {
        const workerData = {
            guildId,
            messages,
            clearExisting
        };
        return this.submitTask('build-chains', workerData, priority);
    }
    /**
     * Generate response using worker
     */
    async generateResponse(guildId, prefix, maxLength = 50, temperature = 1.0, priority = 1) {
        const workerData = {
            guildId,
            prefix,
            maxLength,
            temperature
        };
        return this.submitTask('generate-response', workerData, priority);
    }
    /**
     * Batch update chains
     */
    async batchUpdate(guildId, updates, operation, priority = 1) {
        const workerData = {
            guildId,
            updates,
            operation
        };
        return this.submitTask('batch-update', workerData, priority);
    }
    /**
     * Get worker statistics
     */
    async getStats() {
        const promises = [];
        for (let i = 0; i < this.maxWorkers; i++) {
            if (this.workers[i]) {
                promises.push(this.submitTask('stats', { workerId: i }, 0));
            }
        }
        return Promise.all(promises);
    }
    /**
     * Get pool statistics
     */
    getPoolStats() {
        return {
            totalWorkers: this.maxWorkers,
            activeWorkers: this.activeTasks.size,
            queuedTasks: this.taskQueue.length,
            activeTasks: Array.from(this.activeTasks.keys()),
            availableWorkers: this.workers.filter((w, i) => w && !this.activeTasks.has(String(i))).length
        };
    }
    /**
     * Gracefully shutdown the worker pool
     */
    async shutdown() {
        logger_1.default.info('Shutting down worker pool');
        // Wait for active tasks to complete
        const shutdownPromises = [];
        for (let i = 0; i < this.maxWorkers; i++) {
            const worker = this.workers[i];
            if (worker) {
                shutdownPromises.push(new Promise((resolve) => {
                    worker.once('exit', () => resolve());
                    worker.postMessage({ type: 'shutdown' });
                    // Force terminate after 5 seconds
                    setTimeout(() => {
                        worker.terminate().then(() => resolve());
                    }, 5000);
                }));
            }
        }
        await Promise.all(shutdownPromises);
        logger_1.default.info('Worker pool shutdown complete');
    }
    /**
     * Emergency shutdown (force terminate all workers)
     */
    async forceShutdown() {
        logger_1.default.warn('Force shutting down worker pool');
        const shutdownPromises = [];
        for (let i = 0; i < this.maxWorkers; i++) {
            const worker = this.workers[i];
            if (worker) {
                shutdownPromises.push(worker.terminate().then(() => { }));
            }
        }
        await Promise.all(shutdownPromises);
        this.workers = [];
        this.taskQueue = [];
        this.activeTasks.clear();
        logger_1.default.info('Force shutdown complete');
    }
}
exports.WorkerPool = WorkerPool;
/**
 * Global worker pool instance
 */
let globalWorkerPool = null;
/**
 * Get or create global worker pool
 */
function getWorkerPool(maxWorkers = 4) {
    if (!globalWorkerPool) {
        globalWorkerPool = new WorkerPool(maxWorkers);
    }
    return globalWorkerPool;
}
/**
 * Shutdown global worker pool
 */
async function shutdownWorkerPool() {
    if (globalWorkerPool) {
        await globalWorkerPool.shutdown();
        globalWorkerPool = null;
    }
}
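A minimal driver for the pool API above; it assumes the compiled markov-worker.js (not part of this diff) sits next to worker-pool.js, as workerPath expects, and the task payloads mirror those used by load_test.js:

const { getWorkerPool, shutdownWorkerPool } = require('./src/workers/worker-pool');

async function demo() {
    const pool = getWorkerPool(2); // singleton; later calls reuse the same pool
    await pool.buildChains('demo-guild', [
        { prefix: 'hello', suffix: 'world', weight: 1 },
    ], true, 2); // clearExisting = true, priority = 2
    const response = await pool.generateResponse('demo-guild', 'hello', 30, 1.0, 1);
    console.log(response, pool.getPoolStats());
    await shutdownWorkerPool(); // drains in-flight tasks, then terminates workers
}

demo().catch(console.error);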