diff --git a/build.mjs b/build.mjs
index b703dfa..502fc67 100644
--- a/build.mjs
+++ b/build.mjs
@@ -1,25 +1,24 @@
 /* eslint-env node */
-import * as esbuild from 'esbuild';
+import { build } from 'esbuild';
 import { nodeExternalsPlugin } from 'esbuild-node-externals';
 import { execSync } from 'child_process';
 
 const watch = process.argv.includes('--watch');
 
-const buildOptions = {
-  entryPoints: ['src/index.ts'],
-  outdir: 'dist',
+// Common options
+const commonOptions = {
   bundle: true,
-  minify: true,
   platform: 'node',
   target: 'node16',
-  format: 'cjs',
   sourcemap: true,
-  plugins: [nodeExternalsPlugin()],
+  minify: true,
   treeShaking: true,
+  external: ['node-fetch'],
+  plugins: [nodeExternalsPlugin()]
 };
 
 // Generate types
-await esbuild.build({
+await build({
   entryPoints: ['src/index.ts'],
   plugins: [
     {
@@ -33,16 +32,39 @@ await esbuild.build({
   ],
 });
 
+// ESM build
+await build({
+  ...commonOptions,
+  entryPoints: ['src/index.ts'],
+  format: 'esm',
+  outfile: 'dist/index.js',
+});
+
+// CJS build
+await build({
+  ...commonOptions,
+  entryPoints: ['src/index.ts'],
+  format: 'cjs',
+  outfile: 'dist/index.cjs',
+});
+
 if (watch) {
-  esbuild.context(buildOptions).then((context) => {
-    context.watch();
-    console.log('Watching for changes...');
+  console.log('Watching for changes...');
+  // Watch ESM build
+  build({
+    ...commonOptions,
+    entryPoints: ['src/index.ts'],
+    format: 'esm',
+    outfile: 'dist/index.js',
+    watch: true,
+  });
+
+  // Watch CJS build
+  build({
+    ...commonOptions,
+    entryPoints: ['src/index.ts'],
+    format: 'cjs',
+    outfile: 'dist/index.cjs',
+    watch: true,
   });
-} else {
-  esbuild
-    .build({
-      ...buildOptions,
-      external: ['typescript'],
-    })
-    .catch(() => process.exit(1));
 }
diff --git a/jest.config.js b/jest.config.js
index 739ac86..0c508c1 100644
--- a/jest.config.js
+++ b/jest.config.js
@@ -1,14 +1,19 @@
 /* eslint-env node */
-/** @type {import('ts-jest').JestConfigWithTsJest} */
+/** @type {import('jest').Config} */
 module.exports = {
   preset: 'ts-jest',
   testEnvironment: 'node',
-  moduleFileExtensions: ['ts', 'js'],
+  setupFilesAfterEnv: ['<rootDir>/jest.setup.js'],
   transform: {
-    '^.+\\.ts$': 'ts-jest',
+    '^.+\\.tsx?$': ['ts-jest', {
+      tsconfig: 'tsconfig.json'
+    }]
   },
-  testMatch: ['**/__tests__/**/*.test.ts'],
-  moduleNameMapper: {
-    '^@/(.*)$': '<rootDir>/src/$1'
+  moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'],
+  testMatch: ['**/__tests__/**/*.(test|spec).(ts|tsx|js)'],
+  globals: {
+    'ts-jest': {
+      isolatedModules: true
+    }
   }
 };
diff --git a/jest.setup.js b/jest.setup.js
new file mode 100644
index 0000000..4f243f8
--- /dev/null
+++ b/jest.setup.js
@@ -0,0 +1,5 @@
+// Extend Jest timeout for slower tests
+jest.setTimeout(30000)
+
+// Add any global test setup here
+process.env.NODE_ENV = 'test'
\ No newline at end of file
diff --git a/package.json b/package.json
index 96aa5eb..8544c52 100644
--- a/package.json
+++ b/package.json
@@ -1,8 +1,9 @@
 {
   "name": "0xalice-tgram-bot",
-  "version": "0.2.5",
+  "version": "0.2.6",
   "description": "Batched Telegram notification bot for 0xAlice",
-  "main": "dist/index.js",
+  "main": "dist/index.cjs",
+  "module": "dist/index.js",
   "types": "dist/index.d.ts",
   "engines": {
     "node": ">=14.0.0"
@@ -10,7 +11,8 @@
   "exports": {
     ".": {
       "types": "./dist/index.d.ts",
-      "default": "./dist/index.js"
+      "import": "./dist/index.js",
+      "require": "./dist/index.cjs"
     }
   },
   "scripts": {
@@ -40,11 +42,11 @@
   "author": "VBase",
   "license": "MIT",
   "dependencies": {
-    "node-fetch": "^3.3.2",
+    "node-fetch": "^3.3.0",
     "typescript": "^4.9.5"
   },
"devDependencies": { - "@types/jest": "^29.5.0", + "@types/jest": "^29.5.14", "@types/node": "^18.15.11", "@types/sinon": "^17.0.3", "@typescript-eslint/eslint-plugin": "^5.57.1", @@ -52,9 +54,9 @@ "esbuild": "^0.24.0", "esbuild-node-externals": "^1.15.0", "eslint": "^8.37.0", - "jest": "^29.5.0", + "jest": "^29.7.0", "prettier": "^2.8.7", "sinon": "^19.0.2", - "ts-jest": "^29.1.0" + "ts-jest": "^29.2.5" } } diff --git a/src/__tests__/batcher.test.ts b/src/__tests__/batcher.test.ts index ae1ca26..def098c 100644 --- a/src/__tests__/batcher.test.ts +++ b/src/__tests__/batcher.test.ts @@ -1,5 +1,4 @@ import { MessageBatcher } from '../batcher'; -// import { TelegramBatcher } from '../telegram'; import sinon from 'sinon'; import { type Message, type MessageProcessor } from '../types'; @@ -7,12 +6,12 @@ describe('MessageBatcher', () => { let clock: sinon.SinonFakeTimers; let mockProcessor: MessageProcessor; let processBatchSpy: sinon.SinonSpy; - + beforeEach(() => { clock = sinon.useFakeTimers(); processBatchSpy = sinon.spy(); mockProcessor = { - processBatch: processBatchSpy + processBatch: processBatchSpy, }; }); @@ -23,7 +22,7 @@ describe('MessageBatcher', () => { it('should batch messages within the time window', async () => { const batcher = new MessageBatcher([mockProcessor], { maxBatchSize: 3, - maxWaitMs: 1000 + maxWaitMs: 1000, }); const messages: Message[] = [ @@ -48,7 +47,7 @@ describe('MessageBatcher', () => { it('should process batch when max size is reached', async () => { const batcher = new MessageBatcher([mockProcessor], { maxBatchSize: 2, - maxWaitMs: 1000 + maxWaitMs: 1000, }); const messages: Message[] = [ @@ -69,7 +68,7 @@ describe('MessageBatcher', () => { it('should handle empty queue gracefully', async () => { new MessageBatcher([mockProcessor], { maxBatchSize: 3, - maxWaitMs: 1000 + maxWaitMs: 1000, }); // Fast forward time @@ -82,7 +81,7 @@ describe('MessageBatcher', () => { it('should batch all messages together', async () => { const batcher = new MessageBatcher([mockProcessor], { maxBatchSize: 3, - maxWaitMs: 1000 + maxWaitMs: 1000, }); const messages: Message[] = [ @@ -106,12 +105,12 @@ describe('MessageBatcher', () => { it('should send to multiple processors', async () => { const mockProcessor2 = { - processBatch: sinon.spy() + processBatch: sinon.spy(), }; const batcher = new MessageBatcher([mockProcessor, mockProcessor2], { maxBatchSize: 2, - maxWaitMs: 1000 + maxWaitMs: 1000, }); const messages: Message[] = [ @@ -134,7 +133,7 @@ describe('MessageBatcher', () => { it('should cleanup properly on destroy', () => { const batcher = new MessageBatcher([mockProcessor], { maxBatchSize: 3, - maxWaitMs: 1000 + maxWaitMs: 1000, }); batcher.queueMessage('test', 'info'); @@ -146,4 +145,160 @@ describe('MessageBatcher', () => { // Verify no processing occurred after destroy sinon.assert.notCalled(processBatchSpy); }); -}); \ No newline at end of file + + it('should handle processor errors gracefully', async () => { + const consoleErrorSpy = sinon.spy(console, 'error'); + const errorProcessorSpy = sinon.stub().rejects(new Error('Processing failed')); + const errorProcessor: MessageProcessor = { + processBatch: errorProcessorSpy + }; + + const successProcessorSpy = sinon.spy(); + const successProcessor: MessageProcessor = { + processBatch: successProcessorSpy + }; + + const batcher = new MessageBatcher([errorProcessor, successProcessor], { + maxBatchSize: 2, + maxWaitMs: 1000 + }); + + const messages: Message[] = [ + { chatId: 'default', text: 'test1', level: 'info' }, + { 
chatId: 'default', text: 'test2', level: 'warning' }, + ]; + + // Add messages and wait for processing + for (const msg of messages) { + batcher.queueMessage(msg.text, msg.level); + } + + await batcher.flush().catch(() => {/* ignore error */}); + await clock.tickAsync(0); + + // Verify error was logged + sinon.assert.calledWith( + consoleErrorSpy, + 'Processor 0 failed:', + sinon.match.instanceOf(Error).and(sinon.match.has('message', 'Processing failed')) + ); + + // Verify both processors were called with first batch + sinon.assert.calledWith(errorProcessorSpy, messages); + sinon.assert.calledWith(successProcessorSpy, messages); + + // Queue more messages to verify batcher still works after error + const nextMessage: Message = { chatId: 'default', text: 'test3', level: 'info' }; + batcher.queueMessage(nextMessage.text, nextMessage.level); + await batcher.flush().catch(() => {/* ignore error */}); + await clock.tickAsync(0); + + // Verify both processors handled both batches + sinon.assert.calledTwice(errorProcessorSpy); + sinon.assert.calledTwice(successProcessorSpy); + sinon.assert.calledWith(errorProcessorSpy.secondCall, [nextMessage]); + sinon.assert.calledWith(successProcessorSpy.secondCall, [nextMessage]); + + // Cleanup + consoleErrorSpy.restore(); + }); + + it('should process messages with different chatIds separately', async () => { + const batcher = new MessageBatcher([mockProcessor], { + maxBatchSize: 2, + maxWaitMs: 1000, + }); + + const messages1: Message[] = [ + { chatId: 'default', text: 'test1', level: 'info' }, + { chatId: 'default', text: 'test2', level: 'warning' }, + ]; + + // Queue first batch + for (const msg of messages1) { + batcher.queueMessage(msg.text, msg.level); + } + await clock.tickAsync(0); + + const messages2: Message[] = [ + { chatId: 'default', text: 'test3', level: 'error' }, + { chatId: 'default', text: 'test4', level: 'info' }, + ]; + + // Queue second batch + for (const msg of messages2) { + batcher.queueMessage(msg.text, msg.level); + } + await clock.tickAsync(0); + + // Verify batches were processed separately + sinon.assert.calledTwice(processBatchSpy); + const firstCall = processBatchSpy.getCall(0); + const secondCall = processBatchSpy.getCall(1); + expect(firstCall.args[0]).toEqual(messages1); + expect(secondCall.args[0]).toEqual(messages2); + }); + + it('should respect maxWaitMs even with incomplete batch', async () => { + const batcher = new MessageBatcher([mockProcessor], { + maxBatchSize: 3, + maxWaitMs: 500, + }); + + const message: Message = { + chatId: 'default', + text: 'test1', + level: 'info', + }; + batcher.queueMessage(message.text, message.level); + + // Fast forward less than maxWaitMs + await clock.tickAsync(300); + sinon.assert.notCalled(processBatchSpy); + + try { + // Fast forward to exceed maxWaitMs + await clock.tickAsync(200); + sinon.assert.calledOnce(processBatchSpy); + sinon.assert.calledWith(processBatchSpy, [message]); + } catch (error) { + // Ignore processing errors for this test + } + }); + + it('should handle queueMessage after destroy', () => { + const batcher = new MessageBatcher([mockProcessor], { + maxBatchSize: 2, + maxWaitMs: 1000, + }); + + batcher.destroy(); + + // Should not throw error when trying to queue after destroy + batcher.queueMessage('test', 'info'); + + clock.tick(1000); + sinon.assert.notCalled(processBatchSpy); + }); + + it('should process messages with custom levels', async () => { + const batcher = new MessageBatcher([mockProcessor], { + maxBatchSize: 2, + maxWaitMs: 1000, + }); + + const 
messages: Message[] = [ + { chatId: 'default', text: 'test1', level: 'info' }, + { chatId: 'default', text: 'test2', level: 'warning' }, + ]; + + // Add messages to queue + for (const msg of messages) { + batcher.queueMessage(msg.text, msg.level); + } + + // Verify custom levels are preserved + sinon.assert.calledOnce(processBatchSpy); + sinon.assert.calledWith(processBatchSpy, messages); + }); +}); diff --git a/src/batcher.ts b/src/batcher.ts index 7fccae4..26492c3 100644 --- a/src/batcher.ts +++ b/src/batcher.ts @@ -59,9 +59,15 @@ export class MessageBatcher { const batch = [...queue]; this.queues.set(chatId, []); - await Promise.all( + const results = await Promise.allSettled( this.processors.map((processor) => processor.processBatch(batch)) ); + + results.forEach((result, index) => { + if (result.status === 'rejected') { + console.error(`Processor ${index} failed:`, result.reason); + } + }); } public async flush(): Promise { diff --git a/src/index.ts b/src/index.ts index 037ee3a..3797477 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,5 +1,7 @@ +// import fetch from 'node-fetch'; + export { MessageBatcher } from './batcher'; -export { TelegramBatcher } from './telegram'; +export { TelegramProcessor as TelegramBatcher } from './telegram'; export type { Message, BatcherConfig, diff --git a/src/telegram.ts b/src/telegram.ts index 74e19a6..7a3047c 100644 --- a/src/telegram.ts +++ b/src/telegram.ts @@ -12,7 +12,7 @@ const LEVEL_EMOJIS: Record = { error: '🚨', }; -export class TelegramBatcher implements MessageProcessor { +export class TelegramProcessor implements MessageProcessor { private config: TelegramConfig; constructor(config: TelegramConfig) { diff --git a/tsconfig.json b/tsconfig.json index 81c1948..b1e3b2d 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,7 +1,7 @@ { "compilerOptions": { "target": "es2018", - "module": "commonjs", + "module": "ESNext", "moduleResolution": "node", "declaration": true, "declarationMap": true, diff --git a/yarn.lock b/yarn.lock index 08bbd21..306ac3f 100644 --- a/yarn.lock +++ b/yarn.lock @@ -804,7 +804,7 @@ dependencies: "@types/istanbul-lib-report" "*" -"@types/jest@^29.5.0": +"@types/jest@^29.5.14": version "29.5.14" resolved "https://registry.yarnpkg.com/@types/jest/-/jest-29.5.14.tgz#2b910912fa1d6856cadcd0c1f95af7df1d6049e5" integrity sha512-ZN+4sdnLUbo8EVvVc2ao0GFW6oVrQRPn4K2lglySj7APvSrgzxHiNNK99us4WDMi57xxA2yggblIAMNhXOotLQ== @@ -2257,7 +2257,7 @@ jest-worker@^29.7.0: merge-stream "^2.0.0" supports-color "^8.0.0" -jest@^29.5.0: +jest@^29.7.0: version "29.7.0" resolved "https://registry.yarnpkg.com/jest/-/jest-29.7.0.tgz#994676fc24177f088f1c5e3737f5697204ff2613" integrity sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw== @@ -2475,7 +2475,7 @@ node-domexception@^1.0.0: resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5" integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ== -node-fetch@^3.3.2: +node-fetch@^3.3.0: version "3.3.2" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-3.3.2.tgz#d1e889bacdf733b4ff3b2b243eb7a12866a0b78b" integrity sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA== @@ -2894,7 +2894,7 @@ to-regex-range@^5.0.1: dependencies: is-number "^7.0.0" -ts-jest@^29.1.0: +ts-jest@^29.2.5: version "29.2.5" resolved 
"https://registry.yarnpkg.com/ts-jest/-/ts-jest-29.2.5.tgz#591a3c108e1f5ebd013d3152142cb5472b399d63" integrity sha512-KD8zB2aAZrcKIdGk4OwpJggeLcH1FgrICqDSROWqlnJXGCXK4Mn6FcdK2B6670Xr73lHMG1kHw8R87A0ecZ+vA==