/**
 * Unit tests for src/core/writer.ts
 *
 * All Postgres I/O is mocked — no real database required. The integration test
 * (task 1.10) covers byte-level round-trip including TimescaleDB hypertable.
 *
 * Covers:
 * - Happy path: all records inserted (all appear in RETURNING rows)
 * - Duplicate-key: ON CONFLICT DO NOTHING → records absent from RETURNING → 'duplicate'
 * - Mixed: half new, half duplicate
 * - Pool error: all records in the batch get 'failed'; error is attached
 * - Chunking: batch larger than WRITE_BATCH_SIZE results in multiple queries
 * - Bigint attribute is stringified before serialization
 * - Buffer attribute is base64-encoded before serialization
 * - Empty batch returns empty results
 * - SQL parameter order: device_id, ts, latitude, longitude, altitude, angle,
 *   speed, satellites, priority, codec, attributes
 */

import { describe, it, expect, vi } from 'vitest';
import type { Logger } from 'pino';
import type { Pool } from 'pg';
import type { Config } from '../src/config/load.js';
import type { Metrics, Position } from '../src/core/types.js';
import type { ConsumedRecord } from '../src/core/consumer.js';
import { createWriter } from '../src/core/writer.js';

// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------

function makeSilentLogger(): Logger {
  return {
    debug: vi.fn(),
    info: vi.fn(),
    warn: vi.fn(),
    error: vi.fn(),
    fatal: vi.fn(),
    child: vi.fn().mockReturnThis(),
    trace: vi.fn(),
    level: 'silent',
    silent: vi.fn(),
  } as unknown as Logger;
}

function makeMetrics(): Metrics {
  return {
    inc: vi.fn(),
    observe: vi.fn(),
  };
}

function makeConfig(overrides: Partial<Config> = {}): Config {
  return {
    NODE_ENV: 'test',
    INSTANCE_ID: 'test-processor',
    LOG_LEVEL: 'silent',
    REDIS_URL: 'redis://localhost:6379',
    POSTGRES_URL: 'postgres://localhost:5432/test',
    REDIS_TELEMETRY_STREAM: 'telemetry:t',
    REDIS_CONSUMER_GROUP: 'processor',
    REDIS_CONSUMER_NAME: 'test-consumer',
    METRICS_PORT: 9090,
    BATCH_SIZE: 10,
    BATCH_BLOCK_MS: 100,
    WRITE_BATCH_SIZE: 50,
    DEVICE_STATE_LRU_CAP: 1000,
    ...overrides,
  };
}

function makePosition(deviceId: string, overrides: Partial<Position> = {}): Position {
  return {
    device_id: deviceId,
    timestamp: new Date('2024-05-01T12:00:00.000Z'),
    latitude: 54.6872,
    longitude: 25.2797,
    altitude: 100,
    angle: 90,
    speed: 50,
    satellites: 12,
    priority: 1,
    attributes: {},
    ...overrides,
  };
}

function makeRecord(id: string, deviceId: string, overrides: Partial<Position> = {}): ConsumedRecord {
  return {
    id,
    position: makePosition(deviceId, overrides),
    codec: '8',
    ts: '2024-05-01T12:00:00.000Z',
  };
}

/**
 * Creates a mock pg.Pool whose query() returns the given rows.
 * Captures all SQL and params for assertion.
 */
function makeMockPool(
  queryResponses: Array<{ rows: unknown[] } | Error>,
): {
  pool: Pool;
  queryCalls: Array<{ sql: string; params: unknown[] }>;
} {
  const queryCalls: Array<{ sql: string; params: unknown[] }> = [];
  let callIndex = 0;
  const query = vi.fn(async (sql: string, params: unknown[] = []) => {
    queryCalls.push({ sql, params });
    const response = queryResponses[callIndex++];
    if (response instanceof Error) throw response;
    return response ?? { rows: [] };
  });
  return {
    pool: { query } as unknown as Pool,
    queryCalls,
  };
}
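// ---------------------------------------------------------------------------
// Reference: assumed statement shape
// ---------------------------------------------------------------------------
// The assertions below (the ON CONFLICT / RETURNING matchers, the 11-parameter
// column order, and RETURNING-based duplicate detection) assume the writer
// issues one multi-row INSERT per chunk, roughly like this sketch. The table
// name and conflict target here are illustrative assumptions, not a copy of
// the production SQL in src/core/writer.ts:
//
//   INSERT INTO positions
//     (device_id, ts, latitude, longitude, altitude, angle, speed,
//      satellites, priority, codec, attributes)
//   VALUES ($1, ..., $11), ($12, ..., $22), ...
//   ON CONFLICT (device_id, ts) DO NOTHING
//   RETURNING device_id, ts;
//
// A (device_id, ts) pair echoed back in RETURNING was inserted; a pair absent
// from RETURNING hit the conflict target and is reported as 'duplicate'.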
// ---------------------------------------------------------------------------
// Tests — happy path
// ---------------------------------------------------------------------------

describe('createWriter — happy path', () => {
  it('returns inserted for all records when all appear in RETURNING', async () => {
    const ts = new Date('2024-05-01T12:00:00.000Z');
    const records = [
      makeRecord('1-0', 'DEV001', { timestamp: ts }),
      makeRecord('1-1', 'DEV002', { timestamp: ts }),
    ];
    const { pool } = makeMockPool([
      {
        rows: [
          { device_id: 'DEV001', ts },
          { device_id: 'DEV002', ts },
        ],
      },
    ]);
    const writer = createWriter(pool, makeConfig(), makeSilentLogger(), makeMetrics());

    const results = await writer.write(records);

    expect(results).toHaveLength(2);
    expect(results[0]).toEqual({ id: '1-0', status: 'inserted' });
    expect(results[1]).toEqual({ id: '1-1', status: 'inserted' });
  });

  it('returns duplicate for records absent from RETURNING', async () => {
    const ts = new Date('2024-05-01T12:00:00.000Z');
    const records = [
      makeRecord('2-0', 'DEV003', { timestamp: ts }),
      makeRecord('2-1', 'DEV004', { timestamp: ts }),
    ];
    // Only DEV003 returned — DEV004 was a conflict
    const { pool } = makeMockPool([{ rows: [{ device_id: 'DEV003', ts }] }]);
    const writer = createWriter(pool, makeConfig(), makeSilentLogger(), makeMetrics());

    const results = await writer.write(records);

    expect(results).toHaveLength(2);
    expect(results.find((r) => r.id === '2-0')?.status).toBe('inserted');
    expect(results.find((r) => r.id === '2-1')?.status).toBe('duplicate');
  });

  it('handles mixed batch: some inserted, some duplicate', async () => {
    const ts = new Date('2024-05-01T12:00:00.000Z');
    const records = [
      makeRecord('3-0', 'DEV005', { timestamp: ts }),
      makeRecord('3-1', 'DEV006', { timestamp: ts }),
      makeRecord('3-2', 'DEV007', { timestamp: ts }),
      makeRecord('3-3', 'DEV008', { timestamp: ts }),
    ];
    // DEV005 and DEV007 are new; DEV006 and DEV008 are duplicates
    const { pool } = makeMockPool([
      {
        rows: [
          { device_id: 'DEV005', ts },
          { device_id: 'DEV007', ts },
        ],
      },
    ]);
    const writer = createWriter(pool, makeConfig(), makeSilentLogger(), makeMetrics());

    const results = await writer.write(records);

    expect(results.find((r) => r.id === '3-0')?.status).toBe('inserted');
    expect(results.find((r) => r.id === '3-1')?.status).toBe('duplicate');
    expect(results.find((r) => r.id === '3-2')?.status).toBe('inserted');
    expect(results.find((r) => r.id === '3-3')?.status).toBe('duplicate');
  });

  it('returns empty array for empty input', async () => {
    const { pool } = makeMockPool([]);
    const writer = createWriter(pool, makeConfig(), makeSilentLogger(), makeMetrics());

    const results = await writer.write([]);

    expect(results).toEqual([]);
  });
});

// ---------------------------------------------------------------------------
// Tests — failure handling
// ---------------------------------------------------------------------------

describe('createWriter — pool error', () => {
  it('returns failed for all records in the chunk when pool throws', async () => {
    const ts = new Date('2024-05-01T12:00:00.000Z');
    const records = [
      makeRecord('4-0', 'DEV009', { timestamp: ts }),
      makeRecord('4-1', 'DEV010', { timestamp: ts }),
    ];
    const dbError = new Error('connection terminated unexpectedly');
    const { pool } = makeMockPool([dbError]);
    const writer = createWriter(pool, makeConfig(), makeSilentLogger(), makeMetrics());

    const results = await writer.write(records);

    expect(results).toHaveLength(2);
    for (const result of results) {
      expect(result.status).toBe('failed');
      expect(result.error).toBeDefined();
      expect(result.error?.message).toBe('connection terminated unexpectedly');
    }
  });

  it('logs error and increments failed metric on pool error', async () => {
    const records = [makeRecord('5-0', 'DEV011')];
    const { pool } = makeMockPool([new Error('timeout')]);
    const logger = makeSilentLogger();
    const metrics = makeMetrics();
    const writer = createWriter(pool, makeConfig(), logger, metrics);

    await writer.write(records);

    expect(logger.error).toHaveBeenCalled();
    expect(metrics.inc).toHaveBeenCalledWith(
      'processor_position_writes_total',
      { status: 'failed' },
      records.length,
    );
  });
});
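// ---------------------------------------------------------------------------
// Reference: assumed chunking helper
// ---------------------------------------------------------------------------
// The chunking tests below assume a plain slice-based splitter inside the
// writer, along these lines. `chunk` is a hypothetical name for illustration;
// writer.ts does not export it:
//
//   function chunk<T>(items: T[], size: number): T[][] {
//     const out: T[][] = [];
//     for (let i = 0; i < items.length; i += size) {
//       out.push(items.slice(i, i + size));
//     }
//     return out;
//   }
//
// With WRITE_BATCH_SIZE = 3 and 7 records, this yields ceil(7 / 3) = 3 chunks
// of sizes [3, 3, 1], hence three sequential pool.query() calls.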
// ---------------------------------------------------------------------------
// Tests — chunking
// ---------------------------------------------------------------------------

describe('createWriter — chunking', () => {
  it('splits a batch larger than WRITE_BATCH_SIZE into multiple sequential queries', async () => {
    const writeBatchSize = 3;
    const config = makeConfig({ WRITE_BATCH_SIZE: writeBatchSize });
    const ts = new Date('2024-05-01T12:00:00.000Z');
    // 7 records → ceil(7/3) = 3 chunks: [3, 3, 1]
    const records = Array.from({ length: 7 }, (_, i) =>
      makeRecord(`6-${i}`, `DEV${100 + i}`, { timestamp: ts }),
    );
    const { pool, queryCalls } = makeMockPool([
      { rows: records.slice(0, 3).map((r) => ({ device_id: r.position.device_id, ts })) },
      { rows: records.slice(3, 6).map((r) => ({ device_id: r.position.device_id, ts })) },
      { rows: records.slice(6).map((r) => ({ device_id: r.position.device_id, ts })) },
    ]);
    const writer = createWriter(pool, config, makeSilentLogger(), makeMetrics());

    const results = await writer.write(records);

    // 3 separate queries issued
    expect(queryCalls).toHaveLength(3);
    // All 7 records should be returned
    expect(results).toHaveLength(7);
    for (const result of results) {
      expect(result.status).toBe('inserted');
    }
  });

  it('first chunk fails, second chunk succeeds — correct per-record status', async () => {
    const writeBatchSize = 2;
    const config = makeConfig({ WRITE_BATCH_SIZE: writeBatchSize });
    const ts = new Date('2024-05-01T12:00:00.000Z');
    const records = Array.from({ length: 4 }, (_, i) =>
      makeRecord(`7-${i}`, `DEV${200 + i}`, { timestamp: ts }),
    );
    const { pool } = makeMockPool([
      new Error('chunk 1 failed'),
      {
        rows: records.slice(2).map((r) => ({ device_id: r.position.device_id, ts })),
      },
    ]);
    const writer = createWriter(pool, config, makeSilentLogger(), makeMetrics());

    const results = await writer.write(records);

    expect(results[0]?.status).toBe('failed');
    expect(results[1]?.status).toBe('failed');
    expect(results[2]?.status).toBe('inserted');
    expect(results[3]?.status).toBe('inserted');
  });
});
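// ---------------------------------------------------------------------------
// Reference: assumed attribute sanitization
// ---------------------------------------------------------------------------
// JSON.stringify() throws on bigint values and expands a Buffer into
// { type: 'Buffer', data: [...] }, so the tests below assume attributes are
// sanitized before serialization, roughly as sketched here. `toJsonSafe` is a
// hypothetical name; the real helper inside writer.ts may differ:
//
//   function toJsonSafe(attrs: Record<string, unknown>): Record<string, unknown> {
//     const out: Record<string, unknown> = {};
//     for (const [key, value] of Object.entries(attrs)) {
//       if (typeof value === 'bigint') out[key] = value.toString(); // decimal string
//       else if (Buffer.isBuffer(value)) out[key] = value.toString('base64');
//       else out[key] = value; // numbers, strings, booleans pass through
//     }
//     return out;
//   }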
// ---------------------------------------------------------------------------
// Tests — attribute serialization
// ---------------------------------------------------------------------------

describe('createWriter — attribute serialization', () => {
  it('serializes bigint attributes as decimal strings in the attributes JSON', async () => {
    const ts = new Date('2024-05-01T12:00:00.000Z');
    const u64Max = BigInt('18446744073709551615');
    const records = [
      makeRecord('8-0', 'DEV020', {
        timestamp: ts,
        attributes: { io_240: u64Max },
      }),
    ];

    let capturedParams: unknown[] = [];
    const query = vi.fn(async (sql: string, params: unknown[]) => {
      capturedParams = params;
      return { rows: [{ device_id: 'DEV020', ts }] };
    });
    const pool = { query } as unknown as Pool;
    const writer = createWriter(pool, makeConfig(), makeSilentLogger(), makeMetrics());

    await writer.write(records);

    // The attributes param is the 11th per row (index 10)
    const attributesParam = capturedParams[10] as string;
    expect(typeof attributesParam).toBe('string');
    const parsed = JSON.parse(attributesParam) as Record<string, unknown>;
    expect(parsed['io_240']).toBe('18446744073709551615');
    // Must be a string, not a bigint (JSON can't hold bigints)
    expect(typeof parsed['io_240']).toBe('string');
  });

  it('serializes Buffer attributes as base64 strings in the attributes JSON', async () => {
    const ts = new Date('2024-05-01T12:00:00.000Z');
    const rawBytes = Buffer.from([0xde, 0xad, 0xbe, 0xef]);
    const records = [
      makeRecord('9-0', 'DEV021', {
        timestamp: ts,
        attributes: { io_nx: rawBytes },
      }),
    ];

    let capturedParams: unknown[] = [];
    const query = vi.fn(async (sql: string, params: unknown[]) => {
      capturedParams = params;
      return { rows: [{ device_id: 'DEV021', ts }] };
    });
    const pool = { query } as unknown as Pool;
    const writer = createWriter(pool, makeConfig(), makeSilentLogger(), makeMetrics());

    await writer.write(records);

    const attributesParam = capturedParams[10] as string;
    expect(typeof attributesParam).toBe('string');
    const parsed = JSON.parse(attributesParam) as Record<string, unknown>;
    const b64 = parsed['io_nx'];
    expect(typeof b64).toBe('string');
    // Decode and verify byte equality
    const decoded = Buffer.from(b64 as string, 'base64');
    expect(decoded).toEqual(rawBytes);
  });

  it('serializes numeric attributes as-is in the attributes JSON', async () => {
    const ts = new Date('2024-05-01T12:00:00.000Z');
    const records = [
      makeRecord('10-0', 'DEV022', {
        timestamp: ts,
        attributes: { io_21: 42, io_1: 0 },
      }),
    ];

    let capturedParams: unknown[] = [];
    const query = vi.fn(async (sql: string, params: unknown[]) => {
      capturedParams = params;
      return { rows: [{ device_id: 'DEV022', ts }] };
    });
    const pool = { query } as unknown as Pool;
    const writer = createWriter(pool, makeConfig(), makeSilentLogger(), makeMetrics());

    await writer.write(records);

    const attributesParam = capturedParams[10] as string;
    const parsed = JSON.parse(attributesParam) as Record<string, unknown>;
    expect(parsed['io_21']).toBe(42);
    expect(parsed['io_1']).toBe(0);
  });
});
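// ---------------------------------------------------------------------------
// Reference: assumed parameter flattening
// ---------------------------------------------------------------------------
// The ordering test below assumes row parameters are flattened in column
// order, so row i occupies placeholders $(11*i + 1) .. $(11*i + 11). A
// hypothetical sketch, reusing the toJsonSafe sketch above:
//
//   const params = records.flatMap((r) => [
//     r.position.device_id, r.position.timestamp, r.position.latitude,
//     r.position.longitude, r.position.altitude, r.position.angle,
//     r.position.speed, r.position.satellites, r.position.priority,
//     r.codec, JSON.stringify(toJsonSafe(r.position.attributes)),
//   ]);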
// ---------------------------------------------------------------------------
// Tests — SQL parameter ordering
// ---------------------------------------------------------------------------

describe('createWriter — SQL parameter ordering', () => {
  it('passes parameters in the correct column order', async () => {
    const ts = new Date('2024-05-01T12:00:00.000Z');
    const position: Position = {
      device_id: 'PARAMTEST001',
      timestamp: ts,
      latitude: 10.0,
      longitude: 20.0,
      altitude: 300,
      angle: 180,
      speed: 75,
      satellites: 9,
      priority: 2,
      attributes: { io_21: 99 },
    };
    const records: ConsumedRecord[] = [{ id: 'p-0', position, codec: '8E', ts: ts.toISOString() }];

    let capturedParams: unknown[] = [];
    const query = vi.fn(async (sql: string, params: unknown[]) => {
      capturedParams = params;
      return { rows: [{ device_id: 'PARAMTEST001', ts }] };
    });
    const pool = { query } as unknown as Pool;
    const writer = createWriter(pool, makeConfig(), makeSilentLogger(), makeMetrics());

    await writer.write(records);

    // Expected column order (11 params for row 0, 1-indexed in SQL, 0-indexed here):
    // $1  device_id
    // $2  ts
    // $3  latitude
    // $4  longitude
    // $5  altitude
    // $6  angle
    // $7  speed
    // $8  satellites
    // $9  priority
    // $10 codec
    // $11 attributes
    expect(capturedParams[0]).toBe('PARAMTEST001');
    expect(capturedParams[1]).toEqual(ts);
    expect(capturedParams[2]).toBe(10.0);
    expect(capturedParams[3]).toBe(20.0);
    expect(capturedParams[4]).toBe(300);
    expect(capturedParams[5]).toBe(180);
    expect(capturedParams[6]).toBe(75);
    expect(capturedParams[7]).toBe(9);
    expect(capturedParams[8]).toBe(2);
    expect(capturedParams[9]).toBe('8E');
    expect(typeof capturedParams[10]).toBe('string');
    const attrs = JSON.parse(capturedParams[10] as string) as Record<string, unknown>;
    expect(attrs['io_21']).toBe(99);
  });

  it('SQL contains ON CONFLICT DO NOTHING and RETURNING clause', async () => {
    const records = [makeRecord('q-0', 'DEV030')];
    let capturedSql = '';
    const query = vi.fn(async (sql: string, _params: unknown[]) => {
      capturedSql = sql;
      return { rows: [{ device_id: 'DEV030', ts: new Date() }] };
    });
    const pool = { query } as unknown as Pool;
    const writer = createWriter(pool, makeConfig(), makeSilentLogger(), makeMetrics());

    await writer.write(records);

    expect(capturedSql).toMatch(/ON CONFLICT.*DO NOTHING/i);
    expect(capturedSql).toMatch(/RETURNING/i);
    expect(capturedSql).toMatch(/device_id/);
    expect(capturedSql).toMatch(/ts/);
  });
});

// ---------------------------------------------------------------------------
// Tests — metrics
// ---------------------------------------------------------------------------

describe('createWriter — metrics', () => {
  it('emits inserted and duplicate counters after a successful write', async () => {
    const ts = new Date('2024-05-01T12:00:00.000Z');
    const records = [
      makeRecord('m-0', 'DEV040', { timestamp: ts }),
      makeRecord('m-1', 'DEV041', { timestamp: ts }),
    ];
    // Only DEV040 returned — DEV041 is duplicate
    const { pool } = makeMockPool([{ rows: [{ device_id: 'DEV040', ts }] }]);
    const metrics = makeMetrics();
    const writer = createWriter(pool, makeConfig(), makeSilentLogger(), metrics);

    await writer.write(records);

    expect(metrics.inc).toHaveBeenCalledWith('processor_position_writes_total', { status: 'inserted' }, 1);
    expect(metrics.inc).toHaveBeenCalledWith('processor_position_writes_total', { status: 'duplicate' }, 1);
    expect(metrics.observe).toHaveBeenCalledWith(
      'processor_position_write_duration_seconds',
      expect.any(Number),
    );
  });
});
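// ---------------------------------------------------------------------------
// Reference: assumed metric emission
// ---------------------------------------------------------------------------
// The metrics assertions above assume each write() emits one counter
// increment per non-empty status bucket plus one duration observation,
// along these lines (a hypothetical sketch; insertedCount, duplicateCount,
// and elapsedSeconds are illustrative names):
//
//   metrics.inc('processor_position_writes_total', { status: 'inserted' }, insertedCount);
//   metrics.inc('processor_position_writes_total', { status: 'duplicate' }, duplicateCount);
//   metrics.observe('processor_position_write_duration_seconds', elapsedSeconds);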