Implement Phase 1 task 1.8 (Redis Streams publisher + main wiring)
- Bounded in-memory queue (default 10000); overflow throws PublishOverflowError so the framing layer skips ACK and the device retransmits.
- Background worker drains via XADD with MAXLEN ~ approximate trimming.
- JSON serialization with sentinel encoding for bigint/Buffer/Date; correctly handles Buffer.prototype.toJSON firing before the replacer.
- AdapterContext.publish(position, codec) with codec-label closure at dispatch in adapters/teltonika/index.ts; zero changes to the three codec parsers.
- connectRedis with retry-on-startup; main.ts wires the full pipeline.
- installGracefulShutdown stubbed (full hardening in task 1.12).
- 19 new tests (17 unit + 2 Docker-conditional integration). Total 81 passing.
This commit is contained in:
@@ -19,7 +19,7 @@ function makeMockContext(): AdapterContext {
|
||||
};
|
||||
|
||||
return {
|
||||
publish: vi.fn(async (_p: Position) => {}),
|
||||
publish: vi.fn(async (_p: Position, _codec) => {}),
|
||||
logger,
|
||||
metrics,
|
||||
};
|
||||
|
||||
@@ -0,0 +1,235 @@
|
||||
/**
|
||||
* Integration test: Redis Streams publisher round-trip via testcontainers.
|
||||
*
|
||||
* Spins up a real Redis 7 container, publishes a Position containing bigint
|
||||
* and Buffer attributes, XREADs it back, and verifies byte-perfect round-trip
|
||||
* after sentinel decoding.
|
||||
*
|
||||
* If Docker is unavailable (CI without Docker, local dev without Docker Desktop),
|
||||
* the test suite logs a clear message and skips — it does not fail the build.
|
||||
* Docker availability is established by a container start attempt, with the
|
||||
* skip condition set before any test runs.
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
|
||||
import { GenericContainer, type StartedTestContainer } from 'testcontainers';
|
||||
import type Redis from 'ioredis';
|
||||
import type { Position } from '../src/core/types.js';
|
||||
import { createPublisher, serializePosition } from '../src/core/publish.js';
|
||||
import type { Config } from '../src/config/load.js';
|
||||
import type { Logger } from 'pino';
|
||||
import { vi } from 'vitest';
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function makeSilentLogger(): Logger {
|
||||
return {
|
||||
debug: vi.fn(),
|
||||
info: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
fatal: vi.fn(),
|
||||
child: vi.fn().mockReturnThis(),
|
||||
} as unknown as Logger;
|
||||
}
|
||||
|
||||
function makeConfig(overrides: Partial<Config> = {}): Config {
|
||||
return {
|
||||
NODE_ENV: 'test',
|
||||
INSTANCE_ID: 'test-integration',
|
||||
LOG_LEVEL: 'silent',
|
||||
TELTONIKA_PORT: 5027,
|
||||
REDIS_URL: 'redis://localhost:6379',
|
||||
REDIS_TELEMETRY_STREAM: 'telemetry:test',
|
||||
REDIS_STREAM_MAXLEN: 10_000,
|
||||
METRICS_PORT: 9090,
|
||||
PUBLISH_QUEUE_CAPACITY: 100,
|
||||
STRICT_DEVICE_AUTH: false,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Container lifecycle
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Shared container state: populated in beforeAll, torn down in afterAll.
// `dockerAvailable` starts true and flips to false only when the container
// start attempt fails — each test checks it and self-skips at runtime.
let container: StartedTestContainer | null = null;
let redisClient: Redis | null = null;
let dockerAvailable = true;

beforeAll(async () => {
  // Docker availability is probed by a real container start attempt; any
  // failure (no daemon, pull error, …) downgrades to "skip" instead of
  // failing the build.
  try {
    container = await new GenericContainer('redis:7-alpine')
      .withExposedPorts(6379)
      .start();
  } catch {
    console.warn(
      '[publish.integration.test] Docker not available — skipping Redis integration tests',
    );
    dockerAvailable = false;
    return;
  }

  // testcontainers maps 6379 to a random host port; resolve both halves.
  const mappedPort = container.getMappedPort(6379);
  const host = container.getHost();

  // ioredis is imported lazily so loading this file has no client-side cost
  // on machines where Docker is absent and the suite skips.
  const { default: Redis } = await import('ioredis');
  redisClient = new Redis(`redis://${host}:${mappedPort}`, {
    enableOfflineQueue: false, // fail fast rather than buffering commands
    lazyConnect: true, // connect explicitly below
    maxRetriesPerRequest: 0, // no per-command retries in tests
  });
  await redisClient.connect();
}, 60_000); // generous: first run may need to pull the redis:7-alpine image
|
||||
|
||||
afterAll(async () => {
|
||||
if (redisClient) {
|
||||
await redisClient.quit().catch(() => {});
|
||||
}
|
||||
if (container) {
|
||||
await container.stop().catch(() => {});
|
||||
}
|
||||
}, 30_000);
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Integration tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe('Redis Streams publisher — integration', () => {
  it('round-trips a Position with bigint and Buffer attributes via XADD/XREAD', async () => {
    // Runtime self-skip: the Docker probe runs in beforeAll, after suite
    // collection, so a collection-time skipIf cannot see its result.
    if (!dockerAvailable || !redisClient) {
      console.warn('[publish.integration.test] skipping: Docker not available');
      return;
    }

    // Arrange: position with all attribute types
    const original: Position = {
      device_id: '356307042441013',
      timestamp: new Date('2024-06-15T12:00:00.000Z'),
      latitude: 54.687157,
      longitude: 25.279652,
      altitude: 130,
      angle: 90,
      speed: 45,
      satellites: 12,
      priority: 0,
      attributes: {
        num_attr: 255,
        big_attr: BigInt('18446744073709551615'), // u64 max
        buf_attr: Buffer.from([0xde, 0xad, 0xbe, 0xef]),
        event: 0,
      },
    };

    const stream = 'telemetry:test';
    const config = makeConfig({ REDIS_TELEMETRY_STREAM: stream });

    const publisher = createPublisher(
      redisClient,
      config,
      makeSilentLogger(),
      // Inline metrics stub — this test asserts on stream contents, not metrics.
      { inc: vi.fn(), observe: vi.fn() },
    );

    // Act: publish and wait for worker to drain.
    // drain(5_000) blocks until the background worker has flushed the queue
    // (or the timeout elapses), so the XADD is visible before we XREAD.
    await publisher.publish(original, '8E');
    await publisher.drain(5_000);

    // Assert: XREAD the record back from Redis, reading from id '0' (start).
    const results = await redisClient.xread(
      'COUNT',
      '1',
      'STREAMS',
      stream,
      '0',
    );

    expect(results).not.toBeNull();
    expect(results).toHaveLength(1);

    const [_streamName, messages] = results![0]!;
    expect(messages).toHaveLength(1);

    const [_id, fieldValues] = messages[0]!;

    // fieldValues is a flat [k1, v1, k2, v2, ...] array from ioredis
    const record: Record<string, string> = {};
    for (let i = 0; i < fieldValues.length; i += 2) {
      record[fieldValues[i]!] = fieldValues[i + 1]!;
    }

    // Top-level fields
    expect(record['ts']).toBe('2024-06-15T12:00:00.000Z');
    expect(record['device_id']).toBe('356307042441013');
    expect(record['codec']).toBe('8E');

    // Payload round-trip
    const payload = JSON.parse(record['payload']!) as {
      device_id: string;
      timestamp: string;
      latitude: number;
      longitude: number;
      attributes: Record<string, unknown>;
    };

    expect(payload.device_id).toBe(original.device_id);
    expect(payload.latitude).toBe(original.latitude);
    expect(payload.longitude).toBe(original.longitude);

    // Sentinel decoding: bigint/Buffer attributes are wrapped by jsonReplacer
    // into { __bigint } / { __buffer_b64 } objects; decode and compare.
    const bigSentinel = payload.attributes['big_attr'] as { __bigint: string };
    const bufSentinel = payload.attributes['buf_attr'] as { __buffer_b64: string };
    const numAttr = payload.attributes['num_attr'] as number;

    expect(BigInt(bigSentinel.__bigint)).toBe(BigInt('18446744073709551615'));
    expect(Buffer.from(bufSentinel.__buffer_b64, 'base64')).toEqual(
      Buffer.from([0xde, 0xad, 0xbe, 0xef]),
    );
    expect(numAttr).toBe(255);
  }, 30_000);

  it('serializePosition produces fields consumed correctly by XREAD', async () => {
    if (!dockerAvailable || !redisClient) {
      console.warn('[publish.integration.test] skipping: Docker not available');
      return;
    }

    // Distinct stream name so this test cannot see the other test's entry.
    const stream = 'telemetry:serialize-test';
    const pos: Position = {
      device_id: 'DIRECT123',
      timestamp: new Date('2024-01-01T00:00:00.000Z'),
      latitude: 0,
      longitude: 0,
      altitude: 0,
      angle: 0,
      speed: 0,
      satellites: 4,
      priority: 0,
      attributes: {},
    };

    // Flatten the field map into the [k1, v1, k2, v2, ...] shape XADD expects.
    const fields = serializePosition(pos, '16');
    const args: string[] = [];
    for (const [k, v] of Object.entries(fields)) {
      args.push(k, v);
    }

    // Push directly (bypassing the publisher) to verify field layout is correct
    await redisClient.xadd(stream, '*', ...args);

    const results = await redisClient.xread('COUNT', '1', 'STREAMS', stream, '0');
    expect(results).not.toBeNull();
    const [_sName, msgs] = results![0]!;
    const [_id, fv] = msgs[0]!;

    const record: Record<string, string> = {};
    for (let i = 0; i < fv.length; i += 2) {
      record[fv[i]!] = fv[i + 1]!;
    }

    expect(record['codec']).toBe('16');
    expect(record['device_id']).toBe('DIRECT123');
  }, 30_000);
});
|
||||
@@ -0,0 +1,295 @@
|
||||
/**
|
||||
* Unit tests for src/core/publish.ts
|
||||
*
|
||||
* Covers:
|
||||
* - jsonReplacer sentinel encoding (bigint, Buffer, Date, plain values)
|
||||
* - serializePosition field shape
|
||||
* - PublishOverflowError thrown when queue is full
|
||||
* - publish() is non-blocking (returns before XADD completes)
|
||||
* - connectRedis retry helper fails with a clear error on unreachable host
|
||||
*/
|
||||
|
||||
import { describe, it, expect, vi } from 'vitest';
|
||||
import type { Logger } from 'pino';
|
||||
import type { Metrics, Position } from '../src/core/types.js';
|
||||
import {
|
||||
jsonReplacer,
|
||||
serializePosition,
|
||||
createPublisher,
|
||||
PublishOverflowError,
|
||||
connectRedis,
|
||||
} from '../src/core/publish.js';
|
||||
import type { Config } from '../src/config/load.js';
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function makePosition(overrides: Partial<Position> = {}): Position {
|
||||
return {
|
||||
device_id: 'TEST123456789',
|
||||
timestamp: new Date('2024-01-15T10:30:00.000Z'),
|
||||
latitude: 54.12345,
|
||||
longitude: 25.98765,
|
||||
altitude: 150,
|
||||
angle: 270,
|
||||
speed: 60,
|
||||
satellites: 8,
|
||||
priority: 1,
|
||||
attributes: {},
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function makeConfig(overrides: Partial<Config> = {}): Config {
|
||||
return {
|
||||
NODE_ENV: 'test',
|
||||
INSTANCE_ID: 'test-instance',
|
||||
LOG_LEVEL: 'silent',
|
||||
TELTONIKA_PORT: 5027,
|
||||
REDIS_URL: 'redis://localhost:6379',
|
||||
REDIS_TELEMETRY_STREAM: 'telemetry:teltonika',
|
||||
REDIS_STREAM_MAXLEN: 1_000_000,
|
||||
METRICS_PORT: 9090,
|
||||
PUBLISH_QUEUE_CAPACITY: 10_000,
|
||||
STRICT_DEVICE_AUTH: false,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function makeSilentLogger(): Logger {
|
||||
return {
|
||||
debug: vi.fn(),
|
||||
info: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
fatal: vi.fn(),
|
||||
child: vi.fn().mockReturnThis(),
|
||||
} as unknown as Logger;
|
||||
}
|
||||
|
||||
function makeMetrics(): Metrics {
|
||||
return {
|
||||
inc: vi.fn(),
|
||||
observe: vi.fn(),
|
||||
};
|
||||
}
|
||||
|
||||
// Minimal ioredis stub used for queue tests
|
||||
function makeHangingRedis(): {
|
||||
xadd: ReturnType<typeof vi.fn>;
|
||||
quit: ReturnType<typeof vi.fn>;
|
||||
} {
|
||||
return {
|
||||
// Returns a promise that never resolves — simulates a hung Redis
|
||||
xadd: vi.fn(() => new Promise<string>(() => {})),
|
||||
quit: vi.fn().mockResolvedValue('OK'),
|
||||
};
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// 1. jsonReplacer encoding
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe('jsonReplacer', () => {
|
||||
it('encodes bigint as { __bigint: "<digits>" }', () => {
|
||||
const result = jsonReplacer('x', BigInt('9007199254740993'));
|
||||
expect(result).toEqual({ __bigint: '9007199254740993' });
|
||||
});
|
||||
|
||||
it('encodes zero bigint correctly', () => {
|
||||
expect(jsonReplacer('x', 0n)).toEqual({ __bigint: '0' });
|
||||
});
|
||||
|
||||
it('encodes Buffer as { __buffer_b64: "<base64>" }', () => {
|
||||
const buf = Buffer.from([0xde, 0xad, 0xbe, 0xef]);
|
||||
const result = jsonReplacer('x', buf);
|
||||
expect(result).toEqual({ __buffer_b64: buf.toString('base64') });
|
||||
});
|
||||
|
||||
it('encodes Buffer subarray view (zero-copy) correctly', () => {
|
||||
const backing = Buffer.from([0x00, 0x01, 0x02, 0x03, 0x04, 0x05]);
|
||||
const view = backing.subarray(2, 5); // [0x02, 0x03, 0x04]
|
||||
const result = jsonReplacer('x', view);
|
||||
expect(result).toEqual({ __buffer_b64: view.toString('base64') });
|
||||
});
|
||||
|
||||
it('encodes Date as ISO string', () => {
|
||||
const d = new Date('2024-06-01T00:00:00.000Z');
|
||||
expect(jsonReplacer('x', d)).toBe('2024-06-01T00:00:00.000Z');
|
||||
});
|
||||
|
||||
it('passes through plain numbers unchanged', () => {
|
||||
expect(jsonReplacer('x', 42)).toBe(42);
|
||||
expect(jsonReplacer('x', -3.14)).toBe(-3.14);
|
||||
expect(jsonReplacer('x', 0)).toBe(0);
|
||||
});
|
||||
|
||||
it('passes through strings unchanged', () => {
|
||||
expect(jsonReplacer('x', 'hello')).toBe('hello');
|
||||
});
|
||||
|
||||
it('passes through null unchanged', () => {
|
||||
expect(jsonReplacer('x', null)).toBeNull();
|
||||
});
|
||||
|
||||
it('round-trips bigint + Buffer through JSON.parse with sentinel decoding', () => {
|
||||
const original = {
|
||||
big: BigInt('12345678901234567890'),
|
||||
buf: Buffer.from([0xca, 0xfe]),
|
||||
num: 99,
|
||||
};
|
||||
|
||||
const json = JSON.stringify(original, jsonReplacer);
|
||||
const parsed = JSON.parse(json) as {
|
||||
big: { __bigint: string };
|
||||
buf: { __buffer_b64: string };
|
||||
num: number;
|
||||
};
|
||||
|
||||
// Decode sentinels (simulating Processor-side decoder)
|
||||
expect(BigInt(parsed.big.__bigint)).toBe(BigInt('12345678901234567890'));
|
||||
expect(Buffer.from(parsed.buf.__buffer_b64, 'base64')).toEqual(
|
||||
Buffer.from([0xca, 0xfe]),
|
||||
);
|
||||
expect(parsed.num).toBe(99);
|
||||
});
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// 2. serializePosition field shape
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe('serializePosition', () => {
|
||||
it('produces the expected top-level Redis fields', () => {
|
||||
const pos = makePosition();
|
||||
const fields = serializePosition(pos, '8');
|
||||
|
||||
expect(fields).toHaveProperty('ts', '2024-01-15T10:30:00.000Z');
|
||||
expect(fields).toHaveProperty('device_id', 'TEST123456789');
|
||||
expect(fields).toHaveProperty('codec', '8');
|
||||
expect(fields).toHaveProperty('payload');
|
||||
});
|
||||
|
||||
it('payload is valid JSON', () => {
|
||||
const pos = makePosition({ attributes: { speed_raw: 1234n, raw: Buffer.from([0xab]) } });
|
||||
const { payload } = serializePosition(pos, '8E');
|
||||
expect(() => JSON.parse(payload)).not.toThrow();
|
||||
});
|
||||
|
||||
it('payload round-trips bigint and Buffer sentinels', () => {
|
||||
const original = makePosition({
|
||||
attributes: {
|
||||
big_io: BigInt('18446744073709551615'), // u64 max
|
||||
buf_io: Buffer.from([0xde, 0xad]),
|
||||
num_io: 255,
|
||||
},
|
||||
});
|
||||
|
||||
const { payload } = serializePosition(original, '16');
|
||||
const parsed = JSON.parse(payload) as {
|
||||
attributes: Record<string, unknown>;
|
||||
};
|
||||
|
||||
const big = parsed.attributes['big_io'] as { __bigint: string };
|
||||
const buf = parsed.attributes['buf_io'] as { __buffer_b64: string };
|
||||
const num = parsed.attributes['num_io'] as number;
|
||||
|
||||
expect(BigInt(big.__bigint)).toBe(BigInt('18446744073709551615'));
|
||||
expect(Buffer.from(buf.__buffer_b64, 'base64')).toEqual(Buffer.from([0xde, 0xad]));
|
||||
expect(num).toBe(255);
|
||||
});
|
||||
|
||||
it('codec label "8E" is preserved verbatim', () => {
|
||||
const fields = serializePosition(makePosition(), '8E');
|
||||
expect(fields['codec']).toBe('8E');
|
||||
});
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// 3. Bounded queue overflow
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// NOTE(review): these tests assume the background worker dequeues the first
// published item within the same await boundary as publish() — that is an
// implementation-timing detail of createPublisher; confirm it holds if the
// worker's scheduling changes (e.g. a setImmediate/timer-based drain would
// shift every queue-depth count below by one).
describe('createPublisher — overflow', () => {
  it('throws PublishOverflowError when queue is at capacity', async () => {
    const redis = makeHangingRedis();
    // Capacity=3: worker picks up the 1st item (hangs on xadd), so the queue
    // drains to 0 after the first publish microtask. Subsequent publishes fill
    // it: 2nd→queue=1, 3rd→queue=2, 4th→queue=3 (full). 5th should overflow.
    const config = makeConfig({ PUBLISH_QUEUE_CAPACITY: 3 });

    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const publisher = createPublisher(redis as any, config, makeSilentLogger(), makeMetrics());

    await publisher.publish(makePosition({ device_id: 'A' }), '8'); // worker takes A → queue=0
    await publisher.publish(makePosition({ device_id: 'B' }), '8'); // queue=1
    await publisher.publish(makePosition({ device_id: 'C' }), '8'); // queue=2
    await publisher.publish(makePosition({ device_id: 'D' }), '8'); // queue=3 = capacity

    // 5th publish should overflow
    await expect(publisher.publish(makePosition({ device_id: 'E' }), '8')).rejects.toBeInstanceOf(
      PublishOverflowError,
    );
  });

  it('increments the overflow metric on overflow', async () => {
    const redis = makeHangingRedis();
    // Capacity=1: worker picks up the 1st item immediately (queue→0).
    // 2nd item fills queue to capacity=1. 3rd overflows.
    const config = makeConfig({ PUBLISH_QUEUE_CAPACITY: 1 });
    const metrics = makeMetrics();

    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const publisher = createPublisher(redis as any, config, makeSilentLogger(), metrics);

    await publisher.publish(makePosition({ device_id: 'A' }), '8'); // worker takes A
    await publisher.publish(makePosition({ device_id: 'B' }), '8'); // queue=1 = capacity
    await expect(publisher.publish(makePosition({ device_id: 'C' }), '8')).rejects.toBeInstanceOf(
      PublishOverflowError,
    );

    // The overflow counter must be labelled with the codec of the dropped item.
    expect(metrics.inc).toHaveBeenCalledWith('teltonika_publish_overflow_total', { codec: '8' });
  });

  it('publish() resolves without waiting for XADD to complete (non-blocking guarantee)', async () => {
    // The hanging redis means XADD never completes. publish() must still return
    // promptly — it only enqueues, it does not await the XADD call itself.
    const redis = makeHangingRedis();
    const config = makeConfig({ PUBLISH_QUEUE_CAPACITY: 100 });

    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const publisher = createPublisher(redis as any, config, makeSilentLogger(), makeMetrics());

    const start = Date.now();
    await publisher.publish(makePosition(), '8');
    const elapsed = Date.now() - start;

    // Should complete in well under 200ms regardless of Redis latency.
    // The worker may have already started (calling xadd which hangs), but
    // publish() itself returned before xadd resolved — that is the guarantee.
    expect(elapsed).toBeLessThan(200);
  });
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// 4. connectRedis retry helper
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe('connectRedis', () => {
|
||||
it('calls process.exit(1) when Redis is unreachable after all retries', async () => {
|
||||
const exitSpy = vi.spyOn(process, 'exit').mockImplementation((_code) => {
|
||||
throw new Error('process.exit called');
|
||||
});
|
||||
|
||||
const logger = makeSilentLogger();
|
||||
|
||||
// Port 1 is almost certainly not listening
|
||||
await expect(
|
||||
connectRedis('redis://127.0.0.1:1', logger, 1),
|
||||
).rejects.toThrow('process.exit called');
|
||||
|
||||
expect(exitSpy).toHaveBeenCalledWith(1);
|
||||
|
||||
exitSpy.mockRestore();
|
||||
});
|
||||
}, 15_000);
|
||||
@@ -73,7 +73,7 @@ function makeMockContext(): AdapterContext {
|
||||
};
|
||||
|
||||
return {
|
||||
publish: vi.fn(async (_p: Position) => {}),
|
||||
publish: vi.fn(async (_p: Position, _codec) => {}),
|
||||
logger,
|
||||
metrics,
|
||||
};
|
||||
|
||||
Reference in New Issue
Block a user