mirror of https://github.com/PostHog/posthog.git (synced 2024-11-24 18:07:17 +01:00)
30bafdd382
* cleanup: remove unused team arg from registerLastStep
* cleanup: rename promises to ackPromises to make it clearer that's what they are
* cleanup(plugin-server): make waitForAck explicit/required
* add Kafka produce/ack metrics
* Clarify Kafka produce metric/labels
89 lines
3.1 KiB
TypeScript
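
The tests below drive the plugin-server's jobs DLQ through the `produce` helper imported from `./kafka`, whose `waitForAck` flag the commit above makes explicit and required. As a minimal sketch only, here is one plausible shape for such a helper, assuming kafkajs and mapping `waitForAck` onto kafkajs's `acks` setting; the names and defaults are assumptions, and the real helper likely differs:

// Sketch only: acks: -1 waits for all in-sync replicas, acks: 0 is fire-and-forget.
import { Kafka } from 'kafkajs'

import { defaultConfig } from '../src/config/config'

const producerKafka = new Kafka({ brokers: [defaultConfig.KAFKA_HOSTS] })
const producer = producerKafka.producer()

export async function produce({
    topic,
    message,
    key,
    waitForAck,
}: {
    topic: string
    message: Buffer | null
    key: string
    waitForAck: boolean
}): Promise<void> {
    await producer.connect() // kafkajs tolerates repeated connect() calls
    await producer.send({
        topic,
        messages: [{ key, value: message }],
        acks: waitForAck ? -1 : 0,
    })
}
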
import { Consumer, Kafka, KafkaMessage, logLevel } from 'kafkajs'
import { v4 as uuidv4 } from 'uuid'

import { defaultConfig } from '../src/config/config'
import { getMetric } from './api'
import { waitForExpect } from './expectations'
import { produce } from './kafka'

let kafka: Kafka

beforeAll(() => {
    // Connect to the cluster under test; logLevel.NOTHING silences kafkajs's own log output.
    kafka = new Kafka({ brokers: [defaultConfig.KAFKA_HOSTS], logLevel: logLevel.NOTHING })
})

describe('dlq handling', () => {
    // Test error cases that we can only trigger by producing to the jobs
    // queue directly.

    let dlq: KafkaMessage[]
    let dlqConsumer: Consumer

    beforeAll(async () => {
        // Make sure the dlq topic exists before starting the consumer
        const admin = kafka.admin()
        await admin.createTopics({ topics: [{ topic: 'jobs_dlq' }] })
        await admin.disconnect()

        // Accumulate everything that lands on the DLQ in memory, so the
        // tests below can poll for the messages they produced.
        dlq = []
        dlqConsumer = kafka.consumer({ groupId: 'jobs-consumer-test' })
        await dlqConsumer.subscribe({ topic: 'jobs_dlq', fromBeginning: true })
        await dlqConsumer.run({
            eachMessage: ({ message }) => {
                dlq.push(message)
                return Promise.resolve()
            },
        })
    })

    afterAll(async () => {
        await dlqConsumer.disconnect()
    })

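    // What these tests assert: the jobs consumer should catch messages it
    // cannot parse (empty payloads, invalid JSON) and forward them to
    // 'jobs_dlq' rather than crash. Roughly, as a sketch only (not the
    // actual consumer code):
    //
    //     try {
    //         job = JSON.parse(message.value!.toString())
    //     } catch (error) {
    //         await producer.send({ topic: 'jobs_dlq', messages: [message] })
    //         return
    //     }
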
    test.concurrent(`handles empty messages`, async () => {
        const key = uuidv4()

        await produce({ topic: 'jobs', message: null, key, waitForAck: true })

        await waitForExpect(() => {
            const messages = dlq.filter((message) => message.key?.toString() === key)
            expect(messages.length).toBe(1)
        })
    })

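    // waitForExpect (from ./expectations) polls an assertion until it passes
    // or a timeout elapses; presumably something along these lines (a sketch
    // with assumed defaults, not the actual helper):
    //
    //     export const waitForExpect = async <T>(fn: () => T | Promise<T>, timeout = 10_000, interval = 50): Promise<T> => {
    //         const start = Date.now()
    //         for (;;) {
    //             try {
    //                 return await fn()
    //             } catch (error) {
    //                 if (Date.now() - start > timeout) {
    //                     throw error
    //                 }
    //                 await new Promise((resolve) => setTimeout(resolve, interval))
    //             }
    //         }
    //     }
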
    test.concurrent(`handles invalid JSON`, async () => {
        const key = uuidv4()

        await produce({ topic: 'jobs', message: Buffer.from('invalid json'), key, waitForAck: true })

        await waitForExpect(() => {
            const messages = dlq.filter((message) => message.key?.toString() === key)
            expect(messages.length).toBe(1)
        })
    })

    test.concurrent('consumer updates timestamp exported to prometheus', async () => {
        // NOTE: an event other than the one we emit here may be what causes
        // the gauge to increase, but pushing this event through should at
        // least ensure that the gauge is updated.
        const metricBefore = await getMetric({
            name: 'latest_processed_timestamp_ms',
            type: 'GAUGE',
            labels: { topic: 'jobs', partition: '0', groupId: 'jobs-inserter' },
        })

        await produce({ topic: 'jobs', message: Buffer.from(''), key: '', waitForAck: true })

        await waitForExpect(async () => {
            const metricAfter = await getMetric({
                name: 'latest_processed_timestamp_ms',
                type: 'GAUGE',
                labels: { topic: 'jobs', partition: '0', groupId: 'jobs-inserter' },
            })
            expect(metricAfter).toBeGreaterThan(metricBefore)
            // Epoch milliseconds are on the order of 1e12; microseconds (1e15)
            // would exceed Date.now(), and seconds (1e9) would fall well short
            // of it, so these two bounds catch either unit mistake.
            expect(metricAfter).toBeLessThan(Date.now()) // Make sure, e.g., we're not setting microseconds
            expect(metricAfter).toBeGreaterThan(Date.now() - 60_000) // Make sure, e.g., we're not setting seconds
        }, 10_000)
    })
})
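
For context on the final test: a gauge like latest_processed_timestamp_ms is typically maintained on the consumer side with prom-client, along the lines below. The metric name and labels are taken from the test above; everything else is an assumption, not the plugin-server's actual code:

import { EachMessagePayload } from 'kafkajs'
import { Gauge } from 'prom-client'

const latestProcessedTimestampMs = new Gauge({
    name: 'latest_processed_timestamp_ms',
    help: 'Timestamp (epoch ms) of the latest message processed by the consumer.',
    labelNames: ['topic', 'partition', 'groupId'],
})

// Hypothetical handler: kafkajs exposes message.timestamp as a string of
// epoch milliseconds, which is why the test can bound the gauge by Date.now().
const eachMessage = async ({ topic, partition, message }: EachMessagePayload): Promise<void> => {
    latestProcessedTimestampMs.set({ topic, partition, groupId: 'jobs-inserter' }, parseInt(message.timestamp))
    // ... actual message handling would go here ...
}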