0
0
mirror of https://github.com/PostHog/posthog.git synced 2024-11-21 21:49:51 +01:00

fix: delete is sometimes timing out (#16569)

* fix: delete is sometimes timing out

* but without importing from FE code
This commit is contained in:
Paul D'Ambra 2023-07-14 08:48:40 +01:00 committed by GitHub
parent 6cabee6ed0
commit 9ecd34553e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 25 additions and 1 deletion

View File

@@ -12,9 +12,11 @@
<env name="KAFKA_HOSTS" value="localhost:9092" />
<env name="OBJECT_STORAGE_ENABLED" value="True" />
<env name="SESSION_RECORDING_ENABLE_OFFSET_HIGH_WATER_MARK_PROCESSING" value="true" />
<env name="SESSION_RECORDING_KAFKA_BATCH_SIZE" value="200" />
<env name="SESSION_RECORDING_MAX_BUFFER_AGE_SECONDS" value="180" />
<env name="SESSION_RECORDING_SUMMARY_INGESTION_ENABLED_TEAMS" value="all" />
<env name="WORKER_CONCURRENCY" value="2" />
<env name="SESSION_RECORDING_KAFKA_QUEUE_SIZE" value="600" />
</envs>
<method v="2" />
</configuration>

View File

@@ -1,4 +1,5 @@
import { captureException } from '@sentry/node'
import { randomUUID } from 'crypto'
import { Redis } from 'ioredis'
import { EventEmitter } from 'node:events'
@@ -143,7 +144,28 @@ export class RealtimeManager extends EventEmitter {
try {
await this.run(`clearAllMessages ${key} `, async (client) => {
return client.del(key)
/**
* We could delete the key here but (https://redis.io/commands/del/) del is O(M)
* where M is the number of items in the sorted set, for a large buffer this could be
* a lot of work.
*
* Whereas RENAME (https://redis.io/commands/rename/) is O(1)
* (_almost_ always O(1))
* """
* If newkey already exists it is overwritten, when this happens RENAME executes an implicit DEL operation,
* so if the deleted key contains a very big value it may cause high latency
* even if RENAME itself is usually a constant-time operation.
* """
* So, we rename the key to expired-<key>-<uuid>, so that it can't possibly clash
* and let it expire
*/
const pipeline = client.pipeline()
const newKey = `expired-${key}-${randomUUID()}`
pipeline.rename(`${key}`, newKey)
// renaming shouldn't affect the existing TTL
// but, we set one anyway to be sure
pipeline.expire(newKey, 1)
return pipeline.exec()
})
} catch (error) {
captureException(error, { tags: { teamId, sessionId }, extra: { key } })