import ClickHouse from '@posthog/clickhouse'
import { makeWorkerUtils, WorkerUtils } from 'graphile-worker'
import Redis from 'ioredis'
import parsePrometheusTextFormat from 'parse-prometheus-text-format'
import { PoolClient } from 'pg'

import { defaultConfig } from '../src/config/config'
import { KAFKA_SESSION_RECORDING_SNAPSHOT_ITEM_EVENTS } from '../src/config/kafka-topics'
import {
    Hook,
    Plugin,
    PluginConfig,
    PluginLogEntry,
    RawAction,
    RawClickHouseEvent,
    RawSessionReplayEvent,
} from '../src/types'
import { PostgresRouter, PostgresUse } from '../src/utils/db/postgres'
import { parseRawClickHouseEvent } from '../src/utils/event'
import { createPostgresPool, UUIDT } from '../src/utils/utils'
import { RawAppMetric } from '../src/worker/ingestion/app-metrics'
import { insertRow } from '../tests/helpers/sql'
import { waitForExpect } from './expectations'
import { produce } from './kafka'

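// Shared clients for the functional test suite, initialised once in
// `beforeAll` below and torn down in `afterAll`.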
let clickHouseClient: ClickHouse
export let postgres: PostgresRouter
export let redis: Redis.Redis
let graphileWorker: WorkerUtils

beforeAll(async () => {
    // Set up connections to Postgres, Graphile Worker, ClickHouse, and Redis
    postgres = new PostgresRouter({ ...defaultConfig, POSTGRES_CONNECTION_POOL_SIZE: 1 }, null)
    graphileWorker = await makeWorkerUtils({
        pgPool: createPostgresPool(defaultConfig.DATABASE_URL!, 1, 'functional_tests'),
    })
    clickHouseClient = new ClickHouse({
        host: defaultConfig.CLICKHOUSE_HOST,
        port: 8123,
        dataObjects: true,
        queryOptions: {
            database: defaultConfig.CLICKHOUSE_DATABASE,
            output_format_json_quote_64bit_integers: false,
        },
    })
    redis = new Redis(defaultConfig.REDIS_URL)
})

afterAll(async () => {
    await Promise.all([postgres.end(), redis.disconnect()])
})

export const capture = async ({
    teamId,
    distinctId,
    uuid,
    event,
    properties = {},
    token = null,
    sentAt = new Date(),
    eventTime = new Date(),
    now = new Date(),
    $set = undefined,
    $set_once = undefined,
    topic = ['$performance_event', '$snapshot_items'].includes(event)
        ? KAFKA_SESSION_RECORDING_SNAPSHOT_ITEM_EVENTS
        : ['$$client_ingestion_warning'].includes(event)
        ? 'client_iwarnings_ingestion'
        : 'events_plugin_ingestion',
}: {
    teamId: number | null
    distinctId: string
    uuid: string
    event: string
    properties?: object
    token?: string | null
    sentAt?: Date
    eventTime?: Date
    now?: Date
    topic?: string
    $set?: object
    $set_once?: object
}) => {
    // WARNING: this capture method is meant to simulate the ingestion of events
    // from the capture endpoint, but there is no guarantee that it is 100%
    // accurate.
    return await produce({
        topic,
        message: Buffer.from(
            JSON.stringify({
                token,
                distinct_id: distinctId,
                ip: '',
                site_url: '',
                team_id: teamId,
                now: now,
                sent_at: sentAt,
                uuid: uuid,
                data: JSON.stringify({
                    event,
                    properties: { ...properties, uuid },
                    team_id: teamId,
                    timestamp: eventTime,
                    $set,
                    $set_once,
                }),
            })
        ),
        key: teamId ? teamId.toString() : '',
        waitForAck: true,
    })
}

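// A minimal usage sketch (team and distinct IDs are illustrative): produce an
// event to the ingestion topic, then poll ClickHouse until it lands.
//
//     const uuid = new UUIDT().toString()
//     await capture({ teamId, distinctId: 'user-1', uuid, event: '$pageview' })
//     await waitForExpect(async () => {
//         const events = await fetchEvents(teamId, uuid)
//         expect(events.length).toBe(1)
//     })
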
export const createPluginAttachment = async ({
    teamId,
    pluginConfigId,
    fileSize,
    contentType,
    fileName,
    key,
    contents,
}: {
    teamId: number
    pluginConfigId: number
    fileSize: number
    contentType: string
    fileName: string
    key: string
    contents: string
    client?: PoolClient
}) => {
    return await insertRow(postgres, 'posthog_pluginattachment', {
        team_id: teamId,
        plugin_config_id: pluginConfigId,
        key: key,
        content_type: contentType,
        file_name: fileName,
        file_size: fileSize,
        contents: contents,
    })
}

export const createPlugin = async (plugin: Omit<Plugin, 'id'>) => {
    return await insertRow(postgres, 'posthog_plugin', {
        ...plugin,
        config_schema: {},
        from_json: false,
        from_web: false,
        created_at: new Date().toISOString(),
        updated_at: new Date().toISOString(),
        is_preinstalled: false,
        capabilities: {},
    })
}

export const createPluginConfig = async (
    pluginConfig: Omit<PluginConfig, 'id' | 'created_at' | 'enabled' | 'order'>,
    enabled = true
) => {
    return await insertRow(postgres, 'posthog_pluginconfig', {
        ...pluginConfig,
        config: pluginConfig.config ?? {},
        created_at: new Date().toISOString(),
        updated_at: new Date().toISOString(),
        enabled,
        order: 0,
    })
}

export const getPluginConfig = async (teamId: number, pluginId: number) => {
    const queryResult = (await postgres.query(
        PostgresUse.COMMON_WRITE,
        `SELECT *
         FROM posthog_pluginconfig
         WHERE team_id = $1
           AND id = $2`,
        [teamId, pluginId],
        'getPluginConfig'
    )) as { rows: any[] }
    return queryResult.rows[0]
}

export const updatePluginConfig = async (
    teamId: number,
    pluginConfigId: string,
    pluginConfig: Partial<PluginConfig>
) => {
    await postgres.query(
        PostgresUse.COMMON_WRITE,
        `UPDATE posthog_pluginconfig SET config = $1, updated_at = $2 WHERE id = $3 AND team_id = $4`,
        [pluginConfig.config ?? {}, pluginConfig.updated_at, pluginConfigId, teamId],
        'updatePluginConfig'
    )
}

export const reloadPlugins = async () => await redis.publish('reload-plugins', '')

export const waitForPluginToLoad = (pluginConfig: any) => {
    return waitForExpect(async () => {
        const logEntries = await fetchPluginLogEntries(pluginConfig.id)
        const setUp = logEntries.filter(({ message }) => message.includes('Plugin loaded'))
        expect(setUp.length).toBeGreaterThan(0)
    })
}

export const createAndReloadPluginConfig = async (teamId: number, pluginId: number) => {
    const pluginConfig = await createPluginConfig({ team_id: teamId, plugin_id: pluginId, config: {} })
    await reloadPlugins()
    // We wait for some log entries for the plugin, to make sure it's ready to
    // process events.
    await waitForPluginToLoad(pluginConfig)
    return pluginConfig
}

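// Typical setup sketch for a plugin test (the plugin fields and source are
// illustrative):
//
//     const organizationId = await createOrganization()
//     const teamId = await createTeam(organizationId)
//     const plugin = await createPlugin({
//         organization_id: organizationId,
//         name: 'test plugin',
//         plugin_type: 'source',
//         source__index_ts: `export async function processEvent(event) { return event }`,
//     })
//     const pluginConfig = await createAndReloadPluginConfig(teamId, plugin.id)
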
export const disablePluginConfig = async (teamId: number, pluginConfigId: number) => {
    await postgres.query(
        PostgresUse.COMMON_WRITE,
        `UPDATE posthog_pluginconfig
         SET enabled = false
         WHERE id = $1
           AND team_id = $2`,
        [pluginConfigId, teamId],
        'disablePluginConfig'
    )
}

export const enablePluginConfig = async (teamId: number, pluginConfigId: number) => {
    await postgres.query(
        PostgresUse.COMMON_WRITE,
        `UPDATE posthog_pluginconfig
         SET enabled = true
         WHERE id = $1
           AND team_id = $2`,
        [pluginConfigId, teamId],
        'enablePluginConfig'
    )
}

export const schedulePluginJob = async ({
    teamId,
    pluginConfigId,
    type,
    taskType,
    payload,
}: {
    teamId: number
    pluginConfigId: number
    type: string
    taskType: string
    payload: any
}) => {
    return await graphileWorker.addJob(taskType, { teamId, pluginConfigId, type, payload })
}

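// Sketch: schedule a job and read it back from the Graphile Worker queue
// (task type and payload are illustrative):
//
//     const job = await schedulePluginJob({ teamId, pluginConfigId, type: 'myJob', taskType: 'pluginJob', payload: {} })
//     const queued = await getScheduledPluginJob(job.id)
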
export const getScheduledPluginJob = async (jobId: string) => {
    const result = await postgres.query(
        PostgresUse.COMMON_WRITE,
        `SELECT *
         FROM graphile_worker.jobs
         WHERE id = $1`,
        [jobId],
        'getScheduledPluginJob'
    )
    return result.rows[0]
}

export const reloadAction = async (teamId: number, actionId: number) => {
    await redis.publish('reload-action', JSON.stringify({ teamId, actionId }))
}

export const fetchIngestionWarnings = async (teamId: number) => {
    const queryResult = (await clickHouseClient.querying(`
        SELECT *
        FROM ingestion_warnings
        WHERE team_id = ${teamId}
        ORDER BY timestamp ASC
    `)) as unknown as ClickHouse.ObjectQueryResult<any>
    return queryResult.data.map((warning) => ({ ...warning, details: JSON.parse(warning.details) }))
}

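// Fetch events for a team, resolving each event's person_id through the
// distinct_id → person overrides table so the result reflects merged persons.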
export const fetchEvents = async (teamId: number, uuid?: string) => {
    const queryResult = (await clickHouseClient.querying(`
        SELECT *,
               if(notEmpty(overrides.person_id), overrides.person_id, e.person_id) as person_id
        FROM events e
        LEFT OUTER JOIN (
            SELECT
                distinct_id,
                argMax(person_id, version) as person_id
            FROM person_distinct_id_overrides
            WHERE team_id = ${teamId}
            GROUP BY distinct_id
        ) AS overrides USING distinct_id
        WHERE team_id = ${teamId} ${uuid ? `AND uuid = '${uuid}'` : ``}
        ORDER BY timestamp ASC
    `)) as unknown as ClickHouse.ObjectQueryResult<RawClickHouseEvent>
    return queryResult.data.map(parseRawClickHouseEvent)
}

export const fetchPersons = async (teamId: number) => {
    const queryResult = (await clickHouseClient.querying(
        `SELECT * FROM person WHERE team_id = ${teamId} ORDER BY created_at ASC`
    )) as unknown as ClickHouse.ObjectQueryResult<any>
    return queryResult.data.map((person) => ({ ...person, properties: JSON.parse(person.properties) }))
}

export const fetchGroups = async (teamId: number) => {
    const queryResult = (await clickHouseClient.querying(
        `SELECT * FROM groups WHERE team_id = ${teamId} ORDER BY created_at ASC`
    )) as unknown as ClickHouse.ObjectQueryResult<any>
    return queryResult.data.map((group) => ({ ...group, group_properties: JSON.parse(group.group_properties) }))
}

export const createGroupType = async (teamId: number, projectId: number, index: number, groupType: string) => {
    await postgres.query(
        PostgresUse.COMMON_WRITE,
        `
        INSERT INTO posthog_grouptypemapping (team_id, project_id, group_type, group_type_index)
        VALUES ($1, $2, $3, $4)
        `,
        [teamId, projectId, groupType, index],
        'insertGroupType'
    )
}

export const createGroup = async (
    teamId: number,
    groupTypeIndex: number,
    groupKey: string,
    groupProperties: Record<string, any>
) => {
    await postgres.query(
        PostgresUse.COMMON_WRITE,
        `
        INSERT INTO posthog_group (team_id, group_key, group_type_index, group_properties, created_at, properties_last_updated_at, properties_last_operation, version)
        VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
        `,
        [
            teamId,
            groupKey,
            groupTypeIndex,
            JSON.stringify(groupProperties),
            new Date().toISOString(),
            JSON.stringify({}),
            JSON.stringify({}),
            1,
        ],
        'upsertGroup'
    )
}

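// Sketch: register a group type for a team's project (which shares the team's
// id in these tests), then create a group of that type (names are illustrative):
//
//     await createGroupType(teamId, teamId, 0, 'organization')
//     await createGroup(teamId, 0, 'org-1', { name: 'Acme' })
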
export const fetchPostgresPersons = async (teamId: number) => {
    const { rows } = await postgres.query(
        PostgresUse.COMMON_WRITE,
        `SELECT *
         FROM posthog_person
         WHERE team_id = $1`,
        [teamId],
        'fetchPostgresPersons'
    )
    return rows
}

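// session_replay_events is an aggregate table, so we group by session_id and
// take the earliest first-timestamp per session.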
export const fetchSessionReplayEvents = async (teamId: number, sessionId?: string) => {
    const queryResult = (await clickHouseClient.querying(
        `SELECT min(min_first_timestamp) as min_fs_ts, any(team_id), any(distinct_id), session_id FROM session_replay_events WHERE team_id = ${teamId} ${
            sessionId ? ` AND session_id = '${sessionId}'` : ''
        } GROUP BY session_id ORDER BY min_fs_ts ASC`
    )) as unknown as ClickHouse.ObjectQueryResult<RawSessionReplayEvent>
    return queryResult.data.map((event) => ({ ...event }))
}

export const fetchPluginConsoleLogEntries = async (pluginConfigId: number) => {
    const { data: logEntries } = (await clickHouseClient.querying(`
        SELECT * FROM plugin_log_entries
        WHERE plugin_config_id = ${pluginConfigId} AND source = 'CONSOLE'
    `)) as unknown as ClickHouse.ObjectQueryResult<PluginLogEntry>
    return logEntries.map((entry) => ({ ...entry, message: JSON.parse(entry.message) }))
}

export const fetchPluginLogEntries = async (pluginConfigId: number) => {
    const { data: logEntries } = (await clickHouseClient.querying(`
        SELECT * FROM plugin_log_entries
        WHERE plugin_config_id = ${pluginConfigId}
    `)) as unknown as ClickHouse.ObjectQueryResult<PluginLogEntry>
    return logEntries
}

export const fetchPluginAppMetrics = async (pluginConfigId: number) => {
    const { data: appMetrics } = (await clickHouseClient.querying(`
        SELECT * FROM app_metrics
        WHERE plugin_config_id = ${pluginConfigId} ORDER BY timestamp
    `)) as unknown as ClickHouse.ObjectQueryResult<RawAppMetric>
    return appMetrics
}

export const createOrganization = async (organizationProperties = {}) => {
    const organizationId = new UUIDT().toString()
    await insertRow(postgres, 'posthog_organization', {
        id: organizationId,
        name: 'TEST ORG',
        plugins_access_level: 9,
        created_at: new Date().toISOString(),
        updated_at: new Date().toISOString(),
        personalization: '{}', // DEPRECATED
        setup_section_2_completed: true, // DEPRECATED
        for_internal_metrics: false,
        domain_whitelist: [],
        available_product_features: [],
        is_member_join_email_enabled: false,
        slug: Math.round(Math.random() * 20000),
        ...organizationProperties,
    })
    return organizationId
}

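// Like `createOrganization`, but builds the INSERT statement by hand so tests
// can control exactly how each column value is serialised.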
export const createOrganizationRaw = async (organizationProperties = {}) => {
    const organizationId = new UUIDT().toString()

    const properties = {
        id: organizationId,
        name: 'TEST ORG',
        plugins_access_level: 9,
        created_at: new Date().toISOString(),
        updated_at: new Date().toISOString(),
        personalization: '{}', // DEPRECATED
        setup_section_2_completed: true, // DEPRECATED
        for_internal_metrics: false,
        domain_whitelist: '{}',
        available_product_features: '{}',
        is_member_join_email_enabled: false,
        slug: Math.round(Math.random() * 20000),
        ...organizationProperties,
    }

    const keys = Object.keys(properties)
        .map((key) => `"${key}"`)
        .join(',')

    const values = Object.values(properties)
        .map((value) => {
            if (Array.isArray(value) && value.length > 0) {
                return JSON.stringify(value)
            } else if (typeof value === 'string' && !value.includes('array')) {
                return `'${value || null}'`
            }
            return value
        })
        .join(',')

    await postgres.query(
        PostgresUse.COMMON_WRITE,
        `INSERT INTO posthog_organization
        (${keys})
        VALUES (${values})
        `,
        undefined,
        ''
    )
    return organizationId
}

export const createTeam = async (
    organizationId: string,
    slack_incoming_webhook?: string,
    token?: string,
    sessionRecordingOptIn = true
) => {
    const id = Math.round(Math.random() * 1000000000)
    await insertRow(postgres, 'posthog_project', {
        // Every team (aka environment) must be a child of a project
        id,
        organization_id: organizationId,
        name: 'TEST PROJECT',
        created_at: new Date().toISOString(),
    })
    await insertRow(postgres, 'posthog_team', {
        id,
        organization_id: organizationId,
        project_id: id,
        app_urls: [],
        name: 'TEST PROJECT',
        event_names: [],
        event_names_with_usage: [],
        event_properties: [],
        event_properties_with_usage: [],
        event_properties_numerical: [],
        created_at: new Date().toISOString(),
        updated_at: new Date().toISOString(),
        anonymize_ips: false,
        completed_snippet_onboarding: true,
        ingested_event: true,
        uuid: new UUIDT().toString(),
        session_recording_opt_in: sessionRecordingOptIn,
        plugins_opt_in: false,
        opt_out_capture: false,
        is_demo: false,
        api_token: token ?? new UUIDT().toString(),
        test_account_filters: [],
        timezone: 'UTC',
        data_attributes: ['data-attr'],
        person_display_name_properties: [],
        access_control: false,
        slack_incoming_webhook,
    })
    return id
}

export const createAction = async (action: Omit<RawAction, 'id'>) => {
    const actionRow = await insertRow(postgres, 'posthog_action', action)
    return actionRow
}

export const createUser = async (teamId: number, email: string) => {
    return await insertRow(postgres, 'posthog_user', {
        password: 'abc',
        email,
        first_name: '',
        last_name: '',
        email_opt_in: false,
        distinct_id: email,
        is_staff: false,
        is_active: true,
        date_joined: new Date().toISOString(),
        events_column_config: '{}',
        uuid: new UUIDT().toString(),
    })
}

export async function createHook(teamId: number, userId: number, resourceId: number, target: string) {
    await insertRow(postgres, 'ee_hook', {
        id: new UUIDT().toString(),
        team_id: teamId,
        user_id: userId,
        resource_id: resourceId,
        event: 'action_performed',
        target: target,
        created: new Date().toISOString(),
        updated: new Date().toISOString(),
    } as Hook)
}

export const getPropertyDefinitions = async (teamId: number) => {
    const { rows } = await postgres.query(
        PostgresUse.COMMON_WRITE,
        `SELECT *
         FROM posthog_propertydefinition
         WHERE team_id = $1`,
        [teamId],
        'getPropertyDefinitions'
    )
    return rows
}

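// Usage sketch (the metric name and labels are illustrative, not a metric this
// codebase is guaranteed to export):
//
//     const processed = await getMetric({
//         name: 'ingestion_event_processed_total',
//         type: 'COUNTER',
//         labels: { team_id: teamId.toString() },
//     })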
export const getMetric = async ({ name, type, labels }: Record<string, any>) => {
    // Requests `/_metrics` and extracts the value of the first metric we find
    // that matches name, type, and labels.
    //
    // Returns 0 if no metric is found.
    const openMetrics = await (await fetch('http://localhost:6738/_metrics')).text()
    return Number.parseFloat(
        parsePrometheusTextFormat(openMetrics)
            .filter((metric) => deepObjectContains(metric, { name, type }))[0]
            ?.metrics.filter((values) => deepObjectContains(values, { labels }))[0]?.value ?? 0
    )
}

const deepObjectContains = (obj: Record<string, any>, other: Record<string, any>): boolean => {
    // Returns true if `obj` contains all the keys in `other` and their values
    // are equal. If the values are objects, recursively checks if they contain
    // the keys in `other`.
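    // For example, deepObjectContains({ a: 1, b: { c: 2, d: 3 } }, { b: { c: 2 } })
    // is true, while deepObjectContains({ a: 1 }, { a: 2 }) is false.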

    return Object.keys(other).every((key) => {
        if (typeof other[key] === 'object') {
            return deepObjectContains(obj[key], other[key])
        }
        return obj[key] === other[key]
    })
}