0
0
mirror of https://github.com/PostHog/posthog.git synced 2024-11-28 18:26:15 +01:00

feat(historical-exports): Include historical exports finishing in activity log (#12068)

* Add an is_system flag to activity logs

* Allow writing activity logs from within plugin-server

* Make changes object non-required

* Render system user information

* Log when export finishes or fails in plugin activity log

* Update activityLogLogic.plugin.test.tsx
This commit is contained in:
Karl-Aksel Puulmann 2022-10-07 13:25:16 +03:00 committed by GitHub
parent d6f83b0d86
commit 30f59dfef2
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
15 changed files with 348 additions and 28 deletions

View File

@ -73,7 +73,13 @@ export const ActivityLogRow = ({
}): JSX.Element => {
return (
<div className={clsx('activity-log-row', logItem.unread && 'unread')}>
<ProfilePicture showName={false} email={logItem.email} size={'xl'} />
<ProfilePicture
showName={false}
name={logItem.isSystem ? logItem.name : undefined}
isSystem={logItem.isSystem}
email={logItem.email ?? undefined}
size={'xl'}
/>
<div className="details">
<div className="activity-description">{logItem.description}</div>
{showExtendedDescription && logItem.extendedDescription && (

View File

@ -109,5 +109,74 @@ describe('the activity log logic', () => {
'peter added new field first example" with value added this config, removed field second example, which had value removed this config, and updated field third example from value changed from this config to value to this new config on app the changed plugin with config ID 7.'
)
})
it('can handle exports starting', async () => {
const logic = await pluginTestSetup('the changed plugin', 'job_triggered', null, null, {
job_id: '123',
job_type: 'Export historical events V2',
payload: {
dateRange: ['2022-09-05', '2022-09-07'],
},
})
const actual = logic.values.humanizedActivity
expect(render(<>{actual[0].description}</>).container).toHaveTextContent(
'peter started exporting historical events between 2022-09-05 and 2022-09-07 (inclusive).'
)
})
it('can handle some other job starting', async () => {
const logic = await pluginTestSetup('the changed plugin', 'job_triggered', null, null, {
job_id: '123',
job_type: 'someJob',
payload: {
foo: 'bar',
},
})
const actual = logic.values.humanizedActivity
expect(render(<>{actual[0].description}</>).container).toHaveTextContent(
'peter triggered job: someJob with config ID 7.'
)
expect(render(<>{actual[0].extendedDescription}</>).container).toHaveTextContent(
'Payload: { "foo": "bar" }'
)
})
it('can handle exports finishing', async () => {
const logic = await pluginTestSetup('the changed plugin', 'export_success', null, null, {
job_id: '123',
job_type: 'Export historical events V2',
payload: {
id: 1,
parallelism: 3,
dateFrom: '2021-10-29T00:00:00.000Z',
dateTo: '2021-11-05T00:00:00.000Z',
},
})
const actual = logic.values.humanizedActivity
expect(render(<>{actual[0].description}</>).container).toHaveTextContent(
'Finished exporting historical events between 2021-10-29 and 2021-11-04 (inclusive).'
)
})
it('can handle exports failing', async () => {
const logic = await pluginTestSetup('the changed plugin', 'export_fail', null, null, {
job_id: '123',
job_type: 'Export historical events V2',
payload: {
id: 1,
parallelism: 3,
dateFrom: '2021-10-29T00:00:00.000Z',
dateTo: '2021-11-05T00:00:00.000Z',
},
})
const actual = logic.values.humanizedActivity
expect(render(<>{actual[0].description}</>).container).toHaveTextContent(
'Fatal error exporting historical events between 2021-10-29 and 2021-11-04 (inclusive). Check logs for more details.'
)
})
})
})

View File

@ -65,9 +65,10 @@ export const makeTestSetup = (scope: ActivityScope, url: string) => {
name: string,
activity: string,
changes: ActivityChange[] | null,
merge?: PersonMerge
merge?: PersonMerge | null,
trigger?: Trigger | null
): Promise<ReturnType<typeof activityLogLogic.build>> => {
const activityLogItem = makeAPIItem({ scope, name, activity, changes, merge })
const activityLogItem = makeAPIItem({ scope, name, activity, changes, merge, trigger })
return await testSetup(activityLogItem, scope, url)
}
}

View File

@ -30,8 +30,9 @@ export interface ActivityLogDetail {
}
export interface ActivityUser {
email: string
email: string | null
first_name: string
is_system?: boolean
}
export enum ActivityScope {
@ -64,8 +65,9 @@ export type ChangeMapping = {
export type HumanizedChange = { description: Description | null; extendedDescription?: ExtendedDescription }
export interface HumanizedActivityLogItem {
email?: string
email?: string | null
name?: string
isSystem?: boolean
description: Description
extendedDescription?: ExtendedDescription // e.g. an insight's filters summary
created_at: dayjs.Dayjs
@ -99,6 +101,7 @@ export function humanize(
logLines.push({
email: logItem.user.email,
name: logItem.user.first_name,
isSystem: logItem.user.is_system,
description,
extendedDescription,
created_at: dayjs(logItem.created_at),

View File

@ -3,7 +3,7 @@ import { useValues } from 'kea'
import md5 from 'md5'
import React, { useState } from 'react'
import { userLogic } from 'scenes/userLogic'
import { Lettermark } from '../Lettermark/Lettermark'
import { Lettermark, LettermarkColor } from '../Lettermark/Lettermark'
import './ProfilePicture.scss'
export interface ProfilePictureProps {
@ -15,6 +15,7 @@ export interface ProfilePictureProps {
className?: string
title?: string
index?: number
isSystem?: boolean
}
export function ProfilePicture({
@ -26,6 +27,7 @@ export function ProfilePicture({
className,
index,
title,
isSystem,
}: ProfilePictureProps): JSX.Element {
const { user } = useValues(userLogic)
const [didImageError, setDidImageError] = useState(false)
@ -51,7 +53,12 @@ export function ProfilePicture({
} else {
pictureComponent = (
<span className={pictureClass} style={style}>
<Lettermark name={combinedNameAndEmail} index={index} rounded />
<Lettermark
name={combinedNameAndEmail}
index={index}
rounded
color={isSystem ? LettermarkColor.Gray : undefined}
/>
</span>
)
}

View File

@ -1,3 +1,4 @@
import { dayjs } from 'lib/dayjs'
import { ActivityLogItem, ActivityScope, HumanizedChange } from 'lib/components/ActivityLog/humanizeActivity'
import { SentenceList } from 'lib/components/ActivityLog/SentenceList'
import React from 'react'
@ -94,6 +95,37 @@ export function pluginActivityDescriber(logItem: ActivityLogItem): HumanizedChan
}
}
if (logItem.activity == 'export_success' && logItem.detail.trigger) {
const { dateFrom, dateTo } = logItem.detail.trigger.payload
const startDate = dayjs(dateFrom).format('YYYY-MM-DD')
// :TRICKY: Internally export date range is non-inclusive so transform it to be inclusive
const endDate = dayjs(dateTo).subtract(1, 'day').format('YYYY-MM-DD')
return {
description: (
<>
Finished exporting historical events between {startDate} and {endDate} (inclusive).
</>
),
}
}
if (logItem.activity == 'export_fail' && logItem.detail.trigger) {
const { dateFrom, dateTo } = logItem.detail.trigger.payload
const startDate = dayjs(dateFrom).format('YYYY-MM-DD')
// :TRICKY: Internally export date range is non-inclusive so transform it to be inclusive
const endDate = dayjs(dateTo).subtract(1, 'day').format('YYYY-MM-DD')
return {
description: (
<>
Fatal error exporting historical events between {startDate} and {endDate} (inclusive). Check logs
for more details.
</>
),
}
}
if (logItem.activity == 'config_updated') {
const changes: (string | JSX.Element)[] = []
for (const change of logItem.detail.changes || []) {

View File

@ -3,7 +3,7 @@ auth: 0012_alter_user_first_name_max_length
axes: 0006_remove_accesslog_trusted
contenttypes: 0002_remove_content_type_name
ee: 0013_silence_deprecated_tags_warnings
posthog: 0265_related_tiles
posthog: 0266_add_is_system_field_to_activity_log
rest_hooks: 0002_swappable_hook_model
sessions: 0001_initial
social_django: 0010_uid_db_index

View File

@ -0,0 +1,32 @@
import { Hub, Team, TeamId } from '../../types'
import { status } from '../status'
import { UUIDT } from '../utils'
/** Shape of the job information stored under `detail.trigger` in the activity log row. */
interface Trigger {
    job_type: string
    job_id: string
    payload: Record<string, any>
}

/**
 * Writes a "system user" activity log row for a plugin config directly into Postgres.
 *
 * Rows are inserted with `is_system = TRUE` and no user, so the UI renders them as
 * performed by a synthetic "System" actor.
 *
 * @param hub plugin-server hub (provides team manager + Postgres access)
 * @param team either a team id (resolved via the team manager) or an already-fetched Team
 * @param pluginConfigId stored as the log row's `item_id`
 * @param activity activity name, e.g. 'export_success' / 'export_fail'
 * @param details persisted verbatim into the `detail` JSON column
 */
export async function createPluginActivityLog(
    hub: Hub,
    team: TeamId | Team,
    pluginConfigId: number,
    activity: string,
    details: { trigger: Trigger }
): Promise<void> {
    const teamObject: Team | null = typeof team === 'number' ? await hub.teamManager.fetchTeam(team) : team
    if (!teamObject) {
        // Best-effort: an unknown team is logged and skipped rather than raising.
        status.warn('🤔', `Could not find team ${team} to create an activity log for. Skipping.`)
        return
    }

    // Parameterized insert; scope/is_system are constants for plugin-server-authored rows.
    await hub.db.postgresQuery(
        `
        INSERT INTO posthog_activitylog (id, team_id, organization_id, activity, item_id, detail, scope, is_system, created_at)
        VALUES ($1, $2, $3, $4, $5, $6, 'PluginConfig', TRUE, NOW())
        `,
        [new UUIDT().toString(), teamObject.id, teamObject.organization_id, activity, pluginConfigId, details],
        'createPluginActivityLog'
    )
}

View File

@ -39,6 +39,7 @@ import {
PluginTask,
PluginTaskType,
} from '../../../../types'
import { createPluginActivityLog } from '../../../../utils/db/activity-log'
import { processError } from '../../../../utils/db/error'
import { isTestEnv } from '../../../../utils/env-utils'
import { fetchEventsForInterval } from '../utils/fetchEventsForInterval'
@ -80,7 +81,7 @@ export interface TestFunctions {
) => Promise<CoordinationUpdate>
getExportDateRange: (params: ExportParams) => Array<[ISOTimestamp, ISOTimestamp]>
progressBar: (progress: number, length?: number) => string
stopExport: (message: string) => Promise<void>
stopExport: (params: ExportParams, message: string, status: 'success' | 'fail') => Promise<void>
shouldResume: (status: ExportChunkStatus, now: number) => void
}
@ -240,7 +241,7 @@ export function addHistoricalEventsExportCapabilityV2(
})
if (update.exportIsDone) {
await stopExport('Export has finished! 💯', { type: PluginLogEntryType.Info })
await stopExport(params, 'Export has finished! 💯', 'success')
return
}
@ -373,7 +374,7 @@ export function addHistoricalEventsExportCapabilityV2(
const message = `Exporting chunk ${dateRange(payload.startTime, payload.endTime)} failed after ${
hub.HISTORICAL_EXPORTS_MAX_RETRY_COUNT
} retries. Stopping export.`
await stopExport(message, { type: PluginLogEntryType.Error })
await stopExport(activeExportParameters, message, 'fail')
await processError(hub, pluginConfig, message)
return
}
@ -413,9 +414,11 @@ export function addHistoricalEventsExportCapabilityV2(
} catch (error) {
Sentry.captureException(error)
await processError(hub, pluginConfig, error)
await stopExport('Failed fetching events. Stopping export - please try again later.', {
type: PluginLogEntryType.Error,
})
await stopExport(
activeExportParameters,
'Failed fetching events. Stopping export - please try again later.',
'fail'
)
return
}
@ -431,7 +434,7 @@ export function addHistoricalEventsExportCapabilityV2(
{ type: PluginLogEntryType.Debug }
)
} catch (error) {
await handleExportError(error, payload, events.length)
await handleExportError(error, activeExportParameters, payload, events.length)
return
}
}
@ -451,6 +454,7 @@ export function addHistoricalEventsExportCapabilityV2(
async function handleExportError(
error: Error,
params: ExportParams,
payload: ExportHistoricalEventsJobPayload,
eventCount: number
): Promise<void> {
@ -475,13 +479,29 @@ export function addHistoricalEventsExportCapabilityV2(
.runIn(nextRetrySeconds, 'seconds')
} else {
await processError(hub, pluginConfig, error)
await stopExport(`exportEvents returned unknown error, stopping export. error=${error}`)
await stopExport(params, `exportEvents returned unknown error, stopping export. error=${error}`, 'fail')
}
}
async function stopExport(message: string, logOverrides: Partial<PluginLogEntry> = {}) {
async function stopExport(params: ExportParams, message: string, status: 'success' | 'fail') {
await meta.storage.del(EXPORT_PARAMETERS_KEY)
createLog(message, logOverrides)
await createPluginActivityLog(
hub,
pluginConfig.team_id,
pluginConfig.id,
status === 'success' ? 'export_success' : 'export_fail',
{
trigger: {
job_id: params.id.toString(),
job_type: INTERFACE_JOB_NAME,
payload: params,
},
}
)
createLog(message, {
type: status === 'success' ? PluginLogEntryType.Info : PluginLogEntryType.Error,
})
}
function getTimestampBoundaries(payload: ExportHistoricalEventsUIPayload): [ISOTimestamp, ISOTimestamp] {

View File

@ -33,6 +33,7 @@ TRUNCATE TABLE
posthog_featureflag,
posthog_featureflaghashkeyoverride,
posthog_annotation,
posthog_activitylog,
posthog_dashboarditem,
posthog_dashboard,
posthog_cohortpeople,

View File

@ -0,0 +1,81 @@
import { Hub } from '../../../src/types'
import { createPluginActivityLog } from '../../../src/utils/db/activity-log'
import { createHub } from '../../../src/utils/db/hub'
import { pluginConfig39 } from '../../helpers/plugins'
import { resetTestDatabase } from '../../helpers/sql'
jest.mock('../../../src/utils/status')
// Shape of a row returned by `SELECT * FROM posthog_activitylog` in the tests below.
// NOTE: the column is `detail` (singular) and organization ids are UUID strings,
// matching the `expect.objectContaining` assertions further down.
interface ActivityLog {
    id: string
    team_id: number | null
    organization_id: string | null
    user_id: number | null
    is_system: boolean | null
    activity: string
    scope: string
    item_id: string
    detail: Record<string, any>
    created_at: string
}
describe('createPluginActivityLog()', () => {
    let hub: Hub
    let closeHub: () => Promise<void>

    beforeEach(async () => {
        await resetTestDatabase()
        ;[hub, closeHub] = await createHub({})
    })

    afterEach(async () => {
        await closeHub()
    })

    // Reads back every activity log row straight from Postgres.
    const fetchPluginActivityLogs = async (hub: Hub): Promise<Array<ActivityLog>> => {
        const { rows } = await hub.db.postgresQuery<ActivityLog>(
            `SELECT * FROM posthog_activitylog`,
            [],
            'fetchPluginActivityLogs'
        )
        return rows
    }

    it('can read own writes', async () => {
        const trigger = {
            job_id: 'foobar',
            job_type: 'some_type',
            payload: { value: 5 },
        }
        await createPluginActivityLog(hub, pluginConfig39.team_id, pluginConfig39.id, 'job_finished', { trigger })

        const activityLogs = await fetchPluginActivityLogs(hub)
        expect(activityLogs).toEqual([
            expect.objectContaining({
                id: expect.any(String),
                team_id: pluginConfig39.team_id,
                organization_id: expect.any(String),
                user_id: null,
                is_system: true,
                activity: 'job_finished',
                item_id: String(pluginConfig39.id),
                scope: 'PluginConfig',
                detail: { trigger },
                created_at: expect.any(String),
            }),
        ])
    })

    it('does not blow up for an invalid team', async () => {
        await createPluginActivityLog(hub, -1, pluginConfig39.id, 'job_finished', {} as any)

        expect(await fetchPluginActivityLogs(hub)).toEqual([])
    })
})

View File

@ -1,6 +1,7 @@
import { PluginMeta, RetryError } from '@posthog/plugin-scaffold'
import { Hub, ISOTimestamp, PluginConfig, PluginConfigVMInternalResponse } from '../../../../../src/types'
import { createPluginActivityLog } from '../../../../../src/utils/db/activity-log'
import { createHub } from '../../../../../src/utils/db/hub'
import { createStorage } from '../../../../../src/worker/vm/extensions/storage'
import { createUtils } from '../../../../../src/worker/vm/extensions/utilities'
@ -21,6 +22,7 @@ import { resetTestDatabase } from '../../../../helpers/sql'
jest.mock('../../../../../src/utils/status')
jest.mock('../../../../../src/worker/vm/upgrades/utils/fetchEventsForInterval')
jest.mock('../../../../../src/utils/db/activity-log')
const ONE_HOUR = 1000 * 60 * 60
@ -811,18 +813,56 @@ describe('addHistoricalEventsExportCapabilityV2()', () => {
describe('stopExport()', () => {
const stopExport = getTestMethod('stopExport')
it('unsets EXPORT_PARAMETERS_KEY', async () => {
await storage().set(EXPORT_PARAMETERS_KEY, {
id: 1,
parallelism: 3,
dateFrom: '2021-10-29T00:00:00.000Z' as ISOTimestamp,
dateTo: '2021-11-01T05:00:00.000Z' as ISOTimestamp,
})
const params = {
id: 1,
parallelism: 3,
dateFrom: '2021-10-29T00:00:00.000Z' as ISOTimestamp,
dateTo: '2021-11-01T05:00:00.000Z' as ISOTimestamp,
}
await stopExport('')
it('unsets EXPORT_PARAMETERS_KEY', async () => {
await storage().set(EXPORT_PARAMETERS_KEY, params)
await stopExport(params, '', 'success')
expect(await storage().get(EXPORT_PARAMETERS_KEY, null)).toEqual(null)
})
it('captures activity for export success', async () => {
await stopExport(params, '', 'success')
expect(createPluginActivityLog).toHaveBeenCalledWith(
hub,
pluginConfig39.team_id,
pluginConfig39.id,
'export_success',
{
trigger: {
job_id: '1',
job_type: INTERFACE_JOB_NAME,
payload: params,
},
}
)
})
it('captures activity for export failure', async () => {
await stopExport(params, '', 'fail')
expect(createPluginActivityLog).toHaveBeenCalledWith(
hub,
pluginConfig39.team_id,
pluginConfig39.id,
'export_fail',
{
trigger: {
job_id: '1',
job_type: INTERFACE_JOB_NAME,
payload: params,
},
}
)
})
})
describe('shouldResume()', () => {

View File

@ -0,0 +1,18 @@
# Generated by Django 3.2.15 on 2022-09-30 12:28
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds the nullable `is_system` flag to ActivityLog. Per the model definition,
    # a truthy value means the row was written asynchronously by a "system" actor
    # (e.g. the plugin-server) rather than a logged-in user, so `user` may be unset.
    # Nullable so existing rows need no backfill.

    dependencies = [
        ("posthog", "0265_related_tiles"),
    ]

    operations = [
        migrations.AddField(
            model_name="activitylog",
            name="is_system",
            field=models.BooleanField(null=True),
        ),
    ]

View File

@ -77,6 +77,9 @@ class ActivityLog(UUIDModel):
team_id = models.PositiveIntegerField(null=True)
organization_id = models.UUIDField(null=True)
user = models.ForeignKey("posthog.User", null=True, on_delete=models.SET_NULL)
# If truthy, user can be unset and this indicates a 'system' user made activity asynchronously
is_system = models.BooleanField(null=True)
activity = models.fields.CharField(max_length=79, null=False)
# if scoped to a model this activity log holds the id of the model being logged
# if not scoped to a model this log might not hold an item_id

View File

@ -1,6 +1,7 @@
from rest_framework import serializers
from posthog.models import User
from posthog.models.activity_logging.activity_log import ActivityLog
class UserMinimalSerializer(serializers.ModelSerializer):
@ -32,7 +33,7 @@ class TriggerSerializer(serializers.Serializer):
class DetailSerializer(serializers.Serializer):
id = serializers.CharField(read_only=True)
changes = ChangeSerializer(many=True)
changes = ChangeSerializer(many=True, required=False)
merge = MergeSerializer(required=False)
trigger = TriggerSerializer(required=False)
name = serializers.CharField(read_only=True)
@ -43,9 +44,15 @@ class ActivityLogSerializer(serializers.Serializer):
class Meta:
exclude = ["team_id, organization_id"]
user = UserMinimalSerializer(read_only=True)
user = serializers.SerializerMethodField()
activity = serializers.CharField(read_only=True)
scope = serializers.CharField(read_only=True)
item_id = serializers.CharField(read_only=True)
detail = DetailSerializer(required=False)
created_at = serializers.DateTimeField(read_only=True)
def get_user(self, activity_log: ActivityLog):
    """Serialize the acting user, substituting a synthetic 'System' actor for system rows."""
    # Regular rows: delegate to the minimal user serializer.
    if not activity_log.is_system:
        return UserMinimalSerializer(activity_log.user).data
    # System-generated rows have no real user attached.
    return {"first_name": "System", "email": None, "is_system": True}