0
0
mirror of https://github.com/PostHog/posthog.git synced 2024-11-24 09:14:46 +01:00

feat(bi): Insight variables rollups on dashboards (#25545)

Co-authored-by: github-actions <41898282+github-actions[bot]@users.noreply.github.com>
This commit is contained in:
Tom Owers 2024-10-17 09:49:35 +01:00 committed by GitHub
parent c18fe6f282
commit c7dc6a115a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
47 changed files with 1053 additions and 269 deletions

Binary file not shown.

Before

Width:  |  Height:  |  Size: 159 KiB

After

Width:  |  Height:  |  Size: 159 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 127 KiB

After

Width:  |  Height:  |  Size: 124 KiB

View File

@ -16,6 +16,7 @@ import {
DatabaseSerializedFieldType,
ErrorTrackingGroup,
HogCompileResponse,
HogQLVariable,
QuerySchema,
QueryStatusResponse,
RecordingsQuery,
@ -962,7 +963,8 @@ const api = {
shortId: InsightModel['short_id'],
basic?: boolean,
refresh?: RefreshType,
filtersOverride?: DashboardFilter | null
filtersOverride?: DashboardFilter | null,
variablesOverride?: Record<string, HogQLVariable> | null
): Promise<PaginatedResponse<Partial<InsightModel>>> {
return new ApiRequest()
.insights()
@ -972,6 +974,7 @@ const api = {
basic,
refresh,
filters_override: filtersOverride,
variables_override: variablesOverride,
})
)
.get()
@ -2429,7 +2432,8 @@ const api = {
queryId?: string,
refresh?: boolean,
async?: boolean,
filtersOverride?: DashboardFilter | null
filtersOverride?: DashboardFilter | null,
variablesOverride?: Record<string, HogQLVariable> | null
): Promise<
T extends { [response: string]: any }
? T['response'] extends infer P | undefined
@ -2440,7 +2444,13 @@ const api = {
const refreshParam: RefreshType | undefined = refresh && async ? 'force_async' : async ? 'async' : refresh
return await new ApiRequest().query().create({
...options,
data: { query, client_query_id: queryId, refresh: refreshParam, filters_override: filtersOverride },
data: {
query,
client_query_id: queryId,
refresh: refreshParam,
filters_override: filtersOverride,
variables_override: variablesOverride,
},
})
},

View File

@ -11,6 +11,7 @@ import { insightLogic } from 'scenes/insights/insightLogic'
import { ErrorBoundary } from '~/layout/ErrorBoundary'
import { themeLogic } from '~/layout/navigation-3000/themeLogic'
import { Query } from '~/queries/Query/Query'
import { HogQLVariable } from '~/queries/schema'
import {
DashboardBasicType,
DashboardPlacement,
@ -60,6 +61,8 @@ export interface InsightCardProps extends Resizeable, React.HTMLAttributes<HTMLD
/** Priority for loading the insight, lower is earlier. */
loadPriority?: number
doNotLoad?: boolean
/** Dashboard variables to override the ones in the insight */
variablesOverride?: Record<string, HogQLVariable>
}
function InsightCardInternal(
@ -90,6 +93,7 @@ function InsightCardInternal(
placement,
loadPriority,
doNotLoad,
variablesOverride,
...divProps
}: InsightCardProps,
ref: React.Ref<HTMLDivElement>
@ -141,6 +145,7 @@ function InsightCardInternal(
showEditingControls={showEditingControls}
showDetailsControls={showDetailsControls}
moreButtons={moreButtons}
variablesOverride={variablesOverride}
/>
<div className="InsightCard__viz">
<Query
@ -152,6 +157,7 @@ function InsightCardInternal(
readOnly
embedded
inSharedMode={placement === DashboardPlacement.Public}
variablesOverride={variablesOverride}
/>
</div>
</BindLogic>

View File

@ -44,6 +44,7 @@ interface InsightMetaProps
| 'showEditingControls'
| 'showDetailsControls'
| 'moreButtons'
| 'variablesOverride'
> {
insight: QueryBasedInsightModel
areDetailsShown?: boolean
@ -55,6 +56,7 @@ export function InsightMeta({
ribbonColor,
dashboardId,
updateColor,
variablesOverride,
removeFromDashboard,
deleteWithUndo,
refresh,
@ -98,7 +100,7 @@ export function InsightMeta({
topHeading={<TopHeading insight={insight} />}
meta={
<>
<Link to={urls.insightView(short_id, dashboardId)}>
<Link to={urls.insightView(short_id, dashboardId, variablesOverride)}>
<h4 title={name} data-attr="insight-card-title">
{name || <i>{summary}</i>}
{loading && (
@ -130,7 +132,7 @@ export function InsightMeta({
moreButtons={
<>
<>
<LemonButton to={urls.insightView(short_id, dashboardId)} fullWidth>
<LemonButton to={urls.insightView(short_id, dashboardId, variablesOverride)} fullWidth>
View
</LemonButton>
{refresh && (

View File

@ -13,6 +13,7 @@ import {
DashboardFilter,
DataTableNode,
DataVisualizationNode,
HogQLVariable,
InsightVizNode,
Node,
} from '~/queries/schema'
@ -50,10 +51,20 @@ export interface QueryProps<Q extends Node> {
inSharedMode?: boolean
/** Dashboard filters to override the ones in the query */
filtersOverride?: DashboardFilter | null
/** Dashboard variables to override the ones in the query */
variablesOverride?: Record<string, HogQLVariable> | null
}
export function Query<Q extends Node>(props: QueryProps<Q>): JSX.Element | null {
const { query: propsQuery, setQuery: propsSetQuery, readOnly, embedded, filtersOverride, inSharedMode } = props
const {
query: propsQuery,
setQuery: propsSetQuery,
readOnly,
embedded,
filtersOverride,
variablesOverride,
inSharedMode,
} = props
const [localQuery, localSetQuery] = useState(propsQuery)
useEffect(() => {
@ -102,6 +113,7 @@ export function Query<Q extends Node>(props: QueryProps<Q>): JSX.Element | null
uniqueKey={uniqueKey}
context={queryContext}
readOnly={readOnly}
variablesOverride={props.variablesOverride}
/>
)
} else if (isSavedInsightNode(query)) {
@ -117,6 +129,7 @@ export function Query<Q extends Node>(props: QueryProps<Q>): JSX.Element | null
embedded={embedded}
inSharedMode={inSharedMode}
filtersOverride={filtersOverride}
variablesOverride={variablesOverride}
/>
)
} else if (isWebOverviewQuery(query)) {

View File

@ -2,7 +2,7 @@ import { expectLogic, partial } from 'kea-test-utils'
import { dataNodeLogic } from '~/queries/nodes/DataNode/dataNodeLogic'
import { performQuery } from '~/queries/query'
import { DashboardFilter, NodeKind } from '~/queries/schema'
import { DashboardFilter, HogQLVariable, NodeKind } from '~/queries/schema'
import { initKeaTests } from '~/test/init'
jest.mock('~/queries/query', () => {
@ -473,6 +473,40 @@ describe('dataNodeLogic', () => {
expect.any(String),
expect.any(Function),
filtersOverride,
undefined,
false
)
})
// Verifies that a `variablesOverride` prop on dataNodeLogic is forwarded
// verbatim to performQuery. Argument positions matter: filtersOverride is
// the 6th argument (undefined here) and variablesOverride the 7th.
it('passes variablesOverride to api', async () => {
const variablesOverride: Record<string, HogQLVariable> = {
test_1: {
variableId: 'some_id',
code_name: 'some_name',
value: 'hello world',
},
}
const query = {
kind: NodeKind.EventsQuery,
select: ['*', 'event', 'timestamp'],
}
logic = dataNodeLogic({
key: 'key',
query,
variablesOverride,
})
logic.mount()
// performQuery(query, methodOptions, refresh, queryId, setPollResponse,
//              filtersOverride, variablesOverride, pollOnly)
expect(performQuery).toHaveBeenCalledWith(
query,
expect.anything(),
false,
expect.any(String),
expect.any(Function),
undefined,
variablesOverride,
false
)
})
@ -497,6 +531,32 @@ describe('dataNodeLogic', () => {
expect.any(String),
expect.any(Function),
undefined,
undefined,
false
)
})
// Ensures an explicitly-undefined `variablesOverride` prop is passed through
// to performQuery as `undefined` (not coerced into an empty object), so the
// backend does not treat the insight as having dashboard overrides.
it("doesn't pass undefined variablesOverride to api", async () => {
const query = {
kind: NodeKind.EventsQuery,
select: ['*', 'event', 'timestamp'],
}
logic = dataNodeLogic({
key: 'key',
query,
variablesOverride: undefined,
})
logic.mount()
// 7th argument (variablesOverride) must be undefined here.
expect(performQuery).toHaveBeenCalledWith(
query,
expect.anything(),
false,
expect.any(String),
expect.any(Function),
undefined,
undefined,
false
)
})

View File

@ -28,7 +28,7 @@ import { userLogic } from 'scenes/userLogic'
import { dataNodeCollectionLogic, DataNodeCollectionProps } from '~/queries/nodes/DataNode/dataNodeCollectionLogic'
import { removeExpressionComment } from '~/queries/nodes/DataTable/utils'
import { performQuery } from '~/queries/query'
import { DashboardFilter, QueryStatus } from '~/queries/schema'
import { DashboardFilter, HogQLVariable, QueryStatus } from '~/queries/schema'
import {
ActorsQuery,
ActorsQueryResponse,
@ -66,6 +66,8 @@ export interface DataNodeLogicProps {
/** Dashboard filters to override the ones in the query */
filtersOverride?: DashboardFilter | null
/** Dashboard variables to override the ones in the query */
variablesOverride?: Record<string, HogQLVariable> | null
}
export const AUTOLOAD_INTERVAL = 30000
@ -99,7 +101,7 @@ export const dataNodeLogic = kea<dataNodeLogicType>([
],
],
})),
props({ query: {} } as DataNodeLogicProps),
props({ query: {}, variablesOverride: undefined } as DataNodeLogicProps),
propsChanged(({ actions, props }, oldProps) => {
if (!props.query) {
return // Can't do anything without a query
@ -214,6 +216,7 @@ export const dataNodeLogic = kea<dataNodeLogicType>([
queryId,
actions.setPollResponse,
props.filtersOverride,
props.variablesOverride,
pollOnly
)) ?? null
const duration = performance.now() - now
@ -451,6 +454,10 @@ export const dataNodeLogic = kea<dataNodeLogicType>([
],
})),
selectors(({ cache }) => ({
// True when the mounting component supplied dashboard variable overrides
// via props (used to lock individual variable buttons on dashboards).
// NOTE(review): the input array contains `p.variablesOverride` itself (a
// Record, or a fallback `() => ({})` function when unset) rather than the
// conventional `(_, props) => props.variablesOverride` props selector —
// confirm kea accepts raw prop values as selector inputs here.
variableOverridesAreSet: [
(_, p) => [p.variablesOverride ?? (() => ({}))],
(variablesOverride) => !!variablesOverride,
],
isShowingCachedResults: [
() => [(_, props) => props.cachedResults ?? null, (_, props) => props.query],
(cachedResults: AnyResponseType | null, query: DataNode): boolean => {

View File

@ -67,6 +67,7 @@ describe('dataTableLogic', () => {
expect.any(String),
expect.any(Function),
undefined,
undefined,
false
)
expect(performQuery).toHaveBeenCalledTimes(1)

View File

@ -1,6 +1,6 @@
import './Variables.scss'
import { IconCopy, IconGear } from '@posthog/icons'
import { IconCopy, IconGear, IconTrash } from '@posthog/icons'
import { LemonButton, LemonDivider, LemonInput, Popover } from '@posthog/lemon-ui'
import { useActions, useValues } from 'kea'
import { FEATURE_FLAGS } from 'lib/constants'
@ -8,17 +8,49 @@ import { LemonField } from 'lib/lemon-ui/LemonField'
import { featureFlagLogic } from 'lib/logic/featureFlagLogic'
import { copyToClipboard } from 'lib/utils/copyToClipboard'
import { useEffect, useRef, useState } from 'react'
import { dashboardLogic } from 'scenes/dashboard/dashboardLogic'
import { dataNodeLogic } from '~/queries/nodes/DataNode/dataNodeLogic'
import { dataVisualizationLogic } from '../../dataVisualizationLogic'
import { Variable } from '../../types'
import { NewVariableModal } from './NewVariableModal'
import { variablesLogic } from './variablesLogic'
export const Variables = (): JSX.Element => {
export const VariablesForDashboard = (): JSX.Element => {
const { featureFlags } = useValues(featureFlagLogic)
const { variablesForInsight } = useValues(variablesLogic)
const { dashboardVariables } = useValues(dashboardLogic)
const { overrideVariableValue } = useActions(dashboardLogic)
if (!featureFlags[FEATURE_FLAGS.INSIGHT_VARIABLES] || !variablesForInsight.length) {
if (!featureFlags[FEATURE_FLAGS.INSIGHT_VARIABLES] || !dashboardVariables.length) {
return <></>
}
return (
<>
<div className="flex gap-4 flex-wrap px-px mt-4">
{dashboardVariables.map((n) => (
<VariableComponent
key={n.id}
variable={n}
showEditingUI={false}
onChange={overrideVariableValue}
variableOverridesAreSet={false}
/>
))}
</div>
</>
)
}
export const VariablesForInsight = (): JSX.Element => {
const { featureFlags } = useValues(featureFlagLogic)
const { variablesForInsight, showVariablesBar } = useValues(variablesLogic)
const { updateVariableValue, removeVariable } = useActions(variablesLogic)
const { showEditingUI } = useValues(dataVisualizationLogic)
const { variableOverridesAreSet } = useValues(dataNodeLogic)
if (!featureFlags[FEATURE_FLAGS.INSIGHT_VARIABLES] || !variablesForInsight.length || !showVariablesBar) {
return <></>
}
@ -26,7 +58,14 @@ export const Variables = (): JSX.Element => {
<>
<div className="flex gap-4 flex-wrap px-px">
{variablesForInsight.map((n) => (
<VariableComponent key={n.id} variable={n} />
<VariableComponent
key={n.id}
variable={n}
showEditingUI={showEditingUI}
onChange={updateVariableValue}
onRemove={removeVariable}
variableOverridesAreSet={variableOverridesAreSet}
/>
))}
</div>
<NewVariableModal />
@ -34,10 +73,21 @@ export const Variables = (): JSX.Element => {
)
}
const VariableInput = ({ variable, closePopover }: { variable: Variable; closePopover: () => void }): JSX.Element => {
const { showEditingUI } = useValues(dataVisualizationLogic)
const { updateVariableValue } = useActions(variablesLogic)
interface VariableInputProps {
variable: Variable
showEditingUI: boolean
closePopover: () => void
onChange: (variableId: string, value: any) => void
onRemove?: (variableId: string) => void
}
const VariableInput = ({
variable,
showEditingUI,
closePopover,
onChange,
onRemove,
}: VariableInputProps): JSX.Element => {
const [localInputValue, setLocalInputValue] = useState(variable.value ?? variable.default_value ?? '')
const inputRef = useRef<HTMLInputElement>(null)
@ -59,14 +109,14 @@ const VariableInput = ({ variable, closePopover }: { variable: Variable; closePo
value={localInputValue.toString()}
onChange={(value) => setLocalInputValue(value)}
onPressEnter={() => {
updateVariableValue(variable.id, localInputValue)
onChange(variable.id, localInputValue)
closePopover()
}}
/>
<LemonButton
type="primary"
onClick={() => {
updateVariableValue(variable.id, localInputValue)
onChange(variable.id, localInputValue)
closePopover()
}}
>
@ -102,6 +152,14 @@ const VariableInput = ({ variable, closePopover }: { variable: Variable; closePo
onClick={() => void copyToClipboard(variableAsHogQL, 'variable HogQL')}
tooltip="Copy HogQL"
/>
{onRemove && (
<LemonButton
onClick={() => onRemove(variable.id)}
icon={<IconTrash />}
size="xsmall"
tooltip="Remove variable from insight"
/>
)}
<LemonButton icon={<IconGear />} size="xsmall" tooltip="Open variable settings" />
</div>
</>
@ -110,13 +168,35 @@ const VariableInput = ({ variable, closePopover }: { variable: Variable; closePo
)
}
const VariableComponent = ({ variable }: { variable: Variable }): JSX.Element => {
interface VariableComponentProps {
variable: Variable
showEditingUI: boolean
onChange: (variableId: string, value: any) => void
variableOverridesAreSet: boolean
onRemove?: (variableId: string) => void
}
const VariableComponent = ({
variable,
showEditingUI,
onChange,
variableOverridesAreSet,
onRemove,
}: VariableComponentProps): JSX.Element => {
const [isPopoverOpen, setPopoverOpen] = useState(false)
return (
<Popover
key={variable.id}
overlay={<VariableInput variable={variable} closePopover={() => setPopoverOpen(false)} />}
overlay={
<VariableInput
variable={variable}
showEditingUI={showEditingUI}
onChange={onChange}
closePopover={() => setPopoverOpen(false)}
onRemove={onRemove}
/>
}
visible={isPopoverOpen}
onClickOutside={() => setPopoverOpen(false)}
className="DataVizVariable_Popover"
@ -131,6 +211,7 @@ const VariableComponent = ({ variable }: { variable: Variable }): JSX.Element =>
type="secondary"
className="min-w-32 DataVizVariable_Button"
onClick={() => setPopoverOpen(!isPopoverOpen)}
disabledReason={variableOverridesAreSet && 'Discard dashboard variables to change'}
>
{variable.value ?? variable.default_value}
</LemonButton>

View File

@ -1,9 +1,11 @@
import { actions, kea, path, reducers } from 'kea'
import { actions, connect, kea, key, path, props, reducers } from 'kea'
import { loaders } from 'kea-loaders'
import api from 'lib/api'
import { BooleanVariable, ListVariable, NumberVariable, StringVariable, Variable, VariableType } from '../../types'
import type { addVariableLogicType } from './addVariableLogicType'
import { variableDataLogic } from './variableDataLogic'
import { variablesLogic } from './variablesLogic'
const DEFAULT_VARIABLE: StringVariable = {
id: '',
@ -13,8 +15,17 @@ const DEFAULT_VARIABLE: StringVariable = {
code_name: '',
}
export interface AddVariableLogicProps {
key: string
}
export const addVariableLogic = kea<addVariableLogicType>([
path(['queries', 'nodes', 'DataVisualization', 'Components', 'Variables', 'variableLogic']),
props({ key: '' } as AddVariableLogicProps),
key((props) => props.key),
connect({
actions: [variableDataLogic, ['getVariables'], variablesLogic, ['addVariable']],
}),
actions({
openModal: (variableType: VariableType) => ({ variableType }),
closeModal: true,
@ -86,12 +97,18 @@ export const addVariableLogic = kea<addVariableLogicType>([
},
],
}),
loaders(({ values }) => ({
loaders(({ values, actions }) => ({
savedVariable: [
null as null | Variable,
{
save: async () => {
return await api.insightVariables.create(values.variable)
const variable = await api.insightVariables.create(values.variable)
actions.getVariables()
actions.addVariable({ variableId: variable.id, code_name: variable.code_name })
actions.closeModal()
return variable
},
},
],

View File

@ -0,0 +1,22 @@
import { kea, path } from 'kea'
import { loaders } from 'kea-loaders'
import api from 'lib/api'
import { Variable } from '../../types'
import type { variableDataLogicType } from './variableDataLogicType'
/**
 * Shared kea logic that owns the list of saved insight variables.
 * Extracted so both the insight-level and dashboard-level variable UIs can
 * read the same `variables` / `variablesLoading` state.
 */
export const variableDataLogic = kea<variableDataLogicType>([
    path(['queries', 'nodes', 'DataVisualization', 'Components', 'Variables', 'variableDataLogic']),
    loaders({
        variables: [
            [] as Variable[],
            {
                // Fetch every saved variable for the current team.
                getVariables: async (): Promise<Variable[]> => (await api.insightVariables.list()).results,
            },
        ],
    }),
])

View File

@ -1,15 +1,14 @@
import { actions, afterMount, connect, kea, key, path, props, reducers, selectors } from 'kea'
import { loaders } from 'kea-loaders'
import { actions, afterMount, connect, kea, key, listeners, path, props, reducers, selectors } from 'kea'
import { subscriptions } from 'kea-subscriptions'
import api from 'lib/api'
import { FEATURE_FLAGS } from 'lib/constants'
import { featureFlagLogic } from 'lib/logic/featureFlagLogic'
import { getVariablesFromQuery } from 'scenes/insights/utils/queryUtils'
import { getVariablesFromQuery, haveVariablesOrFiltersChanged } from 'scenes/insights/utils/queryUtils'
import { DataVisualizationNode, HogQLVariable } from '~/queries/schema'
import { dataVisualizationLogic } from '../../dataVisualizationLogic'
import { Variable } from '../../types'
import { variableDataLogic } from './variableDataLogic'
import type { variablesLogicType } from './variablesLogicType'
export interface VariablesLogicProps {
@ -23,21 +22,39 @@ export const variablesLogic = kea<variablesLogicType>([
props({ key: '' } as VariablesLogicProps),
key((props) => props.key),
connect({
actions: [dataVisualizationLogic, ['setQuery', 'loadData']],
values: [dataVisualizationLogic, ['query'], featureFlagLogic, ['featureFlags']],
actions: [dataVisualizationLogic, ['setQuery', 'loadData'], variableDataLogic, ['getVariables']],
values: [
dataVisualizationLogic,
['query', 'insightLogicProps'],
variableDataLogic,
['variables', 'variablesLoading'],
featureFlagLogic,
['featureFlags'],
],
}),
actions({
addVariable: (variable: HogQLVariable) => ({ variable }),
addVariables: (variables: HogQLVariable[]) => ({ variables }),
removeVariable: (variableId: string) => ({ variableId }),
updateVariableValue: (variableId: string, value: any) => ({ variableId, value }),
setEditorQuery: (query: string) => ({ query }),
resetVariables: true,
updateSourceQuery: true,
}),
reducers({
internalSelectedVariables: [
[] as HogQLVariable[],
{
addVariable: (state, { variable }) => {
if (state.find((n) => variable.variableId === n.variableId)) {
return state
}
return [...state, { ...variable }]
},
addVariables: (state, { variables }) => {
return [...state, ...variables.map((n) => ({ ...n }))]
},
updateVariableValue: (state, { variableId, value }) => {
const variableIndex = state.findIndex((n) => n.variableId === variableId)
if (variableIndex < 0) {
@ -49,6 +66,16 @@ export const variablesLogic = kea<variablesLogicType>([
return variablesInState
},
// Remove exactly the variable matching `variableId` from the selection.
// Bug fix: the previous `stateCopy.splice(index)` omitted the deleteCount
// argument, which deletes EVERY element from `index` to the end of the
// array — removing one variable would silently drop all later ones too.
removeVariable: (state, { variableId }) => {
    return state.filter((n) => n.variableId !== variableId)
},
resetVariables: () => [],
},
],
editorQuery: [
@ -59,23 +86,11 @@ export const variablesLogic = kea<variablesLogicType>([
},
],
}),
loaders({
variables: [
[] as Variable[],
{
getVariables: async () => {
const insights = await api.insightVariables.list()
return insights.results
},
},
],
}),
selectors({
variablesForInsight: [
(s) => [s.variables, s.internalSelectedVariables],
(variables, internalSelectedVariables): Variable[] => {
if (!variables.length || !internalSelectedVariables.length) {
(s) => [s.variables, s.internalSelectedVariables, s.variablesLoading],
(variables, internalSelectedVariables, variablesLoading): Variable[] => {
if (!variables.length || !internalSelectedVariables.length || variablesLoading) {
return []
}
@ -91,9 +106,30 @@ export const variablesLogic = kea<variablesLogicType>([
.filter((n): n is Variable => Boolean(n))
},
],
showVariablesBar: [
(state) => [state.insightLogicProps],
(insightLogicProps) => {
return !insightLogicProps.dashboardId
},
],
}),
subscriptions(({ props, actions, values }) => ({
variablesForInsight: (variables: Variable[]) => {
listeners(({ props, values, actions }) => ({
addVariable: () => {
actions.updateSourceQuery()
},
removeVariable: () => {
actions.updateSourceQuery()
},
updateVariableValue: () => {
actions.updateSourceQuery()
},
updateSourceQuery: () => {
if (!values.featureFlags[FEATURE_FLAGS.INSIGHT_VARIABLES]) {
return
}
const variables = values.variablesForInsight
const query: DataVisualizationNode = {
...values.query,
source: {
@ -112,7 +148,8 @@ export const variablesLogic = kea<variablesLogicType>([
},
}
if (!values.featureFlags[FEATURE_FLAGS.INSIGHT_VARIABLES]) {
const queryVarsHaveChanged = haveVariablesOrFiltersChanged(query.source, values.query.source)
if (!queryVarsHaveChanged) {
return
}
@ -124,6 +161,8 @@ export const variablesLogic = kea<variablesLogicType>([
actions.setQuery(query)
}
},
})),
subscriptions(({ actions, values }) => ({
editorQuery: (query: string) => {
const queryVariableMatches = getVariablesFromQuery(query)
@ -143,16 +182,25 @@ export const variablesLogic = kea<variablesLogicType>([
}
})
},
query: (query: DataVisualizationNode) => {
if (!values.featureFlags[FEATURE_FLAGS.INSIGHT_VARIABLES]) {
return
}
actions.resetVariables()
const variables = Object.values(query.source.variables ?? {})
if (variables.length) {
actions.addVariables(variables)
}
},
})),
afterMount(({ actions, values }) => {
if (!values.featureFlags[FEATURE_FLAGS.INSIGHT_VARIABLES]) {
return
}
Object.values(values.query.source.variables ?? {}).forEach((variable) => {
actions.addVariable(variable)
})
actions.getVariables()
}),
])

View File

@ -17,7 +17,14 @@ import { HogQLBoldNumber } from 'scenes/insights/views/BoldNumber/BoldNumber'
import { urls } from 'scenes/urls'
import { insightVizDataCollectionId, insightVizDataNodeKey } from '~/queries/nodes/InsightViz/InsightViz'
import { AnyResponseType, DataVisualizationNode, HogQLQuery, HogQLQueryResponse, NodeKind } from '~/queries/schema'
import {
AnyResponseType,
DataVisualizationNode,
HogQLQuery,
HogQLQueryResponse,
HogQLVariable,
NodeKind,
} from '~/queries/schema'
import { QueryContext } from '~/queries/types'
import { ChartDisplayType, ExporterFormat, InsightLogicProps } from '~/types'
@ -32,7 +39,8 @@ import { SideBar } from './Components/SideBar'
import { Table } from './Components/Table'
import { TableDisplay } from './Components/TableDisplay'
import { AddVariableButton } from './Components/Variables/AddVariableButton'
import { Variables } from './Components/Variables/Variables'
import { addVariableLogic } from './Components/Variables/addVariableLogic'
import { VariablesForInsight } from './Components/Variables/Variables'
import { variablesLogic } from './Components/Variables/variablesLogic'
import { dataVisualizationLogic, DataVisualizationLogicProps } from './dataVisualizationLogic'
import { displayLogic } from './displayLogic'
@ -46,6 +54,8 @@ interface DataTableVisualizationProps {
the data node logic becomes read only implicitly */
cachedResults?: AnyResponseType
readOnly?: boolean
/** Dashboard variables to override the ones in the query */
variablesOverride?: Record<string, HogQLVariable> | null
}
let uniqueNode = 0
@ -57,6 +67,7 @@ export function DataTableVisualization({
context,
cachedResults,
readOnly,
variablesOverride,
}: DataTableVisualizationProps): JSX.Element {
const [key] = useState(`DataVisualizationNode.${uniqueKey ?? uniqueNode++}`)
const insightProps: InsightLogicProps<DataVisualizationNode> = context?.insightProps || {
@ -73,6 +84,7 @@ export function DataTableVisualization({
insightLogicProps: insightProps,
setQuery,
cachedResults,
variablesOverride,
}
const dataNodeLogicProps: DataNodeLogicProps = {
@ -81,6 +93,7 @@ export function DataTableVisualization({
cachedResults,
loadPriority: insightProps.loadPriority,
dataNodeCollectionId: insightVizDataCollectionId(insightProps, key),
variablesOverride,
}
return (
@ -91,14 +104,16 @@ export function DataTableVisualization({
logic={variablesLogic}
props={{ key: dataVisualizationLogicProps.key, readOnly: readOnly ?? false }}
>
<InternalDataTableVisualization
uniqueKey={key}
query={query}
setQuery={setQuery}
context={context}
cachedResults={cachedResults}
readOnly={readOnly}
/>
<BindLogic logic={addVariableLogic} props={{ key: dataVisualizationLogicProps.key }}>
<InternalDataTableVisualization
uniqueKey={key}
query={query}
setQuery={setQuery}
context={context}
cachedResults={cachedResults}
readOnly={readOnly}
/>
</BindLogic>
</BindLogic>
</BindLogic>
</BindLogic>
@ -238,7 +253,7 @@ function InternalDataTableVisualization(props: DataTableVisualizationProps): JSX
</>
)}
<Variables />
<VariablesForInsight />
<div className="flex flex-1 flex-row gap-4">
{showEditingUI && isChartSettingsPanelOpen && (

View File

@ -16,6 +16,7 @@ import {
ChartSettingsFormatting,
ConditionalFormattingRule,
DataVisualizationNode,
HogQLVariable,
} from '~/queries/schema'
import { QueryContext } from '~/queries/types'
import { ChartDisplayType, InsightLogicProps, ItemMode } from '~/types'
@ -68,6 +69,8 @@ export interface DataVisualizationLogicProps {
context?: QueryContext<DataVisualizationNode>
cachedResults?: AnyResponseType
insightLoading?: boolean
/** Dashboard variables to override the ones in the query */
variablesOverride?: Record<string, HogQLVariable> | null
}
export interface SelectedYAxis {
@ -222,6 +225,7 @@ export const dataVisualizationLogic = kea<dataVisualizationLogicType>([
query: props.query.source,
dataNodeCollectionId: insightVizDataCollectionId(props.insightLogicProps, props.key),
loadPriority: props.insightLogicProps.loadPriority,
variablesOverride: props.variablesOverride,
}),
['response', 'responseLoading', 'responseError', 'queryCancelled'],
themeLogic,
@ -234,11 +238,12 @@ export const dataVisualizationLogic = kea<dataVisualizationLogicType>([
query: props.query.source,
dataNodeCollectionId: insightVizDataCollectionId(props.insightLogicProps, props.key),
loadPriority: props.insightLogicProps.loadPriority,
variablesOverride: props.variablesOverride,
}),
['loadData'],
],
})),
props({ query: {} } as DataVisualizationLogicProps),
props({ query: { source: {} } } as DataVisualizationLogicProps),
actions(({ values }) => ({
setVisualizationType: (visualizationType: ChartDisplayType) => ({ visualizationType }),
updateXSeries: (columnName: string) => ({
@ -559,6 +564,7 @@ export const dataVisualizationLogic = kea<dataVisualizationLogicType>([
return insightMode == ItemMode.Edit
},
],
insightLogicProps: [(_state, props) => [props.insightLogicProps], (insightLogicProps) => insightLogicProps],
showResultControls: [
(state, props) => [state.insightMode, props.insightLogicProps],
(insightMode, insightLogicProps) => {

View File

@ -10,7 +10,7 @@ import { insightVizDataLogic } from 'scenes/insights/insightVizDataLogic'
import { keyForInsightLogicProps } from 'scenes/insights/sharedUtils'
import { ErrorBoundary } from '~/layout/ErrorBoundary'
import { DashboardFilter, InsightVizNode } from '~/queries/schema'
import { DashboardFilter, HogQLVariable, InsightVizNode } from '~/queries/schema'
import { QueryContext } from '~/queries/types'
import { isFunnelsQuery } from '~/queries/utils'
import { InsightLogicProps, ItemMode } from '~/types'
@ -38,6 +38,7 @@ type InsightVizProps = {
embedded?: boolean
inSharedMode?: boolean
filtersOverride?: DashboardFilter | null
variablesOverride?: Record<string, HogQLVariable> | null
}
let uniqueNode = 0
@ -51,6 +52,7 @@ export function InsightViz({
embedded,
inSharedMode,
filtersOverride,
variablesOverride,
}: InsightVizProps): JSX.Element {
const [key] = useState(() => `InsightViz.${uniqueKey || uniqueNode++}`)
const insightProps: InsightLogicProps = context?.insightProps || {
@ -59,6 +61,7 @@ export function InsightViz({
setQuery,
dataNodeCollectionId: key,
filtersOverride,
variablesOverride,
}
if (!insightProps.setQuery && setQuery) {
@ -75,6 +78,7 @@ export function InsightViz({
loadPriority: insightProps.loadPriority,
dataNodeCollectionId: insightVizDataCollectionId(insightProps, vizKey),
filtersOverride,
variablesOverride,
}
const { insightMode } = useValues(insightSceneLogic)

View File

@ -6,7 +6,16 @@ import posthog from 'posthog-js'
import { OnlineExportContext, QueryExportContext } from '~/types'
import { DashboardFilter, DataNode, HogQLQuery, HogQLQueryResponse, NodeKind, PersonsNode, QueryStatus } from './schema'
import {
DashboardFilter,
DataNode,
HogQLQuery,
HogQLQueryResponse,
HogQLVariable,
NodeKind,
PersonsNode,
QueryStatus,
} from './schema'
import {
isAsyncResponse,
isDataTableNode,
@ -79,6 +88,7 @@ async function executeQuery<N extends DataNode>(
queryId?: string,
setPollResponse?: (response: QueryStatus) => void,
filtersOverride?: DashboardFilter | null,
variablesOverride?: Record<string, HogQLVariable> | null,
/**
* Whether to limit the function to just polling the provided query ID.
* This is important in shared contexts, where we cannot create arbitrary queries via POST we can only GET.
@ -91,7 +101,15 @@ async function executeQuery<N extends DataNode>(
!!featureFlagLogic.findMounted()?.values.featureFlags?.[FEATURE_FLAGS.QUERY_ASYNC]
if (!pollOnly) {
const response = await api.query(queryNode, methodOptions, queryId, refresh, isAsyncQuery, filtersOverride)
const response = await api.query(
queryNode,
methodOptions,
queryId,
refresh,
isAsyncQuery,
filtersOverride,
variablesOverride
)
if (!isAsyncResponse(response)) {
// Executed query synchronously or from cache
@ -124,6 +142,7 @@ export async function performQuery<N extends DataNode>(
queryId?: string,
setPollResponse?: (status: QueryStatus) => void,
filtersOverride?: DashboardFilter | null,
variablesOverride?: Record<string, HogQLVariable> | null,
pollOnly = false
): Promise<NonNullable<N['response']>> {
let response: NonNullable<N['response']>
@ -141,6 +160,7 @@ export async function performQuery<N extends DataNode>(
queryId,
setPollResponse,
filtersOverride,
variablesOverride,
pollOnly
)
if (isHogQLQuery(queryNode) && response && typeof response === 'object') {

View File

@ -7986,6 +7986,12 @@
"$ref": "#/definitions/RefreshType",
"default": "blocking",
"description": "Whether results should be calculated sync or async, and how much to rely on the cache:\n- `'blocking'` - calculate synchronously (returning only when the query is done), UNLESS there are very fresh results in the cache\n- `'async'` - kick off background calculation (returning immediately with a query status), UNLESS there are very fresh results in the cache\n- `'lazy_async'` - kick off background calculation, UNLESS there are somewhat fresh results in the cache\n- `'force_blocking'` - calculate synchronously, even if fresh results are already cached\n- `'force_async'` - kick off background calculation, even if fresh results are already cached\n- `'force_cache'` - return cached data or a cache miss; always completes immediately as it never calculates Background calculation can be tracked using the `query_status` response field."
},
"variables_override": {
"additionalProperties": {
"type": "object"
},
"type": "object"
}
},
"required": ["query"],

View File

@ -1221,6 +1221,7 @@ export interface QueryRequest {
*/
query: QuerySchema
filters_override?: DashboardFilter
variables_override?: Record<string, Record<string, any>>
}
/**

View File

@ -12,6 +12,7 @@ import { InsightErrorState } from 'scenes/insights/EmptyStates'
import { SceneExport } from 'scenes/sceneTypes'
import { urls } from 'scenes/urls'
import { VariablesForDashboard } from '~/queries/nodes/DataVisualization/Components/Variables/Variables'
import { DashboardMode, DashboardPlacement, DashboardType, QueryBasedInsightModel } from '~/types'
import { DashboardHeader } from './DashboardHeader'
@ -124,6 +125,7 @@ function DashboardScene(): JSX.Element {
</div>
)}
</div>
<VariablesForDashboard />
<DashboardItems />
</div>
)}

View File

@ -27,6 +27,7 @@ export function DashboardItems(): JSX.Element {
refreshStatus,
canEditDashboard,
itemsLoading,
temporaryVariables,
} = useValues(dashboardLogic)
const {
updateLayouts,
@ -152,6 +153,7 @@ export function DashboardItems(): JSX.Element {
showDetailsControls={placement != DashboardPlacement.Export}
placement={placement}
loadPriority={smLayout ? smLayout.y * 1000 + smLayout.x : undefined}
variablesOverride={temporaryVariables}
{...commonTileProps}
/>
)

View File

@ -306,7 +306,7 @@ describe('dashboardLogic', () => {
jest.spyOn(api, 'update')
await expectLogic(logic, () => {
logic.actions.updateFiltersAndLayouts()
logic.actions.updateFiltersAndLayoutsAndVariables()
}).toFinishAllListeners()
expect(api.update).toHaveBeenCalledWith(`api/environments/${MOCK_TEAM_ID}/dashboards/5`, {
@ -329,6 +329,7 @@ describe('dashboardLogic', () => {
date_to: null,
properties: [],
},
variables: {},
})
})
})

View File

@ -23,6 +23,7 @@ import { Link } from 'lib/lemon-ui/Link'
import { featureFlagLogic } from 'lib/logic/featureFlagLogic'
import { clearDOMTextSelection, isAbortedRequest, shouldCancelQuery, toParams, uuid } from 'lib/utils'
import { DashboardEventSource, eventUsageLogic } from 'lib/utils/eventUsageLogic'
import uniqBy from 'lodash.uniqby'
import { Layout, Layouts } from 'react-grid-layout'
import { calculateLayouts } from 'scenes/dashboard/tileLayouts'
import { Scene } from 'scenes/sceneTypes'
@ -31,9 +32,11 @@ import { userLogic } from 'scenes/userLogic'
import { dashboardsModel } from '~/models/dashboardsModel'
import { insightsModel } from '~/models/insightsModel'
import { variableDataLogic } from '~/queries/nodes/DataVisualization/Components/Variables/variableDataLogic'
import { Variable } from '~/queries/nodes/DataVisualization/types'
import { getQueryBasedDashboard, getQueryBasedInsightModel } from '~/queries/nodes/InsightViz/utils'
import { pollForResults } from '~/queries/query'
import { DashboardFilter, RefreshType } from '~/queries/schema'
import { DashboardFilter, DataVisualizationNode, HogQLVariable, NodeKind, RefreshType } from '~/queries/schema'
import {
AnyPropertyFilter,
Breadcrumb,
@ -139,7 +142,8 @@ async function getSingleInsight(
queryId: string,
refresh: RefreshType,
methodOptions?: ApiMethodOptions,
filtersOverride?: DashboardFilter
filtersOverride?: DashboardFilter,
variablesOverride?: Record<string, HogQLVariable>
): Promise<QueryBasedInsightModel | null> {
const apiUrl = `api/environments/${currentTeamId}/insights/${insight.id}/?${toParams({
refresh,
@ -147,6 +151,7 @@ async function getSingleInsight(
client_query_id: queryId,
session_id: currentSessionId(),
...(filtersOverride ? { filters_override: filtersOverride } : {}),
...(variablesOverride ? { variables_override: variablesOverride } : {}),
})}`
const insightResponse: Response = await api.getResponse(apiUrl, methodOptions)
const legacyInsight: InsightModel | null = await getJSONOrNull(insightResponse)
@ -156,7 +161,7 @@ async function getSingleInsight(
export const dashboardLogic = kea<dashboardLogicType>([
path(['scenes', 'dashboard', 'dashboardLogic']),
connect(() => ({
values: [teamLogic, ['currentTeamId'], featureFlagLogic, ['featureFlags']],
values: [teamLogic, ['currentTeamId'], featureFlagLogic, ['featureFlags'], variableDataLogic, ['variables']],
logic: [dashboardsModel, insightsModel, eventUsageLogic],
})),
@ -169,7 +174,7 @@ export const dashboardLogic = kea<dashboardLogicType>([
return props.id
}),
actions({
actions(({ values }) => ({
loadDashboard: (payload: {
refresh?: RefreshType
action:
@ -201,7 +206,10 @@ export const dashboardLogic = kea<dashboardLogicType>([
date_to,
}),
setProperties: (properties: AnyPropertyFilter[] | null) => ({ properties }),
setFiltersAndLayouts: (filters: DashboardFilter) => ({ filters }),
setFiltersAndLayoutsAndVariables: (filters: DashboardFilter, variables: Record<string, HogQLVariable>) => ({
filters,
variables,
}),
setAutoRefresh: (enabled: boolean, interval: number) => ({ enabled, interval }),
setRefreshStatus: (shortId: InsightShortId, loading = false, queued = false) => ({ shortId, loading, queued }),
setRefreshStatuses: (shortIds: InsightShortId[], loading = false, queued = false) => ({
@ -233,8 +241,14 @@ export const dashboardLogic = kea<dashboardLogicType>([
setInitialLoadResponseBytes: (responseBytes: number) => ({ responseBytes }),
abortQuery: (payload: { dashboardQueryId: string; queryId: string; queryStartTime: number }) => payload,
abortAnyRunningQuery: true,
updateFiltersAndLayouts: true,
}),
updateFiltersAndLayoutsAndVariables: true,
overrideVariableValue: (variableId: string, value: any) => ({
variableId,
value,
allVariables: values.variables,
}),
resetVariables: () => ({ variables: values.insightVariables }),
})),
loaders(({ actions, props, values }) => ({
dashboard: [
@ -248,7 +262,8 @@ export const dashboardLogic = kea<dashboardLogicType>([
try {
const apiUrl = values.apiUrl(
refresh || 'async',
action === 'preview' ? values.temporaryFilters : undefined
action === 'preview' ? values.temporaryFilters : undefined,
action === 'preview' ? values.temporaryVariables : undefined
)
const dashboardResponse: Response = await api.getResponse(apiUrl)
const dashboard: DashboardType<InsightModel> | null = await getJSONOrNull(dashboardResponse)
@ -282,7 +297,7 @@ export const dashboardLogic = kea<dashboardLogicType>([
throw error
}
},
updateFiltersAndLayouts: async (_, breakpoint) => {
updateFiltersAndLayoutsAndVariables: async (_, breakpoint) => {
actions.abortAnyRunningQuery()
try {
@ -297,6 +312,7 @@ export const dashboardLogic = kea<dashboardLogicType>([
`api/environments/${values.currentTeamId}/dashboards/${props.id}`,
{
filters: values.filters,
variables: values.insightVariables,
tiles: layoutsToUpdate,
}
)
@ -432,6 +448,48 @@ export const dashboardLogic = kea<dashboardLogicType>([
},
},
],
// Per-dashboard, unsaved variable value overrides, keyed by variable ID.
// These are what the user has tweaked while previewing/editing; they are
// passed along as `variables_override` until the dashboard is saved.
temporaryVariables: [
    {} as Record<string, HogQLVariable>,
    {
        overrideVariableValue: (state, { variableId, value, allVariables }) => {
            // ignore overrides for variables that no longer exist
            const foundExistingVar = allVariables.find((n) => n.id === variableId)
            if (!foundExistingVar) {
                return state
            }

            return {
                ...state,
                [variableId]: { code_name: foundExistingVar.code_name, variableId: foundExistingVar.id, value },
            }
        },
        // discard unsaved overrides, restoring the saved insight variables
        resetVariables: (_, { variables }) => ({ ...variables }),
        loadDashboardSuccess: (state, { dashboard, payload }) =>
            dashboard
                ? {
                      ...state,
                      // don't update variables if we're previewing
                      ...(payload?.action === 'preview' ? {} : dashboard.variables ?? {}),
                  }
                : state,
    },
],
// The persisted (saved) variable values for this dashboard, keyed by
// variable ID. Updated when filters/layouts/variables are saved and when
// a dashboard load returns its stored variables.
insightVariables: [
    {} as Record<string, HogQLVariable>,
    {
        setFiltersAndLayoutsAndVariables: (state, { variables }) => ({
            ...state,
            ...variables,
        }),
        loadDashboardSuccess: (state, { dashboard, payload }) =>
            dashboard
                ? {
                      ...state,
                      // don't update variables if we're previewing
                      ...(payload?.action === 'preview' ? {} : dashboard.variables ?? {}),
                  }
                : state,
    },
],
temporaryFilters: [
{
date_from: null,
@ -466,7 +524,7 @@ export const dashboardLogic = kea<dashboardLogicType>([
properties: null,
} as DashboardFilter,
{
setFiltersAndLayouts: (state, { filters }) => ({
setFiltersAndLayoutsAndVariables: (state, { filters }) => ({
...state,
...filters,
}),
@ -689,6 +747,44 @@ export const dashboardLogic = kea<dashboardLogicType>([
],
})),
selectors(() => ({
// All variables referenced by DataVisualization insights on this dashboard,
// deduplicated, with each value resolved in priority order:
// temporary (unsaved) override → value stored on the insight → variable default.
dashboardVariables: [
    (s) => [s.dashboard, s.variables, s.temporaryVariables],
    (
        dashboard: DashboardType,
        allVariables: Variable[],
        temporaryVariables: Record<string, HogQLVariable>
    ): Variable[] => {
        // collect every variable referenced by a DataVisualization tile
        const tileVariables = dashboard.tiles
            .map((tile) => tile.insight?.query)
            .filter((query) => query?.kind === NodeKind.DataVisualizationNode)
            .filter((query): query is DataVisualizationNode => Boolean(query))
            .map((query) => query.source.variables)
            .filter((vars): vars is Record<string, HogQLVariable> => Boolean(vars))
            .flatMap((vars) => Object.values(vars))

        const resolved: Variable[] = []
        for (const insightVar of uniqBy(tileVariables, (v) => v.variableId)) {
            const definition = allVariables.find((candidate) => candidate.id === insightVar.variableId)
            if (!definition) {
                // the variable was deleted but an insight still references it
                continue
            }
            const overriddenValue = temporaryVariables[insightVar.variableId]?.value
            // Overwrite the variable `value` from the insight
            resolved.push({
                ...definition,
                value: overriddenValue ?? insightVar.value ?? definition.value,
            })
        }
        return resolved
    },
],
asDashboardTemplate: [
(s) => [s.dashboard],
(dashboard: DashboardType): DashboardTemplateEditorType | undefined => {
@ -731,10 +827,15 @@ export const dashboardLogic = kea<dashboardLogicType>([
apiUrl: [
() => [(_, props) => props.id],
(id) => {
return (refresh?: RefreshType, filtersOverride?: DashboardFilter) =>
return (
refresh?: RefreshType,
filtersOverride?: DashboardFilter,
variablesOverride?: Record<string, HogQLVariable>
) =>
`api/environments/${teamLogic.values.currentTeamId}/dashboards/${id}/?${toParams({
refresh,
filters_override: filtersOverride,
variables_override: variablesOverride,
})}`
},
],
@ -947,7 +1048,7 @@ export const dashboardLogic = kea<dashboardLogicType>([
},
})),
listeners(({ actions, values, cache, props, sharedListeners }) => ({
updateFiltersAndLayoutsSuccess: () => {
updateFiltersAndLayoutsAndVariablesSuccess: () => {
actions.loadDashboard({ action: 'update' })
},
setRefreshError: sharedListeners.reportRefreshTiming,
@ -1046,7 +1147,10 @@ export const dashboardLogic = kea<dashboardLogicType>([
insight,
dashboardId,
uuid(),
'force_async'
'force_async',
undefined,
undefined,
values.temporaryVariables
)
dashboardsModel.actions.updateDashboardInsight(refreshedInsight!)
// Start polling for results
@ -1138,7 +1242,8 @@ export const dashboardLogic = kea<dashboardLogicType>([
queryId,
'force_cache',
methodOptions,
action === 'preview' ? values.temporaryFilters : undefined
action === 'preview' ? values.temporaryFilters : undefined,
action === 'preview' ? values.temporaryVariables : undefined
)
if (action === 'preview' && polledInsight!.dashboard_tiles) {
@ -1187,8 +1292,8 @@ export const dashboardLogic = kea<dashboardLogicType>([
eventUsageLogic.actions.reportDashboardRefreshed(dashboardId, values.newestRefreshed)
},
setFiltersAndLayouts: ({ filters: { date_from, date_to } }) => {
actions.updateFiltersAndLayouts()
setFiltersAndLayoutsAndVariables: ({ filters: { date_from, date_to } }) => {
actions.updateFiltersAndLayoutsAndVariables()
eventUsageLogic.actions.reportDashboardDateRangeChanged(date_from, date_to)
eventUsageLogic.actions.reportDashboardPropertiesChanged()
},
@ -1203,12 +1308,13 @@ export const dashboardLogic = kea<dashboardLogicType>([
// reset filters to that before previewing
actions.setDates(values.filters.date_from ?? null, values.filters.date_to ?? null)
actions.setProperties(values.filters.properties ?? null)
actions.resetVariables()
// also reset layout to that we stored in dashboardLayouts
// this is done in the reducer for dashboard
} else if (source === DashboardEventSource.DashboardHeaderSaveDashboard) {
// save edit mode changes
actions.setFiltersAndLayouts(values.temporaryFilters)
actions.setFiltersAndLayoutsAndVariables(values.temporaryFilters, values.temporaryVariables)
}
}
@ -1305,6 +1411,10 @@ export const dashboardLogic = kea<dashboardLogicType>([
setDates: () => {
actions.loadDashboard({ action: 'preview' })
},
overrideVariableValue: () => {
actions.setDashboardMode(DashboardMode.Edit, null)
actions.loadDashboard({ action: 'preview' })
},
})),
urlToAction(({ values, actions }) => ({

View File

@ -104,6 +104,7 @@ export const dataWarehouseExternalSceneLogic = kea<dataWarehouseExternalSceneLog
undefined,
undefined,
undefined,
undefined,
undefined
)
},
@ -115,6 +116,7 @@ export const dataWarehouseExternalSceneLogic = kea<dataWarehouseExternalSceneLog
undefined,
undefined,
undefined,
undefined,
undefined
)
id && actions.loadView(id)

View File

@ -21,14 +21,18 @@ export interface InsightSceneProps {
export function Insight({ insightId }: InsightSceneProps): JSX.Element {
// insightSceneLogic
const { insightMode, insight, filtersOverride } = useValues(insightSceneLogic)
const { insightMode, insight, filtersOverride, variablesOverride } = useValues(insightSceneLogic)
// insightLogic
const logic = insightLogic({
dashboardItemId: insightId || 'new',
// don't use cached insight if we have filtersOverride
cachedInsight: isObject(filtersOverride) && insight?.short_id === insightId ? insight : null,
cachedInsight:
(isObject(filtersOverride) || isObject(variablesOverride)) && insight?.short_id === insightId
? insight
: null,
filtersOverride,
variablesOverride,
})
const { insightProps } = useValues(logic)
@ -52,13 +56,16 @@ export function Insight({ insightId }: InsightSceneProps): JSX.Element {
<div className="Insight">
<InsightPageHeader insightLogicProps={insightProps} />
{isObject(filtersOverride) && (
{(isObject(filtersOverride) || isObject(variablesOverride)) && (
<LemonBanner type="warning" className="mb-4">
<div className="flex flex-row items-center justify-between gap-2">
<span>You are viewing this insight with filters from a dashboard</span>
<span>
You are viewing this insight with{' '}
{isObject(variablesOverride) ? 'variables' : 'filters'} from a dashboard
</span>
<LemonButton type="secondary" to={urls.insightView(insightId as InsightShortId)}>
Discard dashboard filters
Discard dashboard {isObject(variablesOverride) ? 'variables' : 'filters'}
</LemonButton>
</div>
</LemonBanner>
@ -83,6 +90,7 @@ export function Insight({ insightId }: InsightSceneProps): JSX.Element {
insightProps,
}}
filtersOverride={filtersOverride}
variablesOverride={variablesOverride}
/>
</div>
</BindLogic>

View File

@ -32,6 +32,8 @@ export const insightDataLogic = kea<insightDataLogicType>([
dataNodeLogic({
key: insightVizDataNodeKey(props),
loadPriority: props.loadPriority,
filtersOverride: props.filtersOverride,
variablesOverride: props.variablesOverride,
} as DataNodeLogicProps),
[
'query as insightQuery',

View File

@ -21,7 +21,7 @@ import { dashboardsModel } from '~/models/dashboardsModel'
import { groupsModel } from '~/models/groupsModel'
import { insightsModel } from '~/models/insightsModel'
import { tagsModel } from '~/models/tagsModel'
import { DashboardFilter, Node } from '~/queries/schema'
import { DashboardFilter, HogQLVariable, Node } from '~/queries/schema'
import { InsightLogicProps, InsightShortId, ItemMode, QueryBasedInsightModel, SetInsightOptions } from '~/types'
import { teamLogic } from '../teamLogic'
@ -77,9 +77,14 @@ export const insightLogic: LogicWrapper<insightLogicType> = kea<insightLogicType
saveInsight: (redirectToViewMode = true) => ({ redirectToViewMode }),
saveInsightSuccess: true,
saveInsightFailure: true,
loadInsight: (shortId: InsightShortId, filtersOverride?: DashboardFilter | null) => ({
loadInsight: (
shortId: InsightShortId,
filtersOverride?: DashboardFilter | null,
variablesOverride?: Record<string, HogQLVariable> | null
) => ({
shortId,
filtersOverride,
variablesOverride,
}),
updateInsight: (insightUpdate: Partial<QueryBasedInsightModel>, callback?: () => void) => ({
insightUpdate,
@ -96,9 +101,15 @@ export const insightLogic: LogicWrapper<insightLogicType> = kea<insightLogicType
insight: [
props.cachedInsight ?? createEmptyInsight(props.dashboardItemId || 'new'),
{
loadInsight: async ({ shortId, filtersOverride }, breakpoint) => {
loadInsight: async ({ shortId, filtersOverride, variablesOverride }, breakpoint) => {
await breakpoint(100)
const insight = await insightsApi.getByShortId(shortId, undefined, 'async', filtersOverride)
const insight = await insightsApi.getByShortId(
shortId,
undefined,
'async',
filtersOverride,
variablesOverride
)
if (!insight) {
throw new Error(`Insight with shortId ${shortId} not found`)
@ -417,7 +428,11 @@ export const insightLogic: LogicWrapper<insightLogicType> = kea<insightLogicType
}
if (!props.doNotLoad && !props.cachedInsight) {
actions.loadInsight(props.dashboardItemId as InsightShortId, props.filtersOverride)
actions.loadInsight(
props.dashboardItemId as InsightShortId,
props.filtersOverride,
props.variablesOverride
)
}
},
})),

View File

@ -19,7 +19,7 @@ import { ActivityFilters } from '~/layout/navigation-3000/sidepanel/panels/activ
import { cohortsModel } from '~/models/cohortsModel'
import { groupsModel } from '~/models/groupsModel'
import { getDefaultQuery } from '~/queries/nodes/InsightViz/utils'
import { DashboardFilter, Node } from '~/queries/schema'
import { DashboardFilter, HogQLVariable, Node } from '~/queries/schema'
import { ActivityScope, Breadcrumb, DashboardType, InsightShortId, InsightType, ItemMode } from '~/types'
import { insightDataLogic } from './insightDataLogic'
@ -50,9 +50,10 @@ export const insightSceneLogic = kea<insightSceneLogicType>([
insightMode: ItemMode,
itemId: string | undefined,
alertId: AlertType['id'] | undefined,
filtersOverride: DashboardFilter | undefined,
variablesOverride: Record<string, HogQLVariable> | undefined,
dashboardId: DashboardType['id'] | undefined,
dashboardName: DashboardType['name'] | undefined,
filtersOverride: DashboardFilter | undefined
dashboardName: DashboardType['name'] | undefined
) => ({
insightId,
insightMode,
@ -61,6 +62,7 @@ export const insightSceneLogic = kea<insightSceneLogicType>([
dashboardId,
dashboardName,
filtersOverride,
variablesOverride,
}),
setInsightLogicRef: (logic: BuiltLogic<insightLogicType> | null, unmount: null | (() => void)) => ({
logic,
@ -122,6 +124,13 @@ export const insightSceneLogic = kea<insightSceneLogicType>([
setSceneState: (_, { filtersOverride }) => (filtersOverride !== undefined ? filtersOverride : null),
},
],
variablesOverride: [
null as null | Record<string, HogQLVariable>,
{
setSceneState: (_, { variablesOverride }) =>
variablesOverride !== undefined ? variablesOverride : null,
},
],
insightLogicRef: [
null as null | {
logic: BuiltLogic<insightLogicType>
@ -222,7 +231,11 @@ export const insightSceneLogic = kea<insightSceneLogicType>([
const oldRef = values.insightLogicRef // free old logic after mounting new one
const oldRef2 = values.insightDataLogicRef // free old logic after mounting new one
if (insightId) {
const insightProps = { dashboardItemId: insightId, filtersOverride: values.filtersOverride }
const insightProps = {
dashboardItemId: insightId,
filtersOverride: values.filtersOverride,
variablesOverride: values.variablesOverride,
}
const logic = insightLogic.build(insightProps)
const unmount = logic.mount()
@ -242,7 +255,11 @@ export const insightSceneLogic = kea<insightSceneLogicType>([
oldRef2.unmount()
}
} else if (insightId) {
values.insightLogicRef?.logic.actions.loadInsight(insightId as InsightShortId, values.filtersOverride)
values.insightLogicRef?.logic.actions.loadInsight(
insightId as InsightShortId,
values.filtersOverride,
values.variablesOverride
)
}
},
})),
@ -294,18 +311,20 @@ export const insightSceneLogic = kea<insightSceneLogicType>([
insightMode !== values.insightMode ||
itemId !== values.itemId ||
alert_id !== values.alertId ||
!objectsEqual(searchParams['variables_override'], values.variablesOverride) ||
!objectsEqual(filtersOverride, values.filtersOverride) ||
dashboard !== values.dashboardId ||
dashboardName !== values.dashboardName ||
!objectsEqual(filtersOverride, values.filtersOverride)
dashboardName !== values.dashboardName
) {
actions.setSceneState(
insightId,
insightMode,
itemId,
alert_id,
filtersOverride,
searchParams['variables_override'],
dashboard,
dashboardName,
filtersOverride
dashboardName
)
}

View File

@ -1,7 +1,7 @@
import api from 'lib/api'
import { getQueryBasedInsightModel } from '~/queries/nodes/InsightViz/utils'
import { DashboardFilter, RefreshType } from '~/queries/schema'
import { DashboardFilter, HogQLVariable, RefreshType } from '~/queries/schema'
import { InsightShortId, QueryBasedInsightModel } from '~/types'
async function _perform(
@ -20,9 +20,16 @@ export const insightsApi = {
shortId: InsightShortId,
basic?: boolean,
refresh?: RefreshType,
filtersOverride?: DashboardFilter | null
filtersOverride?: DashboardFilter | null,
variablesOverride?: Record<string, HogQLVariable> | null
): Promise<QueryBasedInsightModel | null> {
const legacyInsights = await api.insights.loadInsight(shortId, basic, refresh, filtersOverride)
const legacyInsights = await api.insights.loadInsight(
shortId,
basic,
refresh,
filtersOverride,
variablesOverride
)
if (legacyInsights.results.length === 0) {
return null
}

View File

@ -17,8 +17,21 @@ import { ChartDisplayType } from '~/types'
type CompareQueryOpts = { ignoreVisualizationOnlyChanges: boolean }
/**
 * Extracts all variable code names referenced in a HogQL query string.
 *
 * Finds every `{variables.<code_name>}` placeholder (lowercase letters,
 * digits and underscores) and returns the captured code names in order of
 * appearance. Duplicates are preserved. Returns an empty array when the
 * query references no variables.
 */
export const getVariablesFromQuery = (query: string): string[] => {
    // matchAll iterates every occurrence of the global regex, unlike a
    // single `exec` call, which only yields the first match
    const matches = query.matchAll(/\{variables\.([a-z0-9_]+)\}/gm)
    return Array.from(matches, (m) => m[1]).filter(Boolean)
}
export const compareQuery = (a: Node, b: Node, opts?: CompareQueryOpts): boolean => {

View File

@ -3,7 +3,7 @@ import { AlertType } from 'lib/components/Alerts/types'
import { getCurrentTeamId } from 'lib/utils/getAppContext'
import { ExportOptions } from '~/exporter/types'
import { HogQLFilters, Node } from '~/queries/schema'
import { HogQLFilters, HogQLVariable, Node } from '~/queries/schema'
import {
ActionType,
ActivityTab,
@ -89,8 +89,20 @@ export const urls = {
}
).url,
insightEdit: (id: InsightShortId): string => `/insights/${id}/edit`,
insightView: (id: InsightShortId, dashboardId?: number): string =>
`/insights/${id}${dashboardId !== undefined ? `?dashboard=${dashboardId}` : ''}`,
/**
 * Builds the URL for viewing an insight, optionally scoped to a dashboard
 * and/or carrying dashboard variable overrides.
 *
 * Both optional params are JSON-encoded into the query string so the
 * receiving scene logic can parse them back out of `searchParams`.
 */
insightView: (
    id: InsightShortId,
    dashboardId?: number,
    variablesOverride?: Record<string, HogQLVariable>
): string => {
    const params = [
        { param: 'dashboard', value: dashboardId },
        { param: 'variables_override', value: variablesOverride },
    ]
        // `!= null` (not a truthiness check) so a dashboard id of 0 is kept
        .filter((n) => n.value != null)
        .map((n) => `${n.param}=${encodeURIComponent(JSON.stringify(n.value))}`)
        .join('&')
    return `/insights/${id}${params.length ? `?${params}` : ''}`
},
insightSubcriptions: (id: InsightShortId): string => `/insights/${id}/subscriptions`,
insightSubcription: (id: InsightShortId, subscriptionId: string): string =>
`/insights/${id}/subscriptions/${subscriptionId}`,

View File

@ -37,6 +37,7 @@ import type {
DatabaseSchemaField,
HogQLQuery,
HogQLQueryModifiers,
HogQLVariable,
InsightVizNode,
Node,
QueryStatus,
@ -1823,6 +1824,7 @@ export type DashboardTemplateScope = 'team' | 'global' | 'feature_flag'
export interface DashboardType<T = InsightModel> extends DashboardBasicType {
tiles: DashboardTile<T>[]
filters: DashboardFilter
variables?: Record<string, HogQLVariable>
}
export enum TemplateAvailabilityContext {
@ -2661,6 +2663,8 @@ export interface InsightLogicProps<T = InsightVizNode> {
/** Dashboard filters to override the ones in the query */
filtersOverride?: DashboardFilter | null
/** Dashboard variables to override the ones in the query */
variablesOverride?: Record<string, HogQLVariable> | null
}
export interface SetInsightOptions {

View File

@ -3,6 +3,54 @@ posthog/temporal/common/utils.py:0: note: This is likely because "from_activity"
posthog/temporal/common/utils.py:0: error: Argument 2 to "__get__" of "classmethod" has incompatible type "type[HeartbeatType]"; expected "type[Never]" [arg-type]
posthog/tasks/exports/ordered_csv_renderer.py:0: error: No return value expected [return-value]
posthog/warehouse/models/ssh_tunnel.py:0: error: Incompatible types in assignment (expression has type "NoEncryption", variable has type "BestAvailableEncryption") [assignment]
posthog/temporal/data_imports/pipelines/sql_database/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Dict entry 2 has incompatible type "Literal['auto']": "None"; expected "Literal['json_response', 'header_link', 'auto', 'single_page', 'cursor', 'offset', 'page_number']": "type[BasePaginator]" [dict-item]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "AuthConfigBase") [assignment]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Argument 1 to "get_auth_class" has incompatible type "Literal['bearer', 'api_key', 'http_basic'] | None"; expected "Literal['bearer', 'api_key', 'http_basic']" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Need type annotation for "dependency_graph" [var-annotated]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "None", target has type "ResolvedParam") [assignment]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible return value type (got "tuple[TopologicalSorter[Any], dict[str, EndpointResource], dict[str, ResolvedParam]]", expected "tuple[Any, dict[str, EndpointResource], dict[str, ResolvedParam | None]]") [return-value]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("str | Endpoint | None") [operator]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type variable "StrOrLiteralStr" of "parse" of "Formatter" cannot be "str | None" [type-var]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None") [operator]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None") [operator]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" is not indexable [index]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" has no attribute "pop" [union-attr]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" is not indexable [index]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "str | None" has no attribute "format" [union-attr]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Argument 1 to "single_entity_path" has incompatible type "str | None"; expected "str" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" has no attribute "items" [union-attr]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str") [assignment]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str") [assignment]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Statement is unreachable [unreachable]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 0 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, Any]" [dict-item]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, Any]" [dict-item]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 0 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Not all union combinations were tried because there are too many unions [misc]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 2 to "source" has incompatible type "str | None"; expected "str" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 3 to "source" has incompatible type "str | None"; expected "str" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 4 to "source" has incompatible type "int | None"; expected "int" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 6 to "source" has incompatible type "Schema | None"; expected "Schema" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 7 to "source" has incompatible type "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | None"; expected "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 8 to "source" has incompatible type "type[BaseConfiguration] | None"; expected "type[BaseConfiguration]" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "build_resource_dependency_graph" has incompatible type "EndpointResourceBase | None"; expected "EndpointResourceBase" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible types in assignment (expression has type "list[str] | None", variable has type "list[str]") [assignment]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "setup_incremental_object" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "dict[str, Any]" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument "base_url" to "RESTClient" has incompatible type "str | None"; expected "str" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "exclude_keys" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "Mapping[str, Any]" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible default for argument "resolved_param" (default has type "ResolvedParam | None", argument has type "ResolvedParam") [assignment]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument "module" to "SourceInfo" has incompatible type Module | None; expected Module [arg-type]
posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/utils.py:0: error: No overload variant of "asdict" matches argument type "type[DataclassInstance]" [call-overload]
posthog/utils.py:0: note: Possible overload variants:
posthog/utils.py:0: note: def asdict(obj: DataclassInstance) -> dict[str, Any]
@ -292,8 +340,8 @@ posthog/hogql/query.py:0: error: Incompatible types in assignment (expression ha
posthog/hogql/query.py:0: error: Argument 1 to "get_default_limit_for_context" has incompatible type "LimitContext | None"; expected "LimitContext" [arg-type]
posthog/hogql/query.py:0: error: "SelectQuery" has no attribute "select_queries" [attr-defined]
posthog/hogql/query.py:0: error: Subclass of "SelectQuery" and "SelectUnionQuery" cannot exist: would have incompatible method signatures [unreachable]
posthog/api/action.py:0: error: Argument 1 to <tuple> has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type]
posthog/queries/person_query.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", expected "str | int") [misc]
posthog/api/action.py:0: error: Argument 1 to <tuple> has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type]
posthog/queries/event_query/event_query.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", expected "str | int") [misc]
posthog/hogql_queries/sessions_timeline_query_runner.py:0: error: Statement is unreachable [unreachable]
posthog/hogql_queries/hogql_query_runner.py:0: error: Statement is unreachable [unreachable]
@ -383,7 +431,23 @@ posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard
posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard | None" has no attribute "tiles" [union-attr]
posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard | None" has no attribute "tiles" [union-attr]
posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard | None" has no attribute "delete" [union-attr]
posthog/temporal/data_imports/pipelines/sql_database/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/tasks/test/test_update_survey_iteration.py:0: error: Item "None" of "FeatureFlag | None" has no attribute "filters" [union-attr]
posthog/tasks/test/test_stop_surveys_reached_target.py:0: error: No overload variant of "__sub__" of "datetime" matches argument type "None" [operator]
posthog/tasks/test/test_stop_surveys_reached_target.py:0: note: Possible overload variants:
@ -410,12 +474,19 @@ posthog/tasks/exports/test/test_csv_exporter_renders.py:0: error: Item "memoryvi
posthog/tasks/exports/test/test_csv_exporter_renders.py:0: error: Item "None" of "bytes | memoryview | None" has no attribute "decode" [union-attr]
posthog/tasks/exports/test/test_csv_exporter_renders.py:0: error: Item "memoryview" of "bytes | memoryview | None" has no attribute "decode" [union-attr]
posthog/tasks/exports/test/test_csv_exporter_renders.py:0: error: Item "None" of "bytes | memoryview | None" has no attribute "decode" [union-attr]
posthog/management/commands/sync_persons_to_clickhouse.py:0: error: Argument 4 to "create_person_override" has incompatible type "int | None"; expected "int" [arg-type]
posthog/management/commands/sync_persons_to_clickhouse.py:0: error: Argument "group_type_index" to "raw_create_group_ch" has incompatible type "int"; expected "Literal[0, 1, 2, 3, 4]" [arg-type]
posthog/management/commands/migrate_team.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "BatchExport") [assignment]
posthog/management/commands/migrate_team.py:0: error: "BatchExportDestination" has no attribute "exclude_events" [attr-defined]
posthog/management/commands/migrate_team.py:0: error: "BatchExportDestination" has no attribute "include_events" [attr-defined]
posthog/management/commands/fix_future_person_created_at.py:0: error: Argument "version" to "create_person" has incompatible type "int | None"; expected "int" [arg-type]
posthog/hogql_queries/test/test_query_runner.py:0: error: Variable "TestQueryRunner" is not valid as a type [valid-type]
posthog/hogql_queries/test/test_query_runner.py:0: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases
posthog/hogql_queries/test/test_query_runner.py:0: error: Invalid base class "TestQueryRunner" [misc]
posthog/hogql_queries/test/test_hogql_query_runner.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SelectQuery") [assignment]
posthog/hogql_queries/test/test_hogql_query_runner.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SelectQuery") [assignment]
posthog/hogql_queries/test/test_hogql_query_runner.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SelectQuery") [assignment]
posthog/hogql_queries/test/test_actors_query_runner.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SelectQuery") [assignment]
posthog/hogql_queries/legacy_compatibility/test/test_filter_to_query.py:0: error: Need type annotation for "properties_0" (hint: "properties_0: list[<type>] = ...") [var-annotated]
posthog/hogql_queries/legacy_compatibility/test/test_filter_to_query.py:0: error: Need type annotation for "properties_3" (hint: "properties_3: dict[<type>, <type>] = ...") [var-annotated]
posthog/hogql_queries/legacy_compatibility/test/test_filter_to_query.py:0: error: Need type annotation for "filter" (hint: "filter: dict[<type>, <type>] = ...") [var-annotated]
@ -428,6 +499,21 @@ posthog/hogql/test/test_timings.py:0: error: No overload variant of "__setitem__
posthog/hogql/test/test_timings.py:0: note: Possible overload variants:
posthog/hogql/test/test_timings.py:0: note: def __setitem__(self, SupportsIndex, int, /) -> None
posthog/hogql/test/test_timings.py:0: note: def __setitem__(self, slice, Iterable[int], /) -> None
posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | None" has no attribute "next_join" [union-attr]
posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | Any | None" has no attribute "constraint" [union-attr]
posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinConstraint | Any | None" has no attribute "constraint_type" [union-attr]
posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinConstraint | Any | None" has no attribute "expr" [union-attr]
posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | None" has no attribute "next_join" [union-attr]
posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | Any | None" has no attribute "constraint" [union-attr]
posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinConstraint | Any | None" has no attribute "constraint_type" [union-attr]
posthog/hogql/test/test_resolver.py:0: error: Item "SelectUnionQueryType" of "SelectQueryType | SelectUnionQueryType | None" has no attribute "columns" [union-attr]
posthog/hogql/test/test_resolver.py:0: error: Item "None" of "SelectQueryType | SelectUnionQueryType | None" has no attribute "columns" [union-attr]
posthog/hogql/test/test_resolver.py:0: error: "FieldOrTable" has no attribute "fields" [attr-defined]
posthog/hogql/test/test_resolver.py:0: error: "FieldOrTable" has no attribute "fields" [attr-defined]
posthog/hogql/test/test_resolver.py:0: error: "FieldOrTable" has no attribute "fields" [attr-defined]
posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | None" has no attribute "table" [union-attr]
posthog/hogql/test/test_resolver.py:0: error: Argument 1 to "clone_expr" has incompatible type "SelectQuery | SelectUnionQuery | Field | Any | None"; expected "Expr" [arg-type]
posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | None" has no attribute "alias" [union-attr]
posthog/hogql/test/test_property.py:0: error: Argument 1 to "_property_to_expr" of "TestProperty" has incompatible type "HogQLPropertyFilter"; expected "PropertyGroup | Property | dict[Any, Any] | list[Any]" [arg-type]
posthog/hogql/test/test_printer.py:0: error: Argument 2 to "Database" has incompatible type "int"; expected "WeekStartDay | None" [arg-type]
posthog/hogql/test/test_printer.py:0: error: Argument 2 to "Database" has incompatible type "int"; expected "WeekStartDay | None" [arg-type]
@ -456,6 +542,9 @@ posthog/hogql/test/_test_parser.py:0: error: Item "None" of "JoinExpr | None" ha
posthog/hogql/test/_test_parser.py:0: error: Item "None" of "JoinExpr | None" has no attribute "table" [union-attr]
posthog/hogql/test/_test_parser.py:0: error: Item "None" of "JoinExpr | None" has no attribute "alias" [union-attr]
posthog/hogql/test/_test_parser.py:0: error: Item "None" of "JoinExpr | None" has no attribute "table" [union-attr]
posthog/hogql/functions/test/test_cohort.py:0: error: "TestCohort" has no attribute "snapshot" [attr-defined]
posthog/hogql/functions/test/test_cohort.py:0: error: "TestCohort" has no attribute "snapshot" [attr-defined]
posthog/hogql/functions/test/test_cohort.py:0: error: "TestCohort" has no attribute "snapshot" [attr-defined]
posthog/hogql/database/schema/event_sessions.py:0: error: Statement is unreachable [unreachable]
posthog/heatmaps/test/test_heatmaps_api.py:0: error: "HttpResponse" has no attribute "json" [attr-defined]
posthog/heatmaps/test/test_heatmaps_api.py:0: error: "HttpResponse" has no attribute "json" [attr-defined]
@ -503,10 +592,32 @@ posthog/api/organization_feature_flag.py:0: error: Invalid index type "str | Non
posthog/api/organization_feature_flag.py:0: error: Invalid index type "str | None" for "dict[str, int]"; expected type "str" [index]
posthog/api/organization_feature_flag.py:0: error: Invalid index type "str | None" for "dict[str, int]"; expected type "str" [index]
posthog/api/notebook.py:0: error: Incompatible types in assignment (expression has type "int", variable has type "str | None") [assignment]
posthog/warehouse/external_data_source/source.py:0: error: Incompatible types in assignment (expression has type "int", target has type "str") [assignment]
posthog/warehouse/external_data_source/source.py:0: error: Incompatible types in assignment (expression has type "int", target has type "str") [assignment]
posthog/warehouse/external_data_source/source.py:0: error: Incompatible types in assignment (expression has type "dict[str, Collection[str]]", variable has type "StripeSourcePayload") [assignment]
posthog/warehouse/external_data_source/source.py:0: error: Argument 1 to "_create_source" has incompatible type "StripeSourcePayload"; expected "dict[Any, Any]" [arg-type]
posthog/warehouse/data_load/validate_schema.py:0: error: Incompatible types in assignment (expression has type "object", variable has type "DataWarehouseCredential | Combinable | None") [assignment]
posthog/warehouse/data_load/validate_schema.py:0: error: Incompatible types in assignment (expression has type "object", variable has type "str | int | Combinable") [assignment]
posthog/warehouse/data_load/validate_schema.py:0: error: Incompatible types in assignment (expression has type "dict[str, dict[str, str | bool]] | dict[str, str]", variable has type "dict[str, dict[str, str]]") [assignment]
posthog/warehouse/data_load/source_templates.py:0: error: Incompatible types in assignment (expression has type "str", variable has type "Type") [assignment]
posthog/warehouse/api/external_data_schema.py:0: error: Incompatible return value type (got "str | None", expected "SyncType | None") [return-value]
posthog/warehouse/api/external_data_schema.py:0: error: Argument 1 to "get_sql_schemas_for_source_type" has incompatible type "str"; expected "Type" [arg-type]
posthog/warehouse/api/external_data_schema.py:0: error: No overload variant of "get" of "dict" matches argument type "str" [call-overload]
posthog/warehouse/api/external_data_schema.py:0: note: Possible overload variants:
posthog/warehouse/api/external_data_schema.py:0: note: def get(self, Type, /) -> dict[str, list[IncrementalField]] | None
posthog/warehouse/api/external_data_schema.py:0: note: def get(self, Type, dict[str, list[IncrementalField]], /) -> dict[str, list[IncrementalField]]
posthog/warehouse/api/external_data_schema.py:0: note: def [_T] get(self, Type, _T, /) -> dict[str, list[IncrementalField]] | _T
posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: error: Argument 1 has incompatible type "str"; expected "Type" [arg-type]
posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: error: Incompatible types in assignment (expression has type "list[Any]", variable has type "dict[str, list[tuple[str, str]]]") [assignment]
posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: error: No overload variant of "get" of "dict" matches argument types "str", "tuple[()]" [call-overload]
posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: note: Possible overload variants:
posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: note: def get(self, Type, /) -> Sequence[str] | None
posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: note: def get(self, Type, Sequence[str], /) -> Sequence[str]
posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: note: def [_T] get(self, Type, _T, /) -> Sequence[str] | _T
posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: error: Argument 1 has incompatible type "dict[str, list[tuple[str, str]]]"; expected "list[Any]" [arg-type]
posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a return type annotation [no-untyped-def]
posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a type annotation [no-untyped-def]
posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
@ -549,28 +660,6 @@ posthog/queries/trends/test/test_person.py:0: error: Invalid index type "int" fo
posthog/queries/trends/test/test_person.py:0: error: "str" has no attribute "get" [attr-defined]
posthog/queries/trends/test/test_person.py:0: error: Invalid index type "int" for "_MonkeyPatchedResponse"; expected type "str" [index]
posthog/models/test/test_organization_model.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "timedelta" [attr-defined]
posthog/management/commands/sync_persons_to_clickhouse.py:0: error: Argument 4 to "create_person_override" has incompatible type "int | None"; expected "int" [arg-type]
posthog/management/commands/sync_persons_to_clickhouse.py:0: error: Argument "group_type_index" to "raw_create_group_ch" has incompatible type "int"; expected "Literal[0, 1, 2, 3, 4]" [arg-type]
posthog/management/commands/migrate_team.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "BatchExport") [assignment]
posthog/management/commands/migrate_team.py:0: error: "BatchExportDestination" has no attribute "exclude_events" [attr-defined]
posthog/management/commands/migrate_team.py:0: error: "BatchExportDestination" has no attribute "include_events" [attr-defined]
posthog/management/commands/fix_future_person_created_at.py:0: error: Argument "version" to "create_person" has incompatible type "int | None"; expected "int" [arg-type]
posthog/hogql_queries/test/test_actors_query_runner.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SelectQuery") [assignment]
posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | None" has no attribute "next_join" [union-attr]
posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | Any | None" has no attribute "constraint" [union-attr]
posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinConstraint | Any | None" has no attribute "constraint_type" [union-attr]
posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinConstraint | Any | None" has no attribute "expr" [union-attr]
posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | None" has no attribute "next_join" [union-attr]
posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | Any | None" has no attribute "constraint" [union-attr]
posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinConstraint | Any | None" has no attribute "constraint_type" [union-attr]
posthog/hogql/test/test_resolver.py:0: error: Item "SelectUnionQueryType" of "SelectQueryType | SelectUnionQueryType | None" has no attribute "columns" [union-attr]
posthog/hogql/test/test_resolver.py:0: error: Item "None" of "SelectQueryType | SelectUnionQueryType | None" has no attribute "columns" [union-attr]
posthog/hogql/test/test_resolver.py:0: error: "FieldOrTable" has no attribute "fields" [attr-defined]
posthog/hogql/test/test_resolver.py:0: error: "FieldOrTable" has no attribute "fields" [attr-defined]
posthog/hogql/test/test_resolver.py:0: error: "FieldOrTable" has no attribute "fields" [attr-defined]
posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | None" has no attribute "table" [union-attr]
posthog/hogql/test/test_resolver.py:0: error: Argument 1 to "clone_expr" has incompatible type "SelectQuery | SelectUnionQuery | Field | Any | None"; expected "Expr" [arg-type]
posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | None" has no attribute "alias" [union-attr]
posthog/hogql/test/test_query.py:0: error: Value of type "list[QueryTiming] | None" is not indexable [index]
posthog/hogql/test/test_query.py:0: error: Value of type "list[QueryTiming] | None" is not indexable [index]
posthog/hogql/test/test_query.py:0: error: Module has no attribute "utc" [attr-defined]
@ -601,14 +690,12 @@ posthog/hogql/test/test_parser_python.py:0: error: Unsupported dynamic base clas
posthog/hogql/test/test_parser_cpp.py:0: error: Unsupported dynamic base class "parser_test_factory" [misc]
posthog/hogql/test/test_parse_string_python.py:0: error: Unsupported dynamic base class "parse_string_test_factory" [misc]
posthog/hogql/test/test_parse_string_cpp.py:0: error: Unsupported dynamic base class "parse_string_test_factory" [misc]
posthog/hogql/functions/test/test_cohort.py:0: error: "TestCohort" has no attribute "snapshot" [attr-defined]
posthog/hogql/functions/test/test_cohort.py:0: error: "TestCohort" has no attribute "snapshot" [attr-defined]
posthog/hogql/functions/test/test_cohort.py:0: error: "TestCohort" has no attribute "snapshot" [attr-defined]
posthog/hogql/database/test/test_view.py:0: error: Argument "dialect" to "print_ast" has incompatible type "str"; expected "Literal['hogql', 'clickhouse']" [arg-type]
posthog/hogql/database/test/test_s3_table.py:0: error: Argument "dialect" to "print_ast" has incompatible type "str"; expected "Literal['hogql', 'clickhouse']" [arg-type]
posthog/async_migrations/test/test_runner.py:0: error: Item "None" of "datetime | None" has no attribute "day" [union-attr]
posthog/api/test/test_insight.py:0: error: Argument "data" to "get" of "APIClient" has incompatible type "dict[str, object]"; expected "Mapping[str, str | bytes | int | Iterable[str | bytes | int]] | Iterable[tuple[str, str | bytes | int | Iterable[str | bytes | int]]] | None" [arg-type]
posthog/api/test/test_insight.py:0: error: Argument "data" to "get" of "APIClient" has incompatible type "dict[str, object]"; expected "Mapping[str, str | bytes | int | Iterable[str | bytes | int]] | Iterable[tuple[str, str | bytes | int | Iterable[str | bytes | int]]] | None" [arg-type]
posthog/api/test/test_insight.py:0: error: Argument "data" to "get" of "APIClient" has incompatible type "dict[str, object]"; expected "Mapping[str, str | bytes | int | Iterable[str | bytes | int]] | Iterable[tuple[str, str | bytes | int | Iterable[str | bytes | int]]] | None" [arg-type]
posthog/api/test/test_feature_flag.py:0: error: Item "None" of "Dashboard | None" has no attribute "tiles" [union-attr]
posthog/api/test/test_feature_flag.py:0: error: Item "None" of "Dashboard | None" has no attribute "name" [union-attr]
posthog/api/test/test_feature_flag.py:0: error: Item "None" of "Dashboard | None" has no attribute "description" [union-attr]
@ -673,18 +760,11 @@ posthog/admin/admins/team_admin.py:0: error: Item "None" of "Project | None" has
posthog/admin/admins/team_admin.py:0: error: Item "None" of "Project | None" has no attribute "name" [union-attr]
posthog/admin/admins/plugin_admin.py:0: error: Item "None" of "Organization | None" has no attribute "pk" [union-attr]
posthog/admin/admins/plugin_admin.py:0: error: Item "None" of "Organization | None" has no attribute "name" [union-attr]
ee/clickhouse/views/experiments.py:0: error: Argument 4 to "ClickhouseTrendExperimentResult" has incompatible type "datetime | None"; expected "datetime" [arg-type]
ee/clickhouse/views/experiments.py:0: error: Argument 4 to "ClickhouseFunnelExperimentResult" has incompatible type "datetime | None"; expected "datetime" [arg-type]
ee/clickhouse/views/experiments.py:0: error: Argument 4 to "ClickhouseSecondaryExperimentResult" has incompatible type "datetime | None"; expected "datetime" [arg-type]
ee/clickhouse/views/experiments.py:0: error: Item "None" of "User | None" has no attribute "email" [union-attr]
posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: error: Argument 1 has incompatible type "str"; expected "Type" [arg-type]
posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: error: Incompatible types in assignment (expression has type "list[Any]", variable has type "dict[str, list[tuple[str, str]]]") [assignment]
posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: error: No overload variant of "get" of "dict" matches argument types "str", "tuple[()]" [call-overload]
posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: note: Possible overload variants:
posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: note: def get(self, Type, /) -> Sequence[str] | None
posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: note: def get(self, Type, Sequence[str], /) -> Sequence[str]
posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: note: def [_T] get(self, Type, _T, /) -> Sequence[str] | _T
posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: error: Argument 1 has incompatible type "dict[str, list[tuple[str, str]]]"; expected "list[Any]" [arg-type]
posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/tests/batch_exports/test_batch_exports.py:0: error: TypedDict key must be a string literal; expected one of ("_timestamp", "created_at", "distinct_id", "elements", "elements_chain", ...) [literal-required]
posthog/session_recordings/session_recording_api.py:0: error: Argument "team_id" to "get_realtime_snapshots" has incompatible type "int"; expected "str" [arg-type]
posthog/session_recordings/session_recording_api.py:0: error: Value of type variable "SupportsRichComparisonT" of "sorted" cannot be "str | None" [type-var]
posthog/session_recordings/session_recording_api.py:0: error: Argument 1 to "get" of "dict" has incompatible type "str | None"; expected "str" [arg-type]
@ -695,14 +775,6 @@ posthog/queries/app_metrics/historical_exports.py:0: error: Argument 1 to "loads
posthog/api/test/test_decide.py:0: error: Item "None" of "User | None" has no attribute "toolbar_mode" [union-attr]
posthog/api/test/test_decide.py:0: error: Item "None" of "User | None" has no attribute "save" [union-attr]
posthog/api/test/test_authentication.py:0: error: Module has no attribute "utc" [attr-defined]
posthog/admin/admins/plugin_config_admin.py:0: error: Item "None" of "Team | None" has no attribute "name" [union-attr]
posthog/migrations/0237_remove_timezone_from_teams.py:0: error: Argument 2 to "RunPython" has incompatible type "Callable[[Migration, Any], None]"; expected "_CodeCallable | None" [arg-type]
posthog/migrations/0228_fix_tile_layouts.py:0: error: Argument 2 to "RunPython" has incompatible type "Callable[[Migration, Any], None]"; expected "_CodeCallable | None" [arg-type]
posthog/warehouse/external_data_source/source.py:0: error: Incompatible types in assignment (expression has type "int", target has type "str") [assignment]
posthog/warehouse/external_data_source/source.py:0: error: Incompatible types in assignment (expression has type "int", target has type "str") [assignment]
posthog/warehouse/external_data_source/source.py:0: error: Incompatible types in assignment (expression has type "dict[str, Collection[str]]", variable has type "StripeSourcePayload") [assignment]
posthog/warehouse/external_data_source/source.py:0: error: Argument 1 to "_create_source" has incompatible type "StripeSourcePayload"; expected "dict[Any, Any]" [arg-type]
posthog/api/sharing.py:0: error: Item "None" of "list[Any] | None" has no attribute "__iter__" (not iterable) [union-attr]
posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "organization" [union-attr]
posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "id" [union-attr]
posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "organization" [union-attr]
@ -716,107 +788,55 @@ posthog/api/plugin.py:0: error: Incompatible type for "file_size" of "PluginAtta
posthog/api/plugin.py:0: error: Item "None" of "IO[Any] | None" has no attribute "read" [union-attr]
posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "organization" [union-attr]
posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "id" [union-attr]
posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/tests/batch_exports/test_batch_exports.py:0: error: TypedDict key must be a string literal; expected one of ("_timestamp", "created_at", "distinct_id", "elements", "elements_chain", ...) [literal-required]
posthog/api/plugin_log_entry.py:0: error: Name "timezone.datetime" is not defined [name-defined]
posthog/api/plugin_log_entry.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined]
posthog/api/plugin_log_entry.py:0: error: Name "timezone.datetime" is not defined [name-defined]
posthog/api/plugin_log_entry.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Dict entry 2 has incompatible type "Literal['auto']": "None"; expected "Literal['json_response', 'header_link', 'auto', 'single_page', 'cursor', 'offset', 'page_number']": "type[BasePaginator]" [dict-item]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "AuthConfigBase") [assignment]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Argument 1 to "get_auth_class" has incompatible type "Literal['bearer', 'api_key', 'http_basic'] | None"; expected "Literal['bearer', 'api_key', 'http_basic']" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Need type annotation for "dependency_graph" [var-annotated]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "None", target has type "ResolvedParam") [assignment]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible return value type (got "tuple[TopologicalSorter[Any], dict[str, EndpointResource], dict[str, ResolvedParam]]", expected "tuple[Any, dict[str, EndpointResource], dict[str, ResolvedParam | None]]") [return-value]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("str | Endpoint | None") [operator]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type variable "StrOrLiteralStr" of "parse" of "Formatter" cannot be "str | None" [type-var]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None") [operator]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None") [operator]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" is not indexable [index]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" has no attribute "pop" [union-attr]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" is not indexable [index]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "str | None" has no attribute "format" [union-attr]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Argument 1 to "single_entity_path" has incompatible type "str | None"; expected "str" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" has no attribute "items" [union-attr]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str") [assignment]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str") [assignment]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Statement is unreachable [unreachable]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 0 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, Any]" [dict-item]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, Any]" [dict-item]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 0 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item]
posthog/admin/admins/plugin_config_admin.py:0: error: Item "None" of "Team | None" has no attribute "name" [union-attr]
ee/clickhouse/views/experiments.py:0: error: Argument 4 to "ClickhouseTrendExperimentResult" has incompatible type "datetime | None"; expected "datetime" [arg-type]
ee/clickhouse/views/experiments.py:0: error: Argument 4 to "ClickhouseFunnelExperimentResult" has incompatible type "datetime | None"; expected "datetime" [arg-type]
ee/clickhouse/views/experiments.py:0: error: Argument 4 to "ClickhouseSecondaryExperimentResult" has incompatible type "datetime | None"; expected "datetime" [arg-type]
ee/clickhouse/views/experiments.py:0: error: Item "None" of "User | None" has no attribute "email" [union-attr]
posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0: error: Need type annotation for "_execute_calls" (hint: "_execute_calls: list[<type>] = ...") [var-annotated]
posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0: error: Need type annotation for "_execute_async_calls" (hint: "_execute_async_calls: list[<type>] = ...") [var-annotated]
posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0: error: Need type annotation for "_cursors" (hint: "_cursors: list[<type>] = ...") [var-annotated]
posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0: error: List item 0 has incompatible type "tuple[str, str, int, int, int, int, str, int]"; expected "tuple[str, str, int, int, str, str, str, str]" [list-item]
posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py:0: error: "tuple[Any, ...]" has no attribute "last_uploaded_part_timestamp" [attr-defined]
posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py:0: error: "tuple[Any, ...]" has no attribute "upload_state" [attr-defined]
posthog/temporal/data_imports/workflow_activities/import_data.py:0: error: Argument "job_type" to "PipelineInputs" has incompatible type "str"; expected "Type" [arg-type]
posthog/temporal/data_imports/workflow_activities/import_data.py:0: error: Argument "source_type" to "sql_source_for_type" has incompatible type "str"; expected "Type" [arg-type]
posthog/temporal/data_imports/workflow_activities/import_data.py:0: error: Argument "source_type" to "sql_source_for_type" has incompatible type "str"; expected "Type" [arg-type]
posthog/migrations/0237_remove_timezone_from_teams.py:0: error: Argument 2 to "RunPython" has incompatible type "Callable[[Migration, Any], None]"; expected "_CodeCallable | None" [arg-type]
posthog/migrations/0228_fix_tile_layouts.py:0: error: Argument 2 to "RunPython" has incompatible type "Callable[[Migration, Any], None]"; expected "_CodeCallable | None" [arg-type]
posthog/api/query.py:0: error: Statement is unreachable [unreachable]
posthog/api/plugin_log_entry.py:0: error: Name "timezone.datetime" is not defined [name-defined]
posthog/api/plugin_log_entry.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined]
posthog/api/plugin_log_entry.py:0: error: Name "timezone.datetime" is not defined [name-defined]
posthog/api/plugin_log_entry.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined]
posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py:0: error: Incompatible types in assignment (expression has type "str | int", variable has type "int") [assignment]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Not all union combinations were tried because there are too many unions [misc]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 2 to "source" has incompatible type "str | None"; expected "str" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 3 to "source" has incompatible type "str | None"; expected "str" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 4 to "source" has incompatible type "int | None"; expected "int" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 6 to "source" has incompatible type "Schema | None"; expected "Schema" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 7 to "source" has incompatible type "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | None"; expected "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 8 to "source" has incompatible type "type[BaseConfiguration] | None"; expected "type[BaseConfiguration]" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "build_resource_dependency_graph" has incompatible type "EndpointResourceBase | None"; expected "EndpointResourceBase" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible types in assignment (expression has type "list[str] | None", variable has type "list[str]") [assignment]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "setup_incremental_object" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "dict[str, Any]" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument "base_url" to "RESTClient" has incompatible type "str | None"; expected "str" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "exclude_keys" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "Mapping[str, Any]" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible default for argument "resolved_param" (default has type "ResolvedParam | None", argument has type "ResolvedParam") [assignment]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument "module" to "SourceInfo" has incompatible type Module | None; expected Module [arg-type]
posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/api/sharing.py:0: error: Item "None" of "list[Any] | None" has no attribute "__iter__" (not iterable) [union-attr]
posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index]
posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index]
posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index]
posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index]
posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index]
posthog/api/test/batch_exports/conftest.py:0: error: Signature of "run" incompatible with supertype "Worker" [override]
posthog/api/test/batch_exports/conftest.py:0: note: Superclass:
posthog/api/test/batch_exports/conftest.py:0: note: def run(self) -> Coroutine[Any, Any, None]
posthog/api/test/batch_exports/conftest.py:0: note: Subclass:
posthog/api/test/batch_exports/conftest.py:0: note: def run(self, loop: Any) -> Any
posthog/api/test/batch_exports/conftest.py:0: error: Argument "activities" to "ThreadedWorker" has incompatible type "list[function]"; expected "Sequence[Callable[..., Any]]" [arg-type]
posthog/warehouse/api/external_data_schema.py:0: error: Incompatible return value type (got "str | None", expected "SyncType | None") [return-value]
posthog/warehouse/api/external_data_schema.py:0: error: Argument 1 to "get_sql_schemas_for_source_type" has incompatible type "str"; expected "Type" [arg-type]
posthog/warehouse/api/external_data_schema.py:0: error: No overload variant of "get" of "dict" matches argument type "str" [call-overload]
posthog/warehouse/api/external_data_schema.py:0: note: Possible overload variants:
posthog/warehouse/api/external_data_schema.py:0: note: def get(self, Type, /) -> dict[str, list[IncrementalField]] | None
posthog/warehouse/api/external_data_schema.py:0: note: def get(self, Type, dict[str, list[IncrementalField]], /) -> dict[str, list[IncrementalField]]
posthog/warehouse/api/external_data_schema.py:0: note: def [_T] get(self, Type, _T, /) -> dict[str, list[IncrementalField]] | _T
posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/workflow_activities/import_data.py:0: error: Argument "job_type" to "PipelineInputs" has incompatible type "str"; expected "Type" [arg-type]
posthog/temporal/data_imports/workflow_activities/import_data.py:0: error: Argument "source_type" to "sql_source_for_type" has incompatible type "str"; expected "Type" [arg-type]
posthog/temporal/data_imports/workflow_activities/import_data.py:0: error: Argument "source_type" to "sql_source_for_type" has incompatible type "str"; expected "Type" [arg-type]
posthog/temporal/tests/data_imports/test_end_to_end.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/api/test/test_team.py:0: error: "HttpResponse" has no attribute "json" [attr-defined]
posthog/api/test/test_team.py:0: error: "HttpResponse" has no attribute "json" [attr-defined]
posthog/api/test/test_capture.py:0: error: Statement is unreachable [unreachable]
posthog/api/test/test_capture.py:0: error: Incompatible return value type (got "_MonkeyPatchedWSGIResponse", expected "HttpResponse") [return-value]
posthog/api/test/test_capture.py:0: error: Module has no attribute "utc" [attr-defined]
posthog/api/test/test_capture.py:0: error: Unpacked dict entry 0 has incompatible type "Collection[str]"; expected "SupportsKeysAndGetItem[str, dict[Never, Never]]" [dict-item]
posthog/api/test/test_capture.py:0: error: Unpacked dict entry 0 has incompatible type "Collection[str]"; expected "SupportsKeysAndGetItem[str, dict[Never, Never]]" [dict-item]
posthog/api/test/test_capture.py:0: error: Unpacked dict entry 0 has incompatible type "Collection[str]"; expected "SupportsKeysAndGetItem[str, dict[Never, Never]]" [dict-item]
posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item]
posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item]
posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item]
posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item]
posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item]
posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item]
posthog/test/test_middleware.py:0: error: Incompatible types in assignment (expression has type "_MonkeyPatchedWSGIResponse", variable has type "_MonkeyPatchedResponse") [assignment]
posthog/management/commands/test/test_create_batch_export_from_app.py:0: error: Incompatible return value type (got "dict[str, Collection[str]]", expected "dict[str, str]") [return-value]
posthog/management/commands/test/test_create_batch_export_from_app.py:0: error: Incompatible types in assignment (expression has type "dict[str, Collection[str]]", variable has type "dict[str, str]") [assignment]
@ -859,21 +879,3 @@ posthog/api/test/batch_exports/test_update.py:0: error: Value of type "BatchExpo
posthog/api/test/batch_exports/test_update.py:0: error: Value of type "BatchExport" is not indexable [index]
posthog/api/test/batch_exports/test_update.py:0: error: Value of type "BatchExport" is not indexable [index]
posthog/api/test/batch_exports/test_pause.py:0: error: "batch_export_delete_schedule" does not return a value (it only ever returns None) [func-returns-value]
posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index]
posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index]
posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index]
posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index]
posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index]
posthog/api/test/test_capture.py:0: error: Statement is unreachable [unreachable]
posthog/api/test/test_capture.py:0: error: Incompatible return value type (got "_MonkeyPatchedWSGIResponse", expected "HttpResponse") [return-value]
posthog/api/test/test_capture.py:0: error: Module has no attribute "utc" [attr-defined]
posthog/api/test/test_capture.py:0: error: Unpacked dict entry 0 has incompatible type "Collection[str]"; expected "SupportsKeysAndGetItem[str, dict[Never, Never]]" [dict-item]
posthog/api/test/test_capture.py:0: error: Unpacked dict entry 0 has incompatible type "Collection[str]"; expected "SupportsKeysAndGetItem[str, dict[Never, Never]]" [dict-item]
posthog/api/test/test_capture.py:0: error: Unpacked dict entry 0 has incompatible type "Collection[str]"; expected "SupportsKeysAndGetItem[str, dict[Never, Never]]" [dict-item]
posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item]
posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item]
posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item]
posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item]
posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item]
posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item]
posthog/temporal/tests/data_imports/test_end_to_end.py:0: error: Unused "type: ignore" comment [unused-ignore]

View File

@ -144,6 +144,7 @@
"kea-waitfor": "^0.2.1",
"kea-window-values": "^3.0.0",
"lodash.merge": "^4.6.2",
"lodash.uniqby": "^4.7.0",
"maplibre-gl": "^3.5.1",
"md5": "^2.3.0",
"monaco-editor": "^0.49.0",
@ -233,6 +234,7 @@
"@types/jest": "^29.5.12",
"@types/jest-image-snapshot": "^6.1.0",
"@types/lodash.merge": "^4.6.9",
"@types/lodash.uniqby": "^4.7.9",
"@types/md5": "^2.3.0",
"@types/node": "^18.11.9",
"@types/papaparse": "^5.3.8",

View File

@ -253,6 +253,9 @@ dependencies:
lodash.merge:
specifier: ^4.6.2
version: 4.6.2
lodash.uniqby:
specifier: ^4.7.0
version: 4.7.0
maplibre-gl:
specifier: ^3.5.1
version: 3.5.1
@ -518,6 +521,9 @@ devDependencies:
'@types/lodash.merge':
specifier: ^4.6.9
version: 4.6.9
'@types/lodash.uniqby':
specifier: ^4.7.9
version: 4.7.9
'@types/node':
specifier: ^18.11.9
version: 18.11.9
@ -8397,6 +8403,12 @@ packages:
'@types/lodash': 4.14.188
dev: true
/@types/lodash.uniqby@4.7.9:
resolution: {integrity: sha512-rjrXji/seS6BZJRgXrU2h6FqxRVufsbq/HE0Tx0SdgbtlWr2YmD/M64BlYEYYlaMcpZwy32IYVkMfUMYlPuv0w==}
dependencies:
'@types/lodash': 4.14.188
dev: true
/@types/lodash@4.14.188:
resolution: {integrity: sha512-zmEmF5OIM3rb7SbLCFYoQhO4dGt2FRM9AMkxvA3LaADOF1n8in/zGJlWji9fmafLoNyz+FoL6FE0SLtGIArD7w==}
dev: true
@ -15687,6 +15699,10 @@ packages:
resolution: {integrity: sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==}
dev: false
/lodash.uniqby@4.7.0:
resolution: {integrity: sha512-e/zcLx6CSbmaEgFHCA7BnoQKyCtKMxnuWrJygbwPs/AIn+IMKl66L8/s+wBUn5LRw2pZx3bUHibiV1b6aTWIww==}
dev: false
/lodash@4.17.21:
resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==}

View File

@ -30,7 +30,7 @@ from posthog.models.dashboard_templates import DashboardTemplate
from posthog.models.tagged_item import TaggedItem
from posthog.models.user import User
from posthog.user_permissions import UserPermissionsSerializerMixin
from posthog.utils import filters_override_requested_by_client
from posthog.utils import filters_override_requested_by_client, variables_override_requested_by_client
logger = structlog.get_logger(__name__)
@ -126,6 +126,7 @@ class DashboardBasicSerializer(
class DashboardSerializer(DashboardBasicSerializer):
tiles = serializers.SerializerMethodField()
filters = serializers.SerializerMethodField()
variables = serializers.SerializerMethodField()
created_by = UserBasicSerializer(read_only=True)
use_template = serializers.CharField(write_only=True, allow_blank=True, required=False)
use_dashboard = serializers.IntegerField(write_only=True, allow_null=True, required=False)
@ -150,6 +151,7 @@ class DashboardSerializer(DashboardBasicSerializer):
"use_dashboard",
"delete_insights",
"filters",
"variables",
"tags",
"tiles",
"restriction_level",
@ -164,6 +166,12 @@ class DashboardSerializer(DashboardBasicSerializer):
return value
def validate_variables(self, value) -> dict:
if not isinstance(value, dict):
raise serializers.ValidationError("Variables must be a dictionary")
return value
@monitor(feature=Feature.DASHBOARD, endpoint="dashboard", method="POST")
def create(self, validated_data: dict, *args: Any, **kwargs: Any) -> Dashboard:
request = self.context["request"]
@ -301,6 +309,12 @@ class DashboardSerializer(DashboardBasicSerializer):
raise serializers.ValidationError("Filters must be a dictionary")
instance.filters = request_filters
request_variables = initial_data.get("variables")
if request_variables:
if not isinstance(request_variables, dict):
raise serializers.ValidationError("Filters must be a dictionary")
instance.variables = request_variables
instance = super().update(instance, validated_data)
user = cast(User, self.context["request"].user)
@ -410,6 +424,16 @@ class DashboardSerializer(DashboardBasicSerializer):
return dashboard.filters
def get_variables(self, dashboard: Dashboard) -> dict:
request = self.context.get("request")
if request:
variables_override = variables_override_requested_by_client(request)
if variables_override is not None:
return variables_override
return dashboard.variables
def validate(self, data):
if data.get("use_dashboard", None) and data.get("use_template", None):
raise serializers.ValidationError("`use_dashboard` and `use_template` cannot be used together")

View File

@ -60,6 +60,7 @@ from posthog.hogql.timings import HogQLTimings
from posthog.hogql_queries.apply_dashboard_filters import (
WRAPPER_NODE_KINDS,
apply_dashboard_filters_to_dict,
apply_dashboard_variables_to_dict,
)
from posthog.hogql_queries.legacy_compatibility.feature_flag import (
hogql_insights_replace_filters,
@ -109,10 +110,11 @@ from posthog.rate_limit import (
from posthog.settings import CAPTURE_TIME_TO_SEE_DATA, SITE_URL
from posthog.user_permissions import UserPermissionsSerializerMixin
from posthog.utils import (
filters_override_requested_by_client,
refresh_requested_by_client,
relative_date_parse,
str_to_bool,
filters_override_requested_by_client,
variables_override_requested_by_client,
)
logger = structlog.get_logger(__name__)
@ -594,12 +596,17 @@ class InsightSerializer(InsightBasicSerializer, UserPermissionsSerializerMixin):
dashboard: Optional[Dashboard] = self.context.get("dashboard")
request: Optional[Request] = self.context.get("request")
dashboard_filters_override = filters_override_requested_by_client(request) if request else None
dashboard_variables_override = variables_override_requested_by_client(request) if request else None
if hogql_insights_replace_filters(instance.team) and (
instance.query is not None or instance.query_from_filters is not None
):
query = instance.query or instance.query_from_filters
if dashboard is not None or dashboard_filters_override is not None:
if (
dashboard is not None
or dashboard_filters_override is not None
or dashboard_variables_override is not None
):
query = apply_dashboard_filters_to_dict(
query,
(
@ -611,6 +618,12 @@ class InsightSerializer(InsightBasicSerializer, UserPermissionsSerializerMixin):
),
instance.team,
)
query = apply_dashboard_variables_to_dict(
query,
dashboard_variables_override or {},
instance.team,
)
representation["filters"] = {}
representation["query"] = query
else:
@ -618,7 +631,9 @@ class InsightSerializer(InsightBasicSerializer, UserPermissionsSerializerMixin):
dashboard=dashboard, dashboard_filters_override=dashboard_filters_override
)
representation["query"] = instance.get_effective_query(
dashboard=dashboard, dashboard_filters_override=dashboard_filters_override
dashboard=dashboard,
dashboard_filters_override=dashboard_filters_override,
dashboard_variables_override=dashboard_variables_override,
)
if "insight" not in representation["filters"] and not representation["query"]:
@ -639,6 +654,7 @@ class InsightSerializer(InsightBasicSerializer, UserPermissionsSerializerMixin):
refresh_requested = refresh_requested_by_client(self.context["request"])
execution_mode = execution_mode_from_refresh(refresh_requested)
filters_override = filters_override_requested_by_client(self.context["request"])
variables_override = variables_override_requested_by_client(self.context["request"])
if self.context.get("is_shared", False):
execution_mode = shared_insights_execution_mode(execution_mode)
@ -650,6 +666,7 @@ class InsightSerializer(InsightBasicSerializer, UserPermissionsSerializerMixin):
execution_mode=execution_mode,
user=None if self.context["request"].user.is_anonymous else self.context["request"].user,
filters_override=filters_override,
variables_override=variables_override,
)
except ExposedHogQLError as e:
raise ValidationError(str(e))

View File

@ -28,7 +28,10 @@ from posthog.errors import ExposedCHQueryError
from posthog.event_usage import report_user_action
from posthog.hogql.ai import PromptUnclear, write_sql_from_prompt
from posthog.hogql.errors import ExposedHogQLError
from posthog.hogql_queries.apply_dashboard_filters import apply_dashboard_filters_to_dict
from posthog.hogql_queries.apply_dashboard_filters import (
apply_dashboard_filters_to_dict,
apply_dashboard_variables_to_dict,
)
from posthog.hogql_queries.query_runner import ExecutionMode, execution_mode_from_refresh
from posthog.models.user import User
from posthog.rate_limit import (
@ -79,6 +82,14 @@ class QueryViewSet(TeamAndOrgViewSetMixin, PydanticModelMixin, viewsets.ViewSet)
data.query.model_dump(), data.filters_override.model_dump(), self.team
) # type: ignore
if data.variables_override is not None:
if isinstance(data.query, BaseModel):
query_as_dict = data.query.model_dump()
else:
query_as_dict = data.query
data.query = apply_dashboard_variables_to_dict(query_as_dict, data.variables_override, self.team) # type: ignore
client_query_id = data.client_query_id or uuid.uuid4().hex
execution_mode = execution_mode_from_refresh(data.refresh)
response_status: int = status.HTTP_200_OK

View File

@ -18,6 +18,7 @@ from posthog.hogql_queries.query_runner import CacheMissResponse, ExecutionMode,
from posthog.models import Team, User
from posthog.schema import (
DatabaseSchemaQueryResponse,
HogQLVariable,
HogQuery,
DashboardFilter,
HogQLAutocomplete,
@ -35,6 +36,7 @@ def process_query_dict(
query_json: dict,
*,
dashboard_filters_json: Optional[dict] = None,
variables_override_json: Optional[dict] = None,
limit_context: Optional[LimitContext] = None,
execution_mode: ExecutionMode = ExecutionMode.RECENT_CACHE_CALCULATE_BLOCKING_IF_STALE,
user: Optional[User] = None,
@ -44,11 +46,17 @@ def process_query_dict(
) -> dict | BaseModel:
model = QuerySchemaRoot.model_validate(query_json)
tag_queries(query=query_json)
dashboard_filters = DashboardFilter.model_validate(dashboard_filters_json) if dashboard_filters_json else None
variables_override = (
[HogQLVariable.model_validate(n) for n in variables_override_json.values()] if variables_override_json else None
)
return process_query_model(
team,
model.root,
dashboard_filters=dashboard_filters,
variables_override=variables_override,
limit_context=limit_context,
execution_mode=execution_mode,
user=user,
@ -63,6 +71,7 @@ def process_query_model(
query: BaseModel, # mypy has problems with unions and isinstance
*,
dashboard_filters: Optional[DashboardFilter] = None,
variables_override: Optional[list[HogQLVariable]] = None,
limit_context: Optional[LimitContext] = None,
execution_mode: ExecutionMode = ExecutionMode.RECENT_CACHE_CALCULATE_BLOCKING_IF_STALE,
user: Optional[User] = None,
@ -80,6 +89,7 @@ def process_query_model(
team,
query.source,
dashboard_filters=dashboard_filters,
variables_override=variables_override,
limit_context=limit_context,
execution_mode=execution_mode,
user=user,
@ -119,6 +129,8 @@ def process_query_model(
else: # Query runner available - it will handle execution as well as caching
if dashboard_filters:
query_runner.apply_dashboard_filters(dashboard_filters)
if variables_override:
query_runner.apply_variable_overrides(variables_override)
result = query_runner.run(
execution_mode=execution_mode,
user=user,

View File

@ -13,6 +13,7 @@ from posthog.api.test.dashboards import DashboardAPI
from posthog.constants import AvailableFeature
from posthog.hogql_queries.legacy_compatibility.filter_to_query import filter_to_query
from posthog.models import Dashboard, DashboardTile, Filter, Insight, Team, User
from posthog.models.insight_variable import InsightVariable
from posthog.models.organization import Organization
from posthog.models.project import Project
from posthog.models.sharing_configuration import SharingConfiguration
@ -1391,3 +1392,52 @@ class TestDashboard(APIBaseTest, QueryMatchingTest):
for item in response["tiles"]:
self.assertNotEqual(item.get("dashboard", None), existing_dashboard.pk)
def test_dashboard_variables(self):
    """A dashboard's stored variable overrides are returned verbatim in the API response."""
    variable = InsightVariable.objects.create(
        team=self.team, name="Test 1", code_name="test_1", default_value="some_default_value", type="String"
    )

    # Dashboard-level override for the variable declared on the insight below.
    dashboard_variable_overrides = {
        str(variable.id): {
            "code_name": variable.code_name,
            "variableId": str(variable.id),
            "value": "some override value",
        }
    }
    dashboard = Dashboard.objects.create(
        team=self.team,
        name="dashboard 1",
        created_by=self.user,
        variables=dashboard_variable_overrides,
    )

    # Insight declaring the variable (without a value of its own).
    declared_query_variables = {
        str(variable.id): {
            "code_name": variable.code_name,
            "variableId": str(variable.id),
        }
    }
    insight = Insight.objects.create(
        filters={},
        query={
            "kind": "DataVisualizationNode",
            "source": {
                "kind": "HogQLQuery",
                "query": "select {variables.test_1}",
                "variables": declared_query_variables,
            },
            "chartSettings": {},
            "tableSettings": {},
        },
        team=self.team,
        last_refresh=now(),
    )
    DashboardTile.objects.create(dashboard=dashboard, insight=insight)

    response_data = self.dashboard_api.get_dashboard(dashboard.pk)

    returned_variables = response_data["variables"]
    assert returned_variables is not None
    assert isinstance(returned_variables, dict)
    assert len(returned_variables) == 1
    for variable_id, override in returned_variables.items():
        assert variable_id == str(variable.id)
        assert override["code_name"] == variable.code_name
        assert override["variableId"] == str(variable.id)
        assert override["value"] == "some override value"

View File

@ -33,6 +33,7 @@ from posthog.models import (
User,
)
from posthog.models.insight_caching_state import InsightCachingState
from posthog.models.insight_variable import InsightVariable
from posthog.models.project import Project
from posthog.schema import (
DataTableNode,
@ -380,6 +381,7 @@ class TestInsight(ClickhouseTestMixin, APIBaseTest, QueryMatchingTest):
team=self.team,
user=mock.ANY,
filters_override=None,
variables_override=None,
)
with patch(
@ -393,6 +395,7 @@ class TestInsight(ClickhouseTestMixin, APIBaseTest, QueryMatchingTest):
team=self.team,
user=mock.ANY,
filters_override=None,
variables_override=None,
)
def test_get_insight_by_short_id(self) -> None:
@ -3596,3 +3599,60 @@ class TestInsight(ClickhouseTestMixin, APIBaseTest, QueryMatchingTest):
self.assertNotIn("code", response)
self.assertIsNotNone(response["results"][0]["types"])
def test_insight_variables_overrides(self):
    """`variables_override` passed as a query param is applied to the returned insight query."""
    dashboard = Dashboard.objects.create(
        team=self.team,
        name="dashboard 1",
        created_by=self.user,
    )
    variable = InsightVariable.objects.create(
        team=self.team, name="Test 1", code_name="test_1", default_value="some_default_value", type="String"
    )

    # Insight declaring the variable without a value — the override supplies one.
    insight = Insight.objects.create(
        filters={},
        query={
            "kind": "DataVisualizationNode",
            "source": {
                "kind": "HogQLQuery",
                "query": "select {variables.test_1}",
                "variables": {
                    str(variable.id): {
                        "code_name": variable.code_name,
                        "variableId": str(variable.id),
                    }
                },
            },
            "chartSettings": {},
            "tableSettings": {},
        },
        team=self.team,
    )
    DashboardTile.objects.create(dashboard=dashboard, insight=insight)

    override_payload = {
        str(variable.id): {
            "code_name": variable.code_name,
            "variableId": str(variable.id),
            "value": "override value!",
        }
    }
    response = self.client.get(
        f"/api/projects/{self.team.id}/insights/{insight.pk}",
        data={
            "from_dashboard": dashboard.pk,
            "variables_override": json.dumps(override_payload),
        },
    ).json()

    assert isinstance(response["query"], dict)
    assert isinstance(response["query"]["source"], dict)

    returned_variables = response["query"]["source"]["variables"]
    assert isinstance(returned_variables, dict)
    assert len(returned_variables) == 1
    for variable_id, returned in returned_variables.items():
        assert variable_id == str(variable.id)
        assert returned["code_name"] == variable.code_name
        assert returned["variableId"] == str(variable.id)
        assert returned["value"] == "override value!"

View File

@ -130,6 +130,7 @@ def calculate_for_query_based_insight(
execution_mode: ExecutionMode,
user: Optional[User],
filters_override: Optional[dict] = None,
variables_override: Optional[dict] = None,
) -> "InsightResult":
from posthog.caching.fetch_from_cache import InsightResult, NothingInCacheResult
from posthog.caching.insight_cache import update_cached_state
@ -144,6 +145,13 @@ def calculate_for_query_based_insight(
dashboard_filters_json=(
filters_override if filters_override is not None else dashboard.filters if dashboard is not None else None
),
variables_override_json=(
variables_override
if variables_override is not None
else dashboard.variables
if dashboard is not None
else None
),
execution_mode=execution_mode,
user=user,
insight_id=insight.pk,

View File

@ -22,3 +22,31 @@ def apply_dashboard_filters_to_dict(query: dict, filters: dict, team: Team) -> d
return query
query_runner.apply_dashboard_filters(DashboardFilter(**filters))
return query_runner.query.model_dump()
# Apply the variables from the django-style Dashboard object
def apply_dashboard_variables_to_dict(query: dict, variables_overrides: dict[str, dict], team: Team) -> dict:
    """Return a copy of `query` with dashboard variable value overrides applied.

    Wrapper nodes (kinds in WRAPPER_NODE_KINDS) are unwrapped recursively so the
    override reaches the inner HogQLQuery. Only variables the query already
    declares are overridden; unknown override IDs are silently ignored.
    `team` is currently unused here — kept for signature parity with
    apply_dashboard_filters_to_dict.
    """
    if not variables_overrides:
        return query

    if query.get("kind") in WRAPPER_NODE_KINDS:
        source = apply_dashboard_variables_to_dict(query["source"], variables_overrides, team)
        return {**query, "source": source}

    if query.get("kind") == NodeKind.HOG_QL_QUERY:
        query_variables: dict[str, dict] | None = query.get("variables")

        if query_variables is None:
            return query

        # Copy before updating: the original wrote into query["variables"]
        # in place, mutating the caller's query dict despite returning a
        # fresh top-level dict.
        updated_variables = dict(query_variables)
        for variable_id, overridden_hogql_variable in variables_overrides.items():
            query_variable = updated_variables.get(variable_id)
            if query_variable:
                updated_variables[variable_id] = {
                    "variableId": variable_id,
                    "code_name": query_variable["code_name"],
                    "value": overridden_hogql_variable.get("value"),
                }

        return {**query, "variables": updated_variables}

    return query

View File

@ -33,6 +33,7 @@ from posthog.schema import (
FunnelsQuery,
HogQLQuery,
HogQLQueryModifiers,
HogQLVariable,
InsightActorsQuery,
InsightActorsQueryOptions,
LifecycleQuery,
@ -721,6 +722,20 @@ class QueryRunner(ABC, Generic[Q, R, CR]):
def _refresh_frequency(self) -> timedelta:
return timedelta(minutes=1)
def apply_variable_overrides(self, variable_overrides: list[HogQLVariable]):
    """Irreversibly update self.query with the provided variable overrides.

    Only HogQL queries carry variables, and only variables the query already
    declares are replaced; override entries for undeclared variables are ignored.
    """
    if not hasattr(self.query, "variables") or self.query.kind != "HogQLQuery" or len(variable_overrides) == 0:
        return

    assert isinstance(self.query, HogQLQuery)

    # Nothing declared on the query means nothing can be overridden.
    if not self.query.variables:
        return

    for variable in variable_overrides:
        # Replace the declared variable (including its value) with the override.
        if self.query.variables.get(variable.variableId):
            self.query.variables[variable.variableId] = variable
def apply_dashboard_filters(self, dashboard_filter: DashboardFilter):
"""Irreversably update self.query with provided dashboard filters."""
if not hasattr(self.query, "properties") or not hasattr(self.query, "dateRange"):

View File

@ -196,9 +196,17 @@ class Insight(models.Model):
return self.filters
def get_effective_query(
self, *, dashboard: Optional[Dashboard], dashboard_filters_override: Optional[dict] = None
self,
*,
dashboard: Optional[Dashboard],
dashboard_filters_override: Optional[dict] = None,
dashboard_variables_override: Optional[dict[str, dict]] = None,
) -> Optional[dict]:
from posthog.hogql_queries.apply_dashboard_filters import apply_dashboard_filters_to_dict
from posthog.hogql_queries.apply_dashboard_filters import apply_dashboard_variables_to_dict
if self.query and dashboard_variables_override:
self.query = apply_dashboard_variables_to_dict(self.query, dashboard_variables_override or {}, self.team)
if not (dashboard or dashboard_filters_override) or not self.query:
return self.query

View File

@ -6372,6 +6372,7 @@ class QueryRequest(BaseModel):
" `query_status` response field."
),
)
variables_override: Optional[dict[str, dict[str, Any]]] = None
class QuerySchemaRoot(

View File

@ -1068,6 +1068,20 @@ def filters_override_requested_by_client(request: Request) -> Optional[dict]:
return None
def variables_override_requested_by_client(request: Request) -> Optional[dict[str, dict]]:
    """Parse the optional `variables_override` query param as JSON, or return None if absent."""
    raw_variables = request.query_params.get("variables_override")

    if raw_variables is None:
        return None

    try:
        return json.loads(raw_variables)
    except Exception:
        raise serializers.ValidationError(
            {"variables_override": "Invalid JSON passed in variables_override parameter"}
        )
def _request_has_key_set(key: str, request: Request, allowed_values: Optional[list[str]] = None) -> bool | str:
query_param = request.query_params.get(key)
data_value = request.data.get(key)