feat(max): OpenAI data processing opt-in (#26248)
Co-authored-by: github-actions <41898282+github-actions[bot]@users.noreply.github.com>
(Six binary UI snapshot images updated: four grew from roughly 20 KiB to 35 KiB, two are unchanged in size.)
@@ -959,7 +959,7 @@ export const HedgehogBuddy = React.forwardRef<HTMLDivElement, HedgehogBuddyProps

     useEffect(() => {
         onPositionChange?.(actor)
-    }, [actor.x, actor.y])
+    }, [actor.x, actor.y, actor.direction])

     const onClick = (): void => {
         !actor.isDragging && _onClick?.(actor)
@@ -199,7 +199,7 @@ export const Popover = React.forwardRef<HTMLDivElement, PopoverProps>(function P
         if (visible && referenceRef?.current && floatingElement) {
             return autoUpdate(referenceRef.current, floatingElement, update)
         }
-    }, [visible, referenceRef?.current, floatingElement, ...additionalRefs])
+    }, [visible, placement, referenceRef?.current, floatingElement, ...additionalRefs])

     const floatingContainer = useFloatingContainer()

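These two supporting changes are both one-line dependency-array additions: `HedgehogBuddy` now re-reports its position when `actor.direction` changes, and `Popover` restarts its floating-ui `autoUpdate` subscription when the `placement` prop changes, so the new Intro (below) can move the consent popover to whichever side the hedgehog faces. A generic sketch of the React pattern both rely on, using a hypothetical component that is not part of this PR:

```tsx
import { useEffect, useState } from 'react'

// Hypothetical example of the pattern: a subscription set up inside an effect lists
// everything it depends on, so React tears it down and recreates it when that value changes.
// The same reasoning puts `actor.direction` and `placement` into the dependency arrays above.
function Ticker({ intervalMs }: { intervalMs: number }): JSX.Element {
    const [ticks, setTicks] = useState(0)

    useEffect(() => {
        const id = setInterval(() => setTicks((t) => t + 1), intervalMs)
        return () => clearInterval(id) // cleanup runs before the effect restarts
    }, [intervalMs]) // restart the interval whenever the prop changes

    return <span>{ticks}</span>
}
```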
@@ -1,8 +1,11 @@
-import { useValues } from 'kea'
+import { offset } from '@floating-ui/react'
+import { LemonButton, Popover } from '@posthog/lemon-ui'
+import { useActions, useValues } from 'kea'
 import { HedgehogBuddy } from 'lib/components/HedgehogBuddy/HedgehogBuddy'
 import { hedgehogBuddyLogic } from 'lib/components/HedgehogBuddy/hedgehogBuddyLogic'
-import { useMemo } from 'react'
+import { useMemo, useState } from 'react'

+import { maxGlobalLogic } from './maxGlobalLogic'
 import { maxLogic } from './maxLogic'

 const HEADLINES = [
@@ -14,8 +17,12 @@ const HEADLINES = [

 export function Intro(): JSX.Element {
     const { hedgehogConfig } = useValues(hedgehogBuddyLogic)
+    const { acceptDataProcessing } = useActions(maxGlobalLogic)
+    const { dataProcessingAccepted } = useValues(maxGlobalLogic)
     const { sessionId } = useValues(maxLogic)

+    const [hedgehogDirection, setHedgehogDirection] = useState<'left' | 'right'>('right')
+
     const headline = useMemo(() => {
         return HEADLINES[parseInt(sessionId.split('-').at(-1) as string, 16) % HEADLINES.length]
     }, [])
@@ -23,22 +30,52 @@ export function Intro(): JSX.Element {
     return (
         <>
             <div className="flex">
-                <HedgehogBuddy
-                    static
-                    hedgehogConfig={{
-                        ...hedgehogConfig,
-                        walking_enabled: false,
-                        controls_enabled: false,
-                    }}
-                    onClick={(actor) => {
-                        if (Math.random() < 0.01) {
-                            actor.setOnFire()
-                        } else {
-                            actor.setRandomAnimation()
-                        }
-                    }}
-                    onActorLoaded={(actor) => setTimeout(() => actor.setAnimation('wave'), 100)}
-                />
+                <Popover
+                    overlay={
+                        <div className="m-1.5">
+                            <p className="font-medium text-pretty mb-1.5">
+                                Hi! I use OpenAI services to analyze your data,
+                                <br />
+                                so that you can focus on building. This <em>can</em> include
+                                <br />
+                                personal data of your users, if you're capturing it.
+                                <br />
+                                <em>Your data won't be used for training models.</em>
+                            </p>
+                            <LemonButton type="secondary" size="small" onClick={() => acceptDataProcessing()}>
+                                Got it, I accept OpenAI processing data
+                            </LemonButton>
+                        </div>
+                    }
+                    placement={`${hedgehogDirection}-end`}
+                    middleware={[offset(-12)]}
+                    showArrow
+                    visible={!dataProcessingAccepted}
+                >
+                    <HedgehogBuddy
+                        static
+                        hedgehogConfig={{
+                            ...hedgehogConfig,
+                            walking_enabled: false,
+                            controls_enabled: false,
+                        }}
+                        onClick={(actor) => {
+                            if (Math.random() < 0.01) {
+                                actor.setOnFire()
+                            } else {
+                                actor.setRandomAnimation()
+                            }
+                        }}
+                        onActorLoaded={(actor) =>
+                            setTimeout(() => {
+                                actor.setAnimation('wave')
+                                // Always start out facing right so that the data processing popover is more readable
+                                actor.direction = 'right'
+                            }, 100)
+                        }
+                        onPositionChange={(actor) => setHedgehogDirection(actor.direction)}
+                    />
+                </Popover>
             </div>
             <div className="text-center mb-3">
                 <h2 className="text-2xl font-bold mb-2 text-balance">{headline}</h2>
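Putting the Intro change together: the buddy reports its facing direction via `onPositionChange`, the component mirrors the consent popover to that side through `placement`, and the popover disappears once the opt-in is stored. A minimal sketch of the two derived props, with a hypothetical helper name (the real component inlines both expressions):

```tsx
type HedgehogDirection = 'left' | 'right'

// Hypothetical helper illustrating the two props that drive the consent bubble:
// placement follows the hedgehog's facing direction (reported via onPositionChange),
// and visibility is simply the inverse of the persisted opt-in flag from maxGlobalLogic.
function getConsentPopoverProps(
    hedgehogDirection: HedgehogDirection,
    dataProcessingAccepted: boolean
): { placement: 'left-end' | 'right-end'; visible: boolean } {
    return {
        placement: hedgehogDirection === 'left' ? 'left-end' : 'right-end',
        visible: !dataProcessingAccepted,
    }
}
```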
@@ -8,6 +8,7 @@ import { mswDecorator, useStorybookMocks } from '~/mocks/browser'

 import { chatResponseChunk, failureChunk, generationFailureChunk } from './__mocks__/chatResponse.mocks'
 import { MaxInstance } from './Max'
+import { maxGlobalLogic } from './maxGlobalLogic'
 import { maxLogic } from './maxLogic'

 const meta: Meta = {
@@ -31,6 +32,12 @@ export default meta
 const SESSION_ID = 'b1b4b3b4-1b3b-4b3b-1b3b4b3b4b3b'

 const Template = ({ sessionId: SESSION_ID }: { sessionId: string }): JSX.Element => {
+    const { acceptDataProcessing } = useActions(maxGlobalLogic)
+
+    useEffect(() => {
+        acceptDataProcessing()
+    }, [])
+
     return (
         <div className="relative flex flex-col h-fit">
             <BindLogic logic={maxLogic} props={{ sessionId: SESSION_ID }}>
@@ -56,6 +63,11 @@ export const Welcome: StoryFn = () => {
             ],
         },
     })
+    const { acceptDataProcessing } = useActions(maxGlobalLogic)
+    useEffect(() => {
+        // We override the data processing opt-in to false, so that we see the welcome screen as a first-time user would
+        acceptDataProcessing(false)
+    }, [])

     return <Template sessionId={SESSION_ID} />
 }
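The story wiring leans on the `testOnlyOverride` parameter of `acceptDataProcessing`: `Template` opts in so the existing stories render the post-consent UI, while `Welcome` passes `false` to force the first-time state. A sketch of the action's effect on the store (illustrative only; it assumes a kea context is already set up, as it is in the app and in Storybook):

```tsx
import { maxGlobalLogic } from './maxGlobalLogic'

const unmount = maxGlobalLogic.mount()

maxGlobalLogic.actions.acceptDataProcessing() // regular opt-in: dataProcessingAccepted -> true (persisted)
maxGlobalLogic.actions.acceptDataProcessing(false) // test/story override: back to the pre-consent state
console.log(maxGlobalLogic.values.dataProcessingAccepted) // false

unmount()
```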
@@ -65,7 +77,7 @@ export const WelcomeSuggestionsAvailable: StoryFn = () => {

     useEffect(() => {
         loadCurrentProjectSuccess({ ...MOCK_DEFAULT_PROJECT, product_description: 'A Storybook test.' })
-    })
+    }, [])

     return <Welcome />
 }
@@ -81,7 +93,7 @@ export const WelcomeLoadingSuggestions: StoryFn = () => {

     useEffect(() => {
         loadCurrentProjectSuccess({ ...MOCK_DEFAULT_PROJECT, product_description: 'A Storybook test.' })
-    })
+    }, [])

     return <Template sessionId={SESSION_ID} />
 }
@@ -4,9 +4,11 @@ import clsx from 'clsx'
 import { useActions, useValues } from 'kea'
 import { useEffect, useRef } from 'react'

+import { maxGlobalLogic } from './maxGlobalLogic'
 import { maxLogic } from './maxLogic'

 export function QuestionInput(): JSX.Element {
+    const { dataProcessingAccepted } = useValues(maxGlobalLogic)
     const { question, thread, threadLoading } = useValues(maxLogic)
     const { askMax, setQuestion } = useActions(maxLogic)

@@ -48,7 +50,15 @@ export function QuestionInput(): JSX.Element {
                     type={isFloating && !question ? 'secondary' : 'primary'}
                     onClick={() => askMax(question)}
                     tooltip="Let's go!"
-                    disabledReason={!question ? 'I need some input first' : threadLoading ? 'Thinking…' : undefined}
+                    disabledReason={
+                        !dataProcessingAccepted
+                            ? 'Please accept OpenAI processing data'
+                            : !question
+                            ? 'I need some input first'
+                            : threadLoading
+                            ? 'Thinking…'
+                            : undefined
+                    }
                     size="small"
                     icon={<IconArrowRight />}
                 />
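The chained ternary reads in priority order: missing consent wins over an empty question, which wins over a loading thread. An equivalent, hypothetical helper (not part of the PR) that spells the precedence out:

```tsx
// Hypothetical refactor of the disabledReason chain above; same behaviour, written as early returns.
function getAskMaxDisabledReason(
    dataProcessingAccepted: boolean,
    question: string,
    threadLoading: boolean
): string | undefined {
    if (!dataProcessingAccepted) {
        return 'Please accept OpenAI processing data'
    }
    if (!question) {
        return 'I need some input first'
    }
    if (threadLoading) {
        return 'Thinking…'
    }
    return undefined
}
```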
@@ -4,9 +4,11 @@ import { useActions, useValues } from 'kea'

 import { sidePanelSettingsLogic } from '~/layout/navigation-3000/sidepanel/panels/sidePanelSettingsLogic'

+import { maxGlobalLogic } from './maxGlobalLogic'
 import { maxLogic } from './maxLogic'

 export function QuestionSuggestions(): JSX.Element {
+    const { dataProcessingAccepted } = useValues(maxGlobalLogic)
     const { visibleSuggestions, allSuggestionsLoading, currentProject } = useValues(maxLogic)
     const { askMax, shuffleVisibleSuggestions } = useActions(maxLogic)
     const { openSettingsPanel } = useActions(sidePanelSettingsLogic)
@@ -56,6 +58,9 @@ export function QuestionSuggestions(): JSX.Element {
                             sideIcon={<IconArrowUpRight />}
                             center
                             className="shrink"
+                            disabledReason={
+                                !dataProcessingAccepted ? 'Please accept OpenAI processing data' : undefined
+                            }
                         >
                             {suggestion}
                         </LemonButton>
frontend/src/scenes/max/maxGlobalLogic.ts (new file, 19 lines)
@@ -0,0 +1,19 @@
+import { actions, kea, path, reducers } from 'kea'
+
+import type { maxGlobalLogicType } from './maxGlobalLogicType'
+
+export const maxGlobalLogic = kea<maxGlobalLogicType>([
+    path(['scenes', 'max', 'maxGlobalLogic']),
+    actions({
+        acceptDataProcessing: (testOnlyOverride?: boolean) => ({ testOnlyOverride }),
+    }),
+    reducers({
+        dataProcessingAccepted: [
+            false,
+            { persist: true },
+            {
+                acceptDataProcessing: (_, { testOnlyOverride }) => testOnlyOverride ?? true,
+            },
+        ],
+    }),
+])
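The new logic is deliberately tiny: one action, one persisted boolean. `persist: true` presumably relies on the localStorage persistence already configured for kea in this app, so the opt-in survives reloads. A minimal consumption sketch (hypothetical component name; the real consumers are Intro, QuestionInput, and QuestionSuggestions above):

```tsx
import { useActions, useValues } from 'kea'
import { ReactNode } from 'react'

import { maxGlobalLogic } from './maxGlobalLogic'

// Hypothetical consumer: render a consent prompt until the user opts in.
function ConsentGate({ children }: { children: ReactNode }): JSX.Element {
    const { dataProcessingAccepted } = useValues(maxGlobalLogic)
    const { acceptDataProcessing } = useActions(maxGlobalLogic)

    if (!dataProcessingAccepted) {
        return <button onClick={() => acceptDataProcessing()}>Got it, I accept OpenAI processing data</button>
    }
    return <>{children}</>
}
```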
@@ -99,6 +99,7 @@ export const maxLogic = kea<maxLogicType>([
         ],
     }),
     loaders({
+        // TODO: Move question suggestions to `maxGlobalLogic`, which will make this logic `maxThreadLogic`
         allSuggestions: [
             null as string[] | null,
             {