diff --git a/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--dark.png b/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--dark.png
index e7d6b2a7d83..9e94e50e113 100644
Binary files a/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--dark.png and b/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--dark.png differ
diff --git a/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--light.png b/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--light.png
index d360db633aa..afcf5cd9c03 100644
Binary files a/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--light.png and b/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--light.png differ
diff --git a/frontend/__snapshots__/posthog-3000-sidebar--dashboards--dark.png b/frontend/__snapshots__/posthog-3000-sidebar--dashboards--dark.png
index 7f417ca6f1d..d66b1f11c1a 100644
Binary files a/frontend/__snapshots__/posthog-3000-sidebar--dashboards--dark.png and b/frontend/__snapshots__/posthog-3000-sidebar--dashboards--dark.png differ
diff --git a/frontend/__snapshots__/posthog-3000-sidebar--dashboards--light.png b/frontend/__snapshots__/posthog-3000-sidebar--dashboards--light.png
index 04c5d123699..eef82575bea 100644
Binary files a/frontend/__snapshots__/posthog-3000-sidebar--dashboards--light.png and b/frontend/__snapshots__/posthog-3000-sidebar--dashboards--light.png differ
diff --git a/frontend/__snapshots__/posthog-3000-sidebar--feature-flags--dark.png b/frontend/__snapshots__/posthog-3000-sidebar--feature-flags--dark.png
index ce20b2d82ad..687b3e52583 100644
Binary files a/frontend/__snapshots__/posthog-3000-sidebar--feature-flags--dark.png and b/frontend/__snapshots__/posthog-3000-sidebar--feature-flags--dark.png differ
diff --git a/frontend/__snapshots__/posthog-3000-sidebar--feature-flags--light.png b/frontend/__snapshots__/posthog-3000-sidebar--feature-flags--light.png
index bf60bd921e3..01823e88fef 100644
Binary files a/frontend/__snapshots__/posthog-3000-sidebar--feature-flags--light.png and b/frontend/__snapshots__/posthog-3000-sidebar--feature-flags--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark.png
index 0725eb97f8d..64260538a2c 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings--dark.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings--dark.png
index f84db4546c2..923fe9d53a9 100644
Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings--dark.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings--light.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings--light.png
index 2dce17c37f6..8ac25dbcb68 100644
Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings--light.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png
index 24b46c28263..152a1287d27 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project--light.png b/frontend/__snapshots__/scenes-other-settings--settings-project--light.png
index ff07ce03253..f41f2f77fb8 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-project--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png
index c8cd83be7e7..997b627decd 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png
index e8fe754ac07..b535dd4aeeb 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--dark.png
index 24b46c28263..152a1287d27 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--light.png
index ff07ce03253..f41f2f77fb8 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--dark.png
index 24b46c28263..152a1287d27 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--light.png
index ff07ce03253..f41f2f77fb8 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--dark.png
index 24b46c28263..152a1287d27 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--light.png
index ff07ce03253..f41f2f77fb8 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--dark.png
index 24b46c28263..152a1287d27 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--light.png
index ff07ce03253..f41f2f77fb8 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--dark.png
index 24b46c28263..152a1287d27 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--light.png
index ff07ce03253..f41f2f77fb8 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--dark.png
index 24b46c28263..152a1287d27 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--light.png
index ff07ce03253..f41f2f77fb8 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--light.png differ
diff --git a/frontend/src/layout/navigation-3000/Navigation.scss b/frontend/src/layout/navigation-3000/Navigation.scss
index 07bdfe66828..df5f78ab272 100644
--- a/frontend/src/layout/navigation-3000/Navigation.scss
+++ b/frontend/src/layout/navigation-3000/Navigation.scss
@@ -175,7 +175,7 @@
 .Sidebar3000 {
     --sidebar-slider-padding: 0.125rem;
     --sidebar-horizontal-padding: 0.5rem;
-    --sidebar-row-height: 2rem;
+    --sidebar-row-height: 2.5rem;
     --sidebar-background: var(--bg-3000);

     position: relative;
@@ -451,7 +451,8 @@
     }

     // Accommodate menu button by moving stuff out of the way
-    &.SidebarListItem--has-menu:not(.SidebarListItem--extended) .SidebarListItem__link {
+    &.SidebarListItem--has-menu:not(.SidebarListItem--extended) .SidebarListItem__link,
+    &.SidebarListItem--has-menu:not(.SidebarListItem--extended) .SidebarListItem__button {
        padding-right: calc(var(--sidebar-horizontal-padding) + 1.25rem);
    }
@@ -523,6 +524,7 @@
     }
 }

+.SidebarListItem__button,
 .SidebarListItem__link,
 .SidebarListItem__rename {
     --sidebar-list-item-inset: calc(
@@ -555,6 +557,17 @@
     }
 }

+.SidebarListItem__button {
+    row-gap: 1px;
+    padding: 0 var(--sidebar-horizontal-padding) 0 var(--sidebar-list-item-inset);
+    color: inherit !important; // Disable link color
+    cursor: pointer;
+
+    &:hover {
+        background: var(--border-3000);
+    }
+}
+
 .SidebarListItem__rename {
     // Pseudo-elements don't work on inputs, so we use a wrapper div
     background: var(--bg-light);
diff --git a/frontend/src/layout/navigation-3000/components/Navbar.tsx b/frontend/src/layout/navigation-3000/components/Navbar.tsx
index 62308871fc1..c0c64122782 100644
--- a/frontend/src/layout/navigation-3000/components/Navbar.tsx
+++ b/frontend/src/layout/navigation-3000/components/Navbar.tsx
@@ -27,7 +27,7 @@ export function Navbar(): JSX.Element {
     const { isAccountPopoverOpen, systemStatusHealthy } = useValues(navigationLogic)
     const { closeAccountPopover, toggleAccountPopover } = useActions(navigationLogic)
     const { isNavShown, isSidebarShown, activeNavbarItemId, navbarItems, mobileLayout } = useValues(navigation3000Logic)
-    const { showSidebar, hideSidebar, toggleNavCollapsed, hideNavOnMobile } = useActions(navigation3000Logic)
+    const { toggleNavCollapsed, hideNavOnMobile, showSidebar, hideSidebar } = useActions(navigation3000Logic)
     const { featureFlags } = useValues(featureFlagLogic)
     const { toggleSearchBar } = useActions(commandBarLogic)
diff --git a/frontend/src/layout/navigation-3000/components/Sidebar.tsx b/frontend/src/layout/navigation-3000/components/Sidebar.tsx
index 52610910586..96497e047ff 100644
--- a/frontend/src/layout/navigation-3000/components/Sidebar.tsx
+++ b/frontend/src/layout/navigation-3000/components/Sidebar.tsx
@@ -19,8 +19,16 @@ const SEARCH_DEBOUNCE_MS = 300

 interface SidebarProps {
     navbarItem: SidebarNavbarItem // Sidebar can only be rendered if there's an active sidebar navbar item
+    sidebarOverlay?: React.ReactNode
+    sidebarOverlayProps?: SidebarOverlayProps
 }
-export function Sidebar({ navbarItem }: SidebarProps): JSX.Element {
+
+interface SidebarOverlayProps {
+    className?: string
+    isOpen?: boolean
+}
+
+export function Sidebar({ navbarItem, sidebarOverlay, sidebarOverlayProps }: SidebarProps): JSX.Element {
     const inputElementRef = useRef<HTMLInputElement>(null)
     const {
@@ -81,6 +89,11 @@ export function Sidebar({ navbarItem }: SidebarProps): JSX.Element {
                 }
             }}
         />
+        {sidebarOverlay && (
+            <SidebarOverlay {...sidebarOverlayProps} width={sidebarWidth}>
+                {sidebarOverlay}
+            </SidebarOverlay>
+        )}
     )
 }
@@ -199,3 +212,24 @@ function SidebarKeyboardShortcut(): JSX.Element {
     )
 }
+
+function SidebarOverlay({
+    className,
+    isOpen = false,
+    children,
+    width,
+}: SidebarOverlayProps & { children: React.ReactNode; width: number }): JSX.Element | null {
+    if (!isOpen) {
+        return null
+    }
+
+    return (
+        <div className={className} style={{ width }}>
+            {children}
+        </div>
+    )
+}
diff --git a/frontend/src/layout/navigation-3000/components/SidebarList.tsx b/frontend/src/layout/navigation-3000/components/SidebarList.tsx
index d42b257b15d..2b63b9a61e9 100644
--- a/frontend/src/layout/navigation-3000/components/SidebarList.tsx
+++ b/frontend/src/layout/navigation-3000/components/SidebarList.tsx
@@ -13,7 +13,14 @@ import { InfiniteLoader } from 'react-virtualized/dist/es/InfiniteLoader'
 import { List, ListProps } from 'react-virtualized/dist/es/List'

 import { ITEM_KEY_PART_SEPARATOR, navigation3000Logic } from '../navigationLogic'
-import { BasicListItem, ExtendedListItem, ExtraListItemContext, SidebarCategory, TentativeListItem } from '../types'
+import {
+    BasicListItem,
+    ButtonListItem,
+    ExtendedListItem,
+    ExtraListItemContext,
+    SidebarCategory,
+    TentativeListItem,
+} from '../types'
 import { KeyboardShortcut } from './KeyboardShortcut'

 export function SidebarList({ category }: { category: SidebarCategory }): JSX.Element {
@@ -122,7 +129,7 @@ export function SidebarList({ category }: { category: SidebarCategory }): JSX.Element {
 }

 interface SidebarListItemProps {
-    item: BasicListItem | ExtendedListItem | TentativeListItem
+    item: BasicListItem | ExtendedListItem | TentativeListItem | ButtonListItem
     validateName?: SidebarCategory['validateName']
     active?: boolean
     style: React.CSSProperties
@@ -132,6 +139,10 @@ function isItemTentative(item: SidebarListItemProps['item']): item is TentativeListItem {
     return 'onSave' in item
 }

+function isItemClickable(item: SidebarListItemProps['item']): item is ButtonListItem {
+    return 'onClick' in item
+}
+
 function SidebarListItem({ item, validateName, active, style }: SidebarListItemProps): JSX.Element {
     const [isMenuOpen, setIsMenuOpen] = useState(false)
     const [newName, setNewName] = useState<string | null>(null)
@@ -218,7 +229,13 @@ function SidebarListItem({ item, validateName, active, style }: SidebarListItemProps): JSX.Element {
     }) // Intentionally run on every render so that ref value changes are picked up

     let content: JSX.Element
-    if (!save || (!isItemTentative(item) && newName === null)) {
+    if (isItemClickable(item)) {
+        content = (
+            <li className="SidebarListItem__button" onClick={item.onClick}>
+                <h5>{item.name}</h5>
+            </li>
+        )
+    } else if (!save || (!isItemTentative(item) && newName === null)) {
         if (isItemTentative(item)) {
             throw new Error('Tentative items should not be rendered in read mode')
         }
diff --git a/frontend/src/layout/navigation-3000/navigationLogic.tsx b/frontend/src/layout/navigation-3000/navigationLogic.tsx
index ca43417d405..4a81a00349c 100644
--- a/frontend/src/layout/navigation-3000/navigationLogic.tsx
+++ b/frontend/src/layout/navigation-3000/navigationLogic.tsx
@@ -31,6 +31,7 @@ import { LemonMenuOverlay } from 'lib/lemon-ui/LemonMenu/LemonMenu'
 import { featureFlagLogic } from 'lib/logic/featureFlagLogic'
 import { isNotNil } from 'lib/utils'
 import React from 'react'
+import { editorSidebarLogic } from 'scenes/data-warehouse/editor/editorSidebarLogic'
 import { sceneLogic } from 'scenes/sceneLogic'
 import { Scene } from 'scenes/sceneTypes'
 import { teamLogic } from 'scenes/teamLogic'
@@ -103,9 +104,6 @@ export const navigation3000Logic = kea<navigation3000LogicType>([
     reducers({
         isSidebarShown: [
             true,
-            {
-                persist: true,
-            },
             {
                 hideSidebar: () => false,
                 showSidebar: () => true,
@@ -514,9 +512,10 @@
             featureFlags[FEATURE_FLAGS.SQL_EDITOR]
                 ? {
                       identifier: Scene.SQLEditor,
-                      label: 'SQL editor',
+                      label: 'Data warehouse',
                       icon: ,
-                      to: isUsingSidebar ? undefined : urls.sqlEditor(),
+                      to: urls.sqlEditor(),
+                      logic: editorSidebarLogic,
                   }
                 : null,
             featureFlags[FEATURE_FLAGS.DATA_MODELING] && hasOnboardedAnyProduct
@@ -598,6 +597,9 @@
         activeNavbarItemId: [
             (s) => [s.activeNavbarItemIdRaw, featureFlagLogic.selectors.featureFlags],
             (activeNavbarItemIdRaw, featureFlags): string | null => {
+                if (featureFlags[FEATURE_FLAGS.SQL_EDITOR] && activeNavbarItemIdRaw === Scene.SQLEditor) {
+                    return Scene.SQLEditor
+                }
                 if (!featureFlags[FEATURE_FLAGS.POSTHOG_3000_NAV]) {
                     return null
                 }
diff --git a/frontend/src/layout/navigation-3000/types.ts b/frontend/src/layout/navigation-3000/types.ts
index 2ef13b34c25..3f79f6dbda4 100644
--- a/frontend/src/layout/navigation-3000/types.ts
+++ b/frontend/src/layout/navigation-3000/types.ts
@@ -104,6 +104,7 @@ export interface BasicListItem {
     /** URL within the app. In specific cases this can be null - such items are italicized. */
     url: string | null
+    onClick?: () => void
     /** An optional marker to highlight item state. */
     marker?: {
         /** A marker of type `fold` is a small triangle in the top left, `ribbon` is a narrow ribbon to the left.
          */
@@ -146,3 +147,8 @@ export interface TentativeListItem {
     adding: boolean
     ref?: BasicListItem['ref']
 }
+
+export interface ButtonListItem extends BasicListItem {
+    key: '__button__'
+    onClick: () => void
+}
diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts
index c4e17361415..774bf3522ad 100644
--- a/frontend/src/lib/api.ts
+++ b/frontend/src/lib/api.ts
@@ -2206,7 +2206,7 @@ const api = {
     },
     async update(
         viewId: DataWarehouseSavedQuery['id'],
-        data: Pick<DataWarehouseSavedQuery, 'name' | 'query'>
+        data: Partial<DataWarehouseSavedQuery>
     ): Promise<DataWarehouseSavedQuery> {
         return await new ApiRequest().dataWarehouseSavedQuery(viewId).update({ data })
     },
diff --git a/frontend/src/lib/constants.tsx b/frontend/src/lib/constants.tsx
index cb5831d6a05..29105a42624 100644
--- a/frontend/src/lib/constants.tsx
+++ b/frontend/src/lib/constants.tsx
@@ -230,6 +230,7 @@ export const FEATURE_FLAGS = {
     EDIT_DWH_SOURCE_CONFIG: 'edit_dwh_source_config', // owner: @Gilbert09 #team-data-warehouse
     AI_SURVEY_RESPONSE_SUMMARY: 'ai-survey-response-summary', // owner: @pauldambra
     CUSTOM_CHANNEL_TYPE_RULES: 'custom-channel-type-rules', // owner: @robbie-c #team-web-analytics
+    SELF_SERVE_CREDIT_OVERRIDE: 'self-serve-credit-override', // owner: @zach
     EXPERIMENTS_MIGRATION_DISABLE_UI: 'experiments-migration-disable-ui', // owner: @jurajmajerik #team-experiments
 } as const
 export type FeatureFlagKey = (typeof FEATURE_FLAGS)[keyof typeof FEATURE_FLAGS]
diff --git a/frontend/src/lib/lemon-ui/LemonDialog/LemonDialog.tsx b/frontend/src/lib/lemon-ui/LemonDialog/LemonDialog.tsx
index ca3b0a1cf25..91c8af8355c 100644
--- a/frontend/src/lib/lemon-ui/LemonDialog/LemonDialog.tsx
+++ b/frontend/src/lib/lemon-ui/LemonDialog/LemonDialog.tsx
@@ -12,6 +12,7 @@ export type LemonFormDialogProps = LemonDialogFormPropsType &
     Omit<LemonDialogProps, 'primaryButton' | 'secondaryButton' | 'tertiaryButton'> & {
         initialValues: Record<string, any>
         onSubmit: (values: Record<string, any>) => void | Promise<void>
+        shouldAwaitSubmit?: boolean
     }

 export type LemonDialogProps = Pick<
@@ -26,6 +27,7 @@
     onClose?: () => void
     onAfterClose?: () => void
     closeOnNavigate?: boolean
+    shouldAwaitSubmit?: boolean
 }

 export function LemonDialog({
     content,
     initialFormValues,
     closeOnNavigate = true,
+    shouldAwaitSubmit = false,
     footer,
     ...props
 }: LemonDialogProps): JSX.Element {
     const [isOpen, setIsOpen] = useState(true)
     const { currentLocation } = useValues(router)
     const lastLocation = useRef(currentLocation.pathname)
+    const [isLoading, setIsLoading] = useState(false)

     primaryButton =
         primaryButton ||
@@ -63,8 +67,20 @@
                 <LemonButton
                     {...button}
-                    onClick={(e) => {
-                        button.onClick?.(e)
+                    loading={button === primaryButton && shouldAwaitSubmit ? isLoading : undefined}
+                    // eslint-disable-next-line @typescript-eslint/no-misused-promises
+                    onClick={async (e) => {
+                        if (button === primaryButton && shouldAwaitSubmit) {
+                            setIsLoading(true)
+                            try {
+                                // eslint-disable-next-line @typescript-eslint/await-thenable
+                                await button.onClick?.(e)
+                            } finally {
+                                setIsLoading(false)
+                            }
+                        } else {
+                            button.onClick?.(e)
+                        }
                         setIsOpen(false)
                     }}
                 />
@@ -117,7 +133,8 @@
             type: 'primary',
             children: 'Submit',
             htmlType: 'submit',
-            onClick: () => void onSubmit(form),
+            // eslint-disable-next-line @typescript-eslint/no-misused-promises
+            onClick: props.shouldAwaitSubmit ? async () => await onSubmit(form) : () => void onSubmit(form),
             disabledReason: !isFormValid ? firstError : undefined,
         }
diff --git a/frontend/src/scenes/billing/CreditCTAHero.tsx b/frontend/src/scenes/billing/CreditCTAHero.tsx
index 0f076245c0a..85742aa8ef1 100644
--- a/frontend/src/scenes/billing/CreditCTAHero.tsx
+++ b/frontend/src/scenes/billing/CreditCTAHero.tsx
@@ -2,19 +2,25 @@ import { IconX } from '@posthog/icons'
 import { LemonButton, LemonDivider } from '@posthog/lemon-ui'
 import { useActions, useValues } from 'kea'
 import { BurningMoneyHog } from 'lib/components/hedgehogs'
+import { FEATURE_FLAGS } from 'lib/constants'
+import { featureFlagLogic } from 'lib/logic/featureFlagLogic'
 import useResizeObserver from 'use-resize-observer'

 import { billingLogic } from './billingLogic'
 import { PurchaseCreditsModal } from './PurchaseCreditsModal'

+export const DEFAULT_ESTIMATED_MONTHLY_CREDIT_AMOUNT_USD = 500
+
 export const CreditCTAHero = (): JSX.Element | null => {
     const { width, ref: heroRef } = useResizeObserver()
+    const { featureFlags } = useValues(featureFlagLogic)

     const { creditOverview, isPurchaseCreditsModalOpen, isCreditCTAHeroDismissed, computedDiscount } =
         useValues(billingLogic)
     const { showPurchaseCreditsModal, toggleCreditCTAHeroDismissed } = useActions(billingLogic)

-    if (!creditOverview.eligible || creditOverview.status === 'paid') {
+    const isEligible = creditOverview.eligible || featureFlags[FEATURE_FLAGS.SELF_SERVE_CREDIT_OVERRIDE]
+    if (creditOverview.status === 'paid' || !isEligible) {
         return null
     }
@@ -37,6 +43,8 @@ export const CreditCTAHero = (): JSX.Element | null => {
         )
     }

+    const estimatedMonthlyCreditAmountUsd =
+        creditOverview?.estimated_monthly_credit_amount_usd || DEFAULT_ESTIMATED_MONTHLY_CREDIT_AMOUNT_USD
     return (
         {
         )}
-            {creditOverview.eligible && creditOverview.status === 'pending' && (
+            {isEligible && creditOverview.status === 'pending' && (
                 <>
                         We're applying your credits
@@ -78,7 +86,7 @@ export const CreditCTAHero = (): JSX.Element | null => {
                 )}
             )}
-            {creditOverview.eligible && creditOverview.status === 'none' && (
+            {isEligible && (!creditOverview || creditOverview.status === 'none') && (
                 <>
                         Stop burning money.{' '}
                         Based on your usage, your monthly bill is forecasted to be an average of{' '}
-                        ${creditOverview.estimated_monthly_credit_amount_usd.toFixed(0)}/month over
-                        the next year.
+                        ${estimatedMonthlyCreditAmountUsd.toFixed(0)}/month over the next year.
                         This qualifies you for a {computedDiscount * 100}% discount by pre-purchasing usage
                         credits. Which gives you a net savings of{' '}
                         $
-                        {Math.round(
-                            creditOverview.estimated_monthly_credit_amount_usd * computedDiscount * 12
-                        ).toLocaleString('en-US', {
-                            minimumFractionDigits: 0,
-                            maximumFractionDigits: 0,
-                        })}
+                        {Math.round(estimatedMonthlyCreditAmountUsd * computedDiscount * 12).toLocaleString(
+                            'en-US',
+                            {
+                                minimumFractionDigits: 0,
+                                maximumFractionDigits: 0,
+                            }
+                        )}
                         {' '}
                         over the next year.
diff --git a/frontend/src/scenes/billing/PurchaseCreditsModal.tsx b/frontend/src/scenes/billing/PurchaseCreditsModal.tsx
index 5c2d36dc79a..60eb63fc2b0 100644
--- a/frontend/src/scenes/billing/PurchaseCreditsModal.tsx
+++ b/frontend/src/scenes/billing/PurchaseCreditsModal.tsx
@@ -8,6 +8,7 @@ import { LemonRadio } from 'lib/lemon-ui/LemonRadio'

 import { BillingGauge } from './BillingGauge'
 import { billingLogic } from './billingLogic'
+import { DEFAULT_ESTIMATED_MONTHLY_CREDIT_AMOUNT_USD } from './CreditCTAHero'
 import { BillingGaugeItemKind } from './types'

 export const PurchaseCreditsModal = (): JSX.Element | null => {
@@ -16,6 +17,8 @@ export const PurchaseCreditsModal = (): JSX.Element | null => {
     const { openSupportForm } = useActions(supportLogic)

     const creditInputValue: number = +creditForm.creditInput || 0
+    const estimatedMonthlyCreditAmountUsd =
+        creditOverview.estimated_monthly_credit_amount_usd || DEFAULT_ESTIMATED_MONTHLY_CREDIT_AMOUNT_USD
     return (
         <LemonModal
             onClose={() => showPurchaseCreditsModal(false)}
@@ -56,7 +59,7 @@
                 Based on your usage, we think you'll use{' '}
                 $
-                {(+creditOverview.estimated_monthly_credit_amount_usd).toLocaleString('en-US', {
+                {(+estimatedMonthlyCreditAmountUsd).toLocaleString('en-US', {
                     minimumFractionDigits: 0,
                     maximumFractionDigits: 0,
                 })}
                 of credits per month, for a total of{' '}
                 $
-                {(+creditOverview.estimated_monthly_credit_amount_usd * 12).toLocaleString('en-US', {
+                {(+estimatedMonthlyCreditAmountUsd * 12).toLocaleString('en-US', {
                     minimumFractionDigits: 0,
                     maximumFractionDigits: 0,
                 })}
diff --git a/frontend/src/scenes/billing/billingLogic.tsx b/frontend/src/scenes/billing/billingLogic.tsx
index 4db25f0fc36..ac78f13424b 100644
--- a/frontend/src/scenes/billing/billingLogic.tsx
+++ b/frontend/src/scenes/billing/billingLogic.tsx
@@ -18,6 +18,7 @@ import { userLogic } from 'scenes/userLogic'
 import { BillingPlanType, BillingProductV2Type, BillingType, ProductKey } from '~/types'

 import type { billingLogicType } from './billingLogicType'
+import { DEFAULT_ESTIMATED_MONTHLY_CREDIT_AMOUNT_USD } from './CreditCTAHero'

 export const ALLOCATION_THRESHOLD_ALERT = 0.85 // Threshold to show warning of event usage near limit
 export const ALLOCATION_THRESHOLD_BLOCK = 1.2 // Threshold to block usage
@@ -325,7 +326,7 @@ export const billingLogic = kea<billingLogicType>([
         creditOverview: [
             {
                 eligible: false,
-                estimated_monthly_credit_amount_usd: 0,
+                estimated_monthly_credit_amount_usd: DEFAULT_ESTIMATED_MONTHLY_CREDIT_AMOUNT_USD,
                 status: 'none',
                 invoice_url: null,
                 collection_method: null,
@@ -340,7 +341,10 @@
                 if (!values.creditForm.creditInput) {
                     actions.setCreditFormValue(
                         'creditInput',
-                        Math.round(response.estimated_monthly_credit_amount_usd * 12)
+                        Math.round(
+                            (response.estimated_monthly_credit_amount_usd ||
+                                DEFAULT_ESTIMATED_MONTHLY_CREDIT_AMOUNT_USD) * 12
+                        )
                     )
                 }
@@ -352,7 +356,7 @@
                 // Return default values if not subscribed
                 return {
                     eligible: false,
-                    estimated_monthly_credit_amount_usd: 0,
+                    estimated_monthly_credit_amount_usd: DEFAULT_ESTIMATED_MONTHLY_CREDIT_AMOUNT_USD,
                     status: 'none',
                     invoice_url: null,
                     collection_method: null,
@@ -531,7 +535,8 @@
             posthog.capture('credits cta shown', {
                 eligible: creditOverview.eligible,
                 status: creditOverview.status,
-                estimated_monthly_credit_amount_usd: creditOverview.estimated_monthly_credit_amount_usd,
+                estimated_monthly_credit_amount_usd:
+                    creditOverview.estimated_monthly_credit_amount_usd || DEFAULT_ESTIMATED_MONTHLY_CREDIT_AMOUNT_USD,
             })
         },
         toggleCreditCTAHeroDismissed: ({ isDismissed }) => {
diff --git a/frontend/src/scenes/data-warehouse/editor/EditorScene.tsx b/frontend/src/scenes/data-warehouse/editor/EditorScene.tsx
index 1b36477047c..3576303ebdd 100644
--- a/frontend/src/scenes/data-warehouse/editor/EditorScene.tsx
+++ b/frontend/src/scenes/data-warehouse/editor/EditorScene.tsx
@@ -1,14 +1,23 @@
-import { BindLogic } from 'kea'
+import { IconArrowLeft } from '@posthog/icons'
+import { BindLogic, useActions, useValues } from 'kea'
+import { CopyToClipboardInline } from 'lib/components/CopyToClipboard'
+import { DatabaseTableTree } from 'lib/components/DatabaseTableTree/DatabaseTableTree'
+import { LemonButton } from 'lib/lemon-ui/LemonButton'
 import { useRef } from 'react'

+import { Sidebar } from '~/layout/navigation-3000/components/Sidebar'
+import { navigation3000Logic } from '~/layout/navigation-3000/navigationLogic'
+
+import { editorSceneLogic } from './editorSceneLogic'
 import { editorSizingLogic } from './editorSizingLogic'
 import { QueryWindow } from './QueryWindow'
-import { SourceNavigator } from './SourceNavigator'

 export function EditorScene(): JSX.Element {
     const ref = useRef<HTMLDivElement>(null)
     const navigatorRef = useRef<HTMLDivElement>(null)
     const queryPaneRef = useRef<HTMLDivElement>(null)
+    const { activeNavbarItem } = useValues(navigation3000Logic)
+    const { sidebarOverlayOpen } = useValues(editorSceneLogic)

     const editorSizingLogicProps = {
         editorSceneRef: ref,
@@ -28,9 +37,41 @@ export function EditorScene(): JSX.Element {
     return (
         <BindLogic logic={editorSizingLogic} props={editorSizingLogicProps}>
-            <SourceNavigator />
+            {activeNavbarItem && (
+                <Sidebar
+                    navbarItem={activeNavbarItem}
+                    sidebarOverlay={<EditorSidebarOverlay />}
+                    sidebarOverlayProps={{ isOpen: sidebarOverlayOpen }}
+                />
+            )}
             <QueryWindow />
         </BindLogic>
     )
 }
+
+const EditorSidebarOverlay = (): JSX.Element => {
+    const { setSidebarOverlayOpen } = useActions(editorSceneLogic)
+    const { sidebarOverlayTreeItems, selectedSchema } = useValues(editorSceneLogic)
+
+    return (
+        <div>
+            <LemonButton icon={<IconArrowLeft />} onClick={() => setSidebarOverlayOpen(false)} />
+            {selectedSchema?.name && (
+                <CopyToClipboardInline>
+                    {selectedSchema?.name}
+                </CopyToClipboardInline>
+            )}
+            <DatabaseTableTree items={sidebarOverlayTreeItems} />
+        </div>
+    )
+}
diff --git a/frontend/src/scenes/data-warehouse/editor/QueryPane.tsx b/frontend/src/scenes/data-warehouse/editor/QueryPane.tsx
index 8d28cf57614..10e36c436e7 100644
--- a/frontend/src/scenes/data-warehouse/editor/QueryPane.tsx
+++ b/frontend/src/scenes/data-warehouse/editor/QueryPane.tsx
@@ -1,6 +1,5 @@
 import { useValues } from 'kea'
 import { Resizer } from 'lib/components/Resizer/Resizer'
-import { LemonBanner } from 'lib/lemon-ui/LemonBanner'
 import { CodeEditor, CodeEditorProps } from 'lib/monaco/CodeEditor'
 import { AutoSizer } from 'react-virtualized/dist/es/AutoSizer'
@@ -16,43 +15,44 @@ export function QueryPane(props: QueryPaneProps): JSX.Element {
     const { queryPaneHeight, queryPaneResizerProps } = useValues(editorSizingLogic)

     return (
-        <div>
-            {props.promptError ? <LemonBanner type="error">{props.promptError}</LemonBanner> : null}
-            <AutoSizer>
-                {({ height, width }) => (
-                    <CodeEditor height={height} width={width} {...props} />
-                )}
-            </AutoSizer>
-            <Resizer {...queryPaneResizerProps} />
-        </div>
+        <>
+            <div>
+                <AutoSizer>
+                    {({ height, width }) => (
+                        <CodeEditor height={height} width={width} {...props} />
+                    )}
+                </AutoSizer>
+            </div>
+            <Resizer {...queryPaneResizerProps} />
+        </>
     )
 }
diff --git a/frontend/src/scenes/data-warehouse/editor/QueryTabs.tsx b/frontend/src/scenes/data-warehouse/editor/QueryTabs.tsx
index b49acba9584..35a41c0f402 100644
--- a/frontend/src/scenes/data-warehouse/editor/QueryTabs.tsx
+++ b/frontend/src/scenes/data-warehouse/editor/QueryTabs.tsx
@@ -1,41 +1,42 @@
 import { IconPlus, IconX } from '@posthog/icons'
 import { LemonButton } from '@posthog/lemon-ui'
 import clsx from 'clsx'
-import { Uri } from 'monaco-editor'
+
+import { QueryTab } from './multitabEditorLogic'

 interface QueryTabsProps {
-    models: Uri[]
-    onClick: (model: Uri) => void
-    onClear: (model: Uri) => void
+    models: QueryTab[]
+    onClick: (model: QueryTab) => void
+    onClear: (model: QueryTab) => void
     onAdd: () => void
-    activeModelUri: Uri | null
+    activeModelUri: QueryTab | null
 }

 export function QueryTabs({ models, onClear, onClick, onAdd, activeModelUri }: QueryTabsProps): JSX.Element {
     return (
         <div>
-            {models.map((model: Uri) => (
-                <QueryTab
+            {models.map((model: QueryTab) => (
+                <QueryTabComponent
                     model={model}
                     onClear={models.length > 1 ? onClear : undefined}
                     onClick={onClick}
-                    active={activeModelUri?.path === model.path}
+                    active={activeModelUri?.uri.path === model.uri.path}
                 />
             ))}
-            <LemonButton onClick={onAdd} icon={<IconPlus />} />
+            <LemonButton onClick={() => onAdd()} icon={<IconPlus />} />
         </div>
     )
 }

 interface QueryTabProps {
-    model: Uri
-    onClick: (model: Uri) => void
-    onClear?: (model: Uri) => void
+    model: QueryTab
+    onClick: (model: QueryTab) => void
+    onClear?: (model: QueryTab) => void
     active: boolean
 }

-function QueryTab({ model, active, onClear, onClick }: QueryTabProps): JSX.Element {
+function QueryTabComponent({ model, active, onClear, onClick }: QueryTabProps): JSX.Element {
     return (
diff --git a/frontend/src/scenes/data-warehouse/editor/SourceNavigator.tsx b/frontend/src/scenes/data-warehouse/editor/SourceNavigator.tsx
deleted file mode 100644
index ca9f4991245..00000000000
--- a/frontend/src/scenes/data-warehouse/editor/SourceNavigator.tsx
+++ /dev/null
@@ -1,25 +0,0 @@
-import { useValues } from 'kea'
-import { Resizer } from 'lib/components/Resizer/Resizer'
-
-import { DatabaseTableTreeWithItems } from '../external/DataWarehouseTables'
-import { editorSizingLogic } from './editorSizingLogic'
-import { SchemaSearch } from './SchemaSearch'
-
-export function SourceNavigator(): JSX.Element {
-    const { sourceNavigatorWidth, sourceNavigatorResizerProps } = useValues(editorSizingLogic)
-
-    return (
-        <div>
-            <SchemaSearch />
-            <DatabaseTableTreeWithItems />
-            <Resizer {...sourceNavigatorResizerProps} />
-        </div>
-    )
-}
diff --git a/frontend/src/scenes/data-warehouse/editor/editorSceneLogic.ts b/frontend/src/scenes/data-warehouse/editor/editorSceneLogic.ts
new file mode 100644
index 00000000000..175183d6020
--- /dev/null
+++ b/frontend/src/scenes/data-warehouse/editor/editorSceneLogic.ts
@@ -0,0 +1,56 @@
+import { actions, kea, path, reducers, selectors } from 'kea'
+import { TreeItem } from 'lib/components/DatabaseTableTree/DatabaseTableTree'
+
+import { DatabaseSchemaDataWarehouseTable, DatabaseSchemaTable } from '~/queries/schema'
+import { DataWarehouseSavedQuery } from '~/types'
+
+import type { editorSceneLogicType } from './editorSceneLogicType'
+
+export const editorSceneLogic = kea<editorSceneLogicType>([
+    path(['scenes', 'data-warehouse', 'editor', 'editorSceneLogic']),
+    actions({
+        setSidebarOverlayOpen: (isOpen: boolean) => ({ isOpen }),
+        selectSchema: (schema: DatabaseSchemaDataWarehouseTable | DatabaseSchemaTable | DataWarehouseSavedQuery) => ({
+            schema,
+        }),
+    }),
+    reducers({
+        sidebarOverlayOpen: [
+            false,
+            {
+                setSidebarOverlayOpen: (_, { isOpen }) => isOpen,
+                selectSchema: (_, { schema }) => schema !== null,
+            },
+        ],
+        selectedSchema: [
+            null as DatabaseSchemaDataWarehouseTable | DatabaseSchemaTable | DataWarehouseSavedQuery | null,
+            {
+                selectSchema: (_, { schema }) => schema,
+            },
+        ],
+    }),
+    selectors({
+        sidebarOverlayTreeItems: [
+            (s) => [s.selectedSchema],
+            (selectedSchema): TreeItem[] => {
+                if (selectedSchema === null) {
+                    return []
+                }
+                if ('fields' in selectedSchema) {
+                    return Object.values(selectedSchema.fields).map((field) => ({
+                        name: field.name,
+                        type: field.type,
+                    }))
+                }
+
+                if ('columns' in selectedSchema) {
+                    return Object.values(selectedSchema.columns).map((column) => ({
+                        name: column.name,
+                        type: column.type,
+                    }))
+                }
+                return []
+            },
+        ],
+    }),
+])
diff --git a/frontend/src/scenes/data-warehouse/editor/editorSidebarLogic.ts b/frontend/src/scenes/data-warehouse/editor/editorSidebarLogic.ts
new file mode 100644
index 00000000000..8239bd16655
--- /dev/null
+++ b/frontend/src/scenes/data-warehouse/editor/editorSidebarLogic.ts
@@ -0,0 +1,203 @@
+import Fuse from 'fuse.js'
+import { connect, kea, path, selectors } from 'kea'
+import { router } from 'kea-router'
+import { subscriptions } from 'kea-subscriptions'
+import { databaseTableListLogic } from 'scenes/data-management/database/databaseTableListLogic'
+import { sceneLogic } from 'scenes/sceneLogic'
+import { Scene } from 'scenes/sceneTypes'
+import { urls } from 'scenes/urls'
+
+import { navigation3000Logic } from '~/layout/navigation-3000/navigationLogic'
+import { FuseSearchMatch } from '~/layout/navigation-3000/sidebars/utils'
+import { SidebarCategory } from '~/layout/navigation-3000/types'
+import { DatabaseSchemaDataWarehouseTable, DatabaseSchemaTable } from '~/queries/schema'
+import { DataWarehouseSavedQuery, PipelineTab } from '~/types'
+
+import { dataWarehouseViewsLogic } from '../saved_queries/dataWarehouseViewsLogic'
+import { editorSceneLogic } from './editorSceneLogic'
+import type { editorSidebarLogicType } from './editorSidebarLogicType'
+import { multitabEditorLogic } from './multitabEditorLogic'
+
+const dataWarehouseTablesfuse = new Fuse<DatabaseSchemaDataWarehouseTable>([], {
+    keys: [{ name: 'name', weight: 2 }],
+    threshold: 0.3,
+    ignoreLocation: true,
+    includeMatches: true,
+})
+
+const posthogTablesfuse = new Fuse<DatabaseSchemaTable>([], {
+    keys: [{ name: 'name', weight: 2 }],
+    threshold: 0.3,
+    ignoreLocation: true,
+    includeMatches: true,
+})
+
+const savedQueriesfuse = new Fuse<DataWarehouseSavedQuery>([], {
+    keys: [{ name: 'name', weight: 2 }],
+    threshold: 0.3,
+    ignoreLocation: true,
+    includeMatches: true,
+})
+
+export const editorSidebarLogic = kea<editorSidebarLogicType>([
+    path(['data-warehouse', 'editor', 'editorSidebarLogic']),
+    connect({
+        values: [
+            sceneLogic,
+            ['activeScene', 'sceneParams'],
+            dataWarehouseViewsLogic,
+            ['dataWarehouseSavedQueries', 'dataWarehouseSavedQueryMapById', 'dataWarehouseSavedQueriesLoading'],
+            databaseTableListLogic,
+            ['posthogTables', 'dataWarehouseTables', 'databaseLoading', 'views', 'viewsMapById'],
+        ],
+        actions: [editorSceneLogic, ['selectSchema'], dataWarehouseViewsLogic, ['deleteDataWarehouseSavedQuery']],
+    }),
+    selectors(({ actions }) => ({
+        contents: [
+            (s) => [
+                s.relevantSavedQueries,
+                s.dataWarehouseSavedQueriesLoading,
+                s.relevantPosthogTables,
+                s.relevantDataWarehouseTables,
+                s.databaseLoading,
+            ],
+            (
+                relevantSavedQueries,
+                dataWarehouseSavedQueriesLoading,
+                relevantPosthogTables,
+                relevantDataWarehouseTables,
+                databaseLoading
+            ) => [
+                {
+                    key: 'data-warehouse-sources',
+                    noun: ['source', 'external source'],
+                    loading: databaseLoading,
+                    items: relevantDataWarehouseTables.map(([table, matches]) => ({
+                        key: table.id,
+                        name: table.name,
+                        url: '',
+                        searchMatch: matches
+                            ? {
+                                  matchingFields: matches.map((match) => match.key),
+                                  nameHighlightRanges: matches.find((match) => match.key === 'name')?.indices,
+                              }
+                            : null,
+                        onClick: () => {
+                            actions.selectSchema(table)
+                        },
+                    })),
+                    onAdd: () => {
+                        router.actions.push(urls.pipeline(PipelineTab.Sources))
+                    },
+                } as SidebarCategory,
+                {
+                    key: 'data-warehouse-tables',
+                    noun: ['table', 'tables'],
+                    loading: databaseLoading,
+                    items: relevantPosthogTables.map(([table, matches]) => ({
+                        key: table.id,
+                        name: table.name,
+                        url: '',
+                        searchMatch: matches
+                            ? {
+                                  matchingFields: matches.map((match) => match.key),
+                                  nameHighlightRanges: matches.find((match) => match.key === 'name')?.indices,
+                              }
+                            : null,
+                        onClick: () => {
+                            actions.selectSchema(table)
+                        },
+                    })),
+                } as SidebarCategory,
+                {
+                    key: 'data-warehouse-views',
+                    noun: ['view', 'views'],
+                    loading: dataWarehouseSavedQueriesLoading,
+                    items: relevantSavedQueries.map(([savedQuery, matches]) => ({
+                        key: savedQuery.id,
+                        name: savedQuery.name,
+                        url: '',
+                        searchMatch: matches
+                            ? {
+                                  matchingFields: matches.map((match) => match.key),
+                                  nameHighlightRanges: matches.find((match) => match.key === 'name')?.indices,
+                              }
+                            : null,
+                        onClick: () => {
+                            actions.selectSchema(savedQuery)
+                        },
+                        menuItems: [
+                            {
+                                label: 'Edit view definition',
+                                onClick: () => {
+                                    multitabEditorLogic({
+                                        key: `hogQLQueryEditor/${router.values.location.pathname}`,
+                                    }).actions.createTab(savedQuery.query.query, savedQuery)
+                                },
+                            },
+                            {
+                                label: 'Delete',
+                                status: 'danger',
+                                onClick: () => {
+                                    actions.deleteDataWarehouseSavedQuery(savedQuery.id)
+                                },
+                            },
+                        ],
+                    })),
+                } as SidebarCategory,
+            ],
+        ],
+        activeListItemKey: [
+            (s) => [s.activeScene, s.sceneParams],
+            (activeScene, sceneParams): [string, number] | null => {
+                return activeScene === Scene.DataWarehouse && sceneParams.params.id
+                    ? ['saved-queries', parseInt(sceneParams.params.id)]
+                    : null
+            },
+        ],
+        relevantDataWarehouseTables: [
+            (s) => [s.dataWarehouseTables, navigation3000Logic.selectors.searchTerm],
+            (dataWarehouseTables, searchTerm): [DatabaseSchemaDataWarehouseTable, FuseSearchMatch[] | null][] => {
+                if (searchTerm) {
+                    return dataWarehouseTablesfuse
+                        .search(searchTerm)
+                        .map((result) => [result.item, result.matches as FuseSearchMatch[]])
+                }
+                return dataWarehouseTables.map((table) => [table, null])
+            },
+        ],
+        relevantPosthogTables: [
+            (s) => [s.posthogTables, navigation3000Logic.selectors.searchTerm],
+            (posthogTables, searchTerm): [DatabaseSchemaTable, FuseSearchMatch[] | null][] => {
+                if (searchTerm) {
+                    return posthogTablesfuse
+                        .search(searchTerm)
+                        .map((result) => [result.item, result.matches as FuseSearchMatch[]])
+                }
+                return posthogTables.map((table) => [table, null])
+            },
+        ],
+        relevantSavedQueries: [
+            (s) => [s.dataWarehouseSavedQueries, navigation3000Logic.selectors.searchTerm],
+            (dataWarehouseSavedQueries, searchTerm): [DataWarehouseSavedQuery, FuseSearchMatch[] | null][] => {
+                if (searchTerm) {
+                    return savedQueriesfuse
+                        .search(searchTerm)
+                        .map((result) => [result.item, result.matches as FuseSearchMatch[]])
+                }
+                return dataWarehouseSavedQueries.map((savedQuery) => [savedQuery, null])
+            },
+        ],
+    })),
+    subscriptions({
+        dataWarehouseTables: (dataWarehouseTables) => {
+            dataWarehouseTablesfuse.setCollection(dataWarehouseTables)
+        },
+        posthogTables: (posthogTables) => {
+            posthogTablesfuse.setCollection(posthogTables)
+        },
+        dataWarehouseSavedQueries: (dataWarehouseSavedQueries) => {
+            savedQueriesfuse.setCollection(dataWarehouseSavedQueries)
+        },
+    }),
+])
diff --git a/frontend/src/scenes/data-warehouse/editor/multitabEditorLogic.tsx b/frontend/src/scenes/data-warehouse/editor/multitabEditorLogic.tsx
index 7a4a3d4e84e..cad1c656c0b 100644
--- a/frontend/src/scenes/data-warehouse/editor/multitabEditorLogic.tsx
+++ b/frontend/src/scenes/data-warehouse/editor/multitabEditorLogic.tsx
@@ -1,6 +1,6 @@
 import { Monaco } from '@monaco-editor/react'
-import { LemonDialog, LemonInput } from '@posthog/lemon-ui'
-import { actions, kea, listeners, path, props, propsChanged, reducers, selectors } from 'kea'
+import { LemonDialog, LemonInput, lemonToast } from '@posthog/lemon-ui'
+import { actions, connect, kea, key, listeners, path, props, propsChanged, reducers, selectors } from 'kea'
 import { subscriptions } from 'kea-subscriptions'
 import { LemonField } from 'lib/lemon-ui/LemonField'
 import { ModelMarker } from 'lib/monaco/codeEditorLogic'
@@ -9,6 +9,7 @@ import { editor, MarkerSeverity, Uri } from 'monaco-editor'
 import { dataNodeLogic } from '~/queries/nodes/DataNode/dataNodeLogic'
 import { performQuery } from '~/queries/query'
 import { HogLanguage, HogQLMetadata, HogQLMetadataResponse, HogQLNotice, HogQLQuery, NodeKind } from '~/queries/schema'
+import { DataWarehouseSavedQuery } from '~/types'

 import { dataWarehouseViewsLogic } from '../saved_queries/dataWarehouseViewsLogic'
 import type { multitabEditorLogicType } from './multitabEditorLogicType'
@@ -22,29 +23,41 @@
 export const editorModelsStateKey = (key: string | number): string => `${key}/editorModelQueries`
 export const activemodelStateKey = (key: string | number): string => `${key}/activeModelUri`

+export interface QueryTab {
+    uri: Uri
+    view?: DataWarehouseSavedQuery
+}
+
 export const multitabEditorLogic = kea<multitabEditorLogicType>([
     path(['data-warehouse', 'editor', 'multitabEditorLogic']),
     props({} as MultitabEditorLogicProps),
+    key((props) => props.key),
+    connect({
+        actions: [
+            dataWarehouseViewsLogic,
+            ['deleteDataWarehouseSavedQuerySuccess', 'createDataWarehouseSavedQuerySuccess'],
+        ],
+    }),
     actions({
         setQueryInput: (queryInput: string) => ({ queryInput }),
         updateState: true,
         runQuery: (queryOverride?: string) => ({ queryOverride }),
         setActiveQuery: (query: string) => ({ query }),
-        setTabs: (tabs: Uri[]) => ({ tabs }),
-        addTab: (tab: Uri) => ({ tab }),
-        createTab: () => null,
-        deleteTab: (tab: Uri) => ({ tab }),
-        removeTab: (tab: Uri) => ({ tab }),
-        selectTab: (tab: Uri) => ({ tab }),
+        setTabs: (tabs: QueryTab[]) => ({ tabs }),
+        addTab: (tab: QueryTab) => ({ tab }),
+        createTab: (query?: string, view?: DataWarehouseSavedQuery) => ({ query, view }),
+        deleteTab: (tab: QueryTab) => ({ tab }),
+        removeTab: (tab: QueryTab) => ({ tab }),
+        selectTab: (tab: QueryTab) => ({ tab }),
         setLocalState: (key: string, value: any) => ({ key, value }),
         initialize: true,
         saveAsView: true,
-        saveAsViewSuccess: (name: string) => ({ name }),
+        saveAsViewSubmit: (name: string) => ({ name }),
         reloadMetadata: true,
         setMetadata: (query: string, metadata: HogQLMetadataResponse) => ({ query, metadata }),
     }),
-    propsChanged(({ actions }, oldProps) => {
-        if (!oldProps.monaco && !oldProps.editor) {
+    propsChanged(({ actions, props }, oldProps) => {
+        if (!oldProps.monaco && !oldProps.editor && props.monaco && props.editor) {
             actions.initialize()
         }
     }),
@@ -62,20 +75,26 @@
             },
         ],
         activeModelUri: [
-            null as Uri | null,
+            null as QueryTab | null,
            {
                selectTab: (_, { tab }) => tab,
            },
         ],
+        editingView: [
+            null as DataWarehouseSavedQuery | null,
+            {
+                selectTab: (_, { tab }) => tab.view ?? null,
+            },
+        ],
         allTabs: [
-            [] as Uri[],
+            [] as QueryTab[],
             {
                 addTab: (state, { tab }) => {
                     const newTabs = [...state, tab]
                     return newTabs
                 },
                 removeTab: (state, { tab: tabToRemove }) => {
-                    const newModels = state.filter((tab) => tab.toString() !== tabToRemove.toString())
+                    const newModels = state.filter((tab) => tab.uri.toString() !== tabToRemove.uri.toString())
                     return newModels
                 },
                 setTabs: (_, { tabs }) => tabs,
@@ -130,25 +149,32 @@
             },
         ],
     })),
-    listeners(({ values, props, actions }) => ({
-        createTab: () => {
+    listeners(({ values, props, actions, asyncActions }) => ({
+        createTab: ({ query = '', view }) => {
             let currentModelCount = 1
-            const allNumbers = values.allTabs.map((tab) => parseInt(tab.path.split('/').pop() || '0'))
+            const allNumbers = values.allTabs.map((tab) => parseInt(tab.uri.path.split('/').pop() || '0'))
             while (allNumbers.includes(currentModelCount)) {
                 currentModelCount++
             }
             if (props.monaco) {
                 const uri = props.monaco.Uri.parse(currentModelCount.toString())
-                const model = props.monaco.editor.createModel('', 'hogQL', uri)
+                const model = props.monaco.editor.createModel(query, 'hogQL', uri)
                 props.editor?.setModel(model)
-                actions.addTab(uri)
-                actions.selectTab(uri)
+                actions.addTab({
+                    uri,
+                    view,
+                })
+                actions.selectTab({
+                    uri,
+                    view,
+                })
                 const queries = values.allTabs.map((tab) => {
                     return {
-                        query: props.monaco?.editor.getModel(tab)?.getValue() || '',
-                        path: tab.path.split('/').pop(),
+                        query: props.monaco?.editor.getModel(tab.uri)?.getValue() || '',
+                        path: tab.uri.path.split('/').pop(),
+                        view: uri.path === tab.uri.path ? view : tab.view,
                     }
                 })
                 actions.setLocalState(editorModelsStateKey(props.key), JSON.stringify(queries))
             }
         },
         selectTab: ({ tab }) => {
             if (props.monaco) {
-                const model = props.monaco.editor.getModel(tab)
+                const model = props.monaco.editor.getModel(tab.uri)
                 props.editor?.setModel(model)
             }
-            const path = tab.path.split('/').pop()
+            const path = tab.uri.path.split('/').pop()
             path && actions.setLocalState(activemodelStateKey(props.key), path)
         },
         deleteTab: ({ tab: tabToRemove }) => {
             if (props.monaco) {
-                const model = props.monaco.editor.getModel(tabToRemove)
-                if (tabToRemove == values.activeModelUri) {
-                    const indexOfModel = values.allTabs.findIndex((tab) => tab.toString() === tabToRemove.toString())
+                const model = props.monaco.editor.getModel(tabToRemove.uri)
+                if (tabToRemove.uri.toString() === values.activeModelUri?.uri.toString()) {
+                    const indexOfModel = values.allTabs.findIndex(
+                        (tab) => tab.uri.toString() === tabToRemove.uri.toString()
+                    )
                     const nextModel =
                         values.allTabs[indexOfModel + 1] || values.allTabs[indexOfModel - 1] || values.allTabs[0] // there will always be one
                     actions.selectTab(nextModel)
                 }
                 actions.removeTab(tabToRemove)
                 const queries = values.allTabs.map((tab) => {
                     return {
-                        query: props.monaco?.editor.getModel(tab)?.getValue() || '',
-                        path: tab.path.split('/').pop(),
+                        query: props.monaco?.editor.getModel(tab.uri)?.getValue() || '',
+                        path: tab.uri.path.split('/').pop(),
+                        view: tab.view,
                     }
                 })
                 actions.setLocalState(editorModelsStateKey(props.key), JSON.stringify(queries))
             }
         },
@@ -197,14 +226,17 @@
             })

             const models = JSON.parse(allModelQueries || '[]')
-            const newModels: Uri[] = []
+            const newModels: QueryTab[] = []

             models.forEach((model: Record<string, any>) => {
                 if (props.monaco) {
                     const uri = props.monaco.Uri.parse(model.path)
                     const newModel = props.monaco.editor.createModel(model.query, 'hogQL', uri)
                     props.editor?.setModel(newModel)
-                    newModels.push(uri)
+                    newModels.push({
+                        uri,
+                        view: model.view,
+                    })
                 }
             })
@@ -221,9 +253,17 @@
                     actions.setQueryInput(val)
                     actions.runQuery()
                 }
-                uri && actions.selectTab(uri)
+                const activeView = newModels.find((tab) => tab.uri.path.split('/').pop() === activeModelUri)?.view
+
+                uri &&
+                    actions.selectTab({
+                        uri,
+                        view: activeView,
+                    })
             } else if (newModels.length) {
-                actions.selectTab(newModels[0])
+                actions.selectTab({
+                    uri: newModels[0].uri,
+                })
             }
         } else {
             const model = props.editor?.getModel()
@@ -240,13 +280,23 @@
             await breakpoint(100)
             const queries = values.allTabs.map((model) => {
                 return {
-                    query: props.monaco?.editor.getModel(model)?.getValue() || '',
-                    path: model.path.split('/').pop(),
+                    query: props.monaco?.editor.getModel(model.uri)?.getValue() || '',
+                    path: model.uri.path.split('/').pop(),
+                    view: model.view,
                 }
             })
             localStorage.setItem(editorModelsStateKey(props.key), JSON.stringify(queries))
         },
         runQuery: ({ queryOverride }) => {
+            if (values.activeQuery === queryOverride || values.activeQuery === values.queryInput) {
+                dataNodeLogic({
+                    key: values.activeTabKey,
+                    query: {
+                        kind: NodeKind.HogQLQuery,
+                        query: queryOverride || values.queryInput,
+                    },
+                }).actions.loadData(true)
+            }
             actions.setActiveQuery(queryOverride || values.queryInput)
         },
         saveAsView: async () => {
@@ -261,10 +311,13 @@
             errors: {
                 viewName: (name) => (!name ? 'You must enter a name' : undefined),
             },
-            onSubmit: ({ viewName }) => actions.saveAsViewSuccess(viewName),
+            onSubmit: async ({ viewName }) => {
+                await asyncActions.saveAsViewSubmit(viewName)
+            },
+            shouldAwaitSubmit: true,
         })
     },
-    saveAsViewSuccess: async ({ name }) => {
+    saveAsViewSubmit: async ({ name }) => {
         const query: HogQLQuery = {
             kind: NodeKind.HogQLQuery,
             query: values.queryInput,
         }
@@ -290,11 +343,34 @@
         breakpoint()
         actions.setMetadata(query, response)
     },
+    deleteDataWarehouseSavedQuerySuccess: ({ payload: viewId }) => {
+        const tabToRemove = values.allTabs.find((tab) => tab.view?.id === viewId)
+        if (tabToRemove) {
+            actions.deleteTab(tabToRemove)
+        }
+        lemonToast.success('View deleted')
+    },
+    createDataWarehouseSavedQuerySuccess: ({ dataWarehouseSavedQueries, payload: view }) => {
+        const newView = view && dataWarehouseSavedQueries.find((v) => v.name === view.name)
+        if (newView) {
+            const newTabs = values.allTabs.map((tab) => ({
+                ...tab,
+                view: tab.uri.path === values.activeModelUri?.uri.path ? newView : tab.view,
+            }))
+            const newTab = newTabs.find((tab) => tab.uri.path === values.activeModelUri?.uri.path)
+            actions.setTabs(newTabs)
+            newTab && actions.selectTab(newTab)
+            actions.updateState()
+        }
+    },
+    updateDataWarehouseSavedQuerySuccess: () => {
+        lemonToast.success('View updated')
+    },
 })),
 subscriptions(({ props, actions, values }) => ({
     activeModelUri: (activeModelUri) => {
         if (props.monaco) {
-            const _model = props.monaco.editor.getModel(activeModelUri)
+            const _model = props.monaco.editor.getModel(activeModelUri.uri)
             const val = _model?.getValue()
             actions.setQueryInput(val ?? '')
             actions.runQuery()
@@ -313,7 +389,7 @@
     },
 })),
 selectors({
-    activeTabKey: [(s) => [s.activeModelUri], (activeModelUri) => `hogQLQueryEditor/${activeModelUri?.path}`],
+    activeTabKey: [(s) => [s.activeModelUri], (activeModelUri) => `hogQLQueryEditor/${activeModelUri?.uri.path}`],
     isValidView: [(s) => [s.metadata], (metadata) => !!(metadata && metadata[1]?.isValidView)],
     hasErrors: [
         (s) => [s.modelMarkers],
diff --git a/frontend/src/scenes/data-warehouse/editor/sourceNavigatorLogic.ts b/frontend/src/scenes/data-warehouse/editor/sourceNavigatorLogic.ts
deleted file mode 100644
index 406ae6e0d4e..00000000000
--- a/frontend/src/scenes/data-warehouse/editor/sourceNavigatorLogic.ts
+++ /dev/null
@@ -1,18 +0,0 @@
-import { kea } from 'kea'
-
-import type { sourceNavigatorLogicType } from './sourceNavigatorLogicType'
-
-export const sourceNavigatorLogic = kea<sourceNavigatorLogicType>({
-    path: ['scenes', 'data-warehouse', 'editor', 'sourceNavigatorLogic'],
-    actions: {
-        setWidth: (width: number) => ({ width }),
-    },
-    reducers: {
-        navigatorWidth: [
-            200,
-            {
-                setWidth: (_, { width }: { width: number }) => width,
-            },
-        ],
-    },
-})
diff --git a/frontend/src/scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic.tsx b/frontend/src/scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic.tsx
index 10df0ed6c1f..37c744e633d 100644
--- a/frontend/src/scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic.tsx
+++ b/frontend/src/scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic.tsx
@@ -46,7 +46,7 @@
             await api.dataWarehouseSavedQueries.delete(viewId)
             return values.dataWarehouseSavedQueries.filter((view) => view.id !== viewId)
         },
-        updateDataWarehouseSavedQuery: async (view: DatabaseSchemaViewTable) => {
+        updateDataWarehouseSavedQuery: async (view: Partial<DatabaseSchemaViewTable> & { id: string }) => {
             const newView = await api.dataWarehouseSavedQueries.update(view.id, view)
             return values.dataWarehouseSavedQueries.map((savedQuery) => {
                 if (savedQuery.id === view.id) {
diff --git a/frontend/src/scenes/data-warehouse/settings/source/SourceConfiguration.tsx b/frontend/src/scenes/data-warehouse/settings/source/SourceConfiguration.tsx
index e4e36cdc54e..8df7e27c760 100644
--- a/frontend/src/scenes/data-warehouse/settings/source/SourceConfiguration.tsx
+++ b/frontend/src/scenes/data-warehouse/settings/source/SourceConfiguration.tsx
@@ -43,7 +43,7 @@ function UpdateSourceConnectionFormContainer(props: UpdateSourceConnectionFormContainerProps): JSX.Element {
         <>
             Overwrite your existing configuration here
-
+
             ,
-            flag: '!ENVIRONMENTS',
+            flag: ['ARTIFICIAL_HOG', '!ENVIRONMENTS'],
         },
         {
             id: 'snippet',
diff --git a/frontend/src/scenes/settings/environment/SessionRecordingIngestionSettings.tsx b/frontend/src/scenes/settings/environment/SessionRecordingIngestionSettings.tsx
index fc2e0cbf7ce..1b30b0bc834 100644
--- a/frontend/src/scenes/settings/environment/SessionRecordingIngestionSettings.tsx
+++ b/frontend/src/scenes/settings/environment/SessionRecordingIngestionSettings.tsx
@@ -252,6 +252,8 @@ function UrlConfigSection({
             <p>{description}</p>
+            <p>{`${title} is only available for JavaScript Web.`}</p>
+
             {props.isAddFormVisible && (
             )}
@@ -335,6 +337,7 @@ function EventTriggerOptions(): JSX.Element | null {
             <p>
                 Session recording will be started immediately before PostHog queues any of these events to be sent
                 to the backend.
             </p>
+            <p>Event emitted is only available for JavaScript Web.</p>
             {
@@ -482,6 +485,7 @@ export function SessionRecordingIngestionSettings(): JSX.Element | null {
                 useful if you want to reduce the amount of data you collect. 100% means all sessions will be
                 collected. 50% means roughly half of sessions will be collected.
             </p>
+            <p>Sampling is only available for JavaScript Web.</p>
         )}
         {recordingDurationMinimumFeatureEnabled && (
@@ -502,6 +506,7 @@
                 value are collected. This helps you avoid collecting sessions that are too short to be useful.
             </p>
+            <p>Minimum session duration is only available for JavaScript Web.</p>
         )}
diff --git a/frontend/src/scenes/settings/environment/SessionRecordingSettings.tsx b/frontend/src/scenes/settings/environment/SessionRecordingSettings.tsx
index 6fbc1c0dac2..805743eb259 100644
--- a/frontend/src/scenes/settings/environment/SessionRecordingSettings.tsx
+++ b/frontend/src/scenes/settings/environment/SessionRecordingSettings.tsx
@@ -24,6 +24,13 @@ function LogCaptureSettings(): JSX.Element {
             <p>
                 This setting controls if browser console logs will be captured as a part of recordings. The console
                 logs will be shown in the recording player to help you debug any issues.
             </p>

    +

    + Log capture is also available for{' '} + + Mobile session replay + {' '} + , where it can be configured directly in code. +

    { @@ -51,6 +58,7 @@ function CanvasCaptureSettings(): JSX.Element | null { There is no way to mask canvas elements right now so please make sure they are free of PII.

    +

    Canvas capture is only available for JavaScript Web.

    { @@ -125,6 +133,13 @@ export function NetworkCaptureSettings(): JSX.Element { Learn how to mask header and payload values in our docs

    +

    + Network capture is also available for{' '} + + Mobile session replay + {' '} + , where it can be configured directly in code. +

    @@ -208,6 +223,7 @@ export function ReplayAuthorizedDomains(): JSX.Element { Use the settings below to restrict the domains where recordings will be captured. If no domains are selected, then there will be no domain restriction.

    +

    Authorized domains is only available for JavaScript Web.

    Domains and wildcard subdomains are allowed (e.g. https://*.example.com). However, wildcarded top-level domains cannot be used (for security reasons). diff --git a/frontend/src/scenes/settings/settingsLogic.ts b/frontend/src/scenes/settings/settingsLogic.ts index 3c825e75690..b2707e0ce71 100644 --- a/frontend/src/scenes/settings/settingsLogic.ts +++ b/frontend/src/scenes/settings/settingsLogic.ts @@ -74,16 +74,9 @@ export const settingsLogic = kea([ }, ], sections: [ - (s) => [s.featureFlags], - (featureFlags): SettingSection[] => { - const sections = SETTINGS_MAP.filter((x) => { - const isFlagConditionMet = !x.flag - ? true // No flag condition - : x.flag.startsWith('!') - ? !featureFlags[FEATURE_FLAGS[x.flag.slice(1)]] // Negated flag condition (!-prefixed) - : featureFlags[FEATURE_FLAGS[x.flag]] // Regular flag condition - return isFlagConditionMet - }) + (s) => [s.doesMatchFlags, s.featureFlags], + (doesMatchFlags, featureFlags): SettingSection[] => { + const sections = SETTINGS_MAP.filter(doesMatchFlags) if (!featureFlags[FEATURE_FLAGS.ENVIRONMENTS]) { return sections .filter((section) => section.level !== 'project') @@ -108,24 +101,8 @@ export const settingsLogic = kea([ }, ], settings: [ - (s) => [ - s.selectedLevel, - s.selectedSectionId, - s.sections, - s.settingId, - s.featureFlags, - s.hasAvailableFeature, - s.preflight, - ], - ( - selectedLevel, - selectedSectionId, - sections, - settingId, - featureFlags, - hasAvailableFeature, - preflight - ): Setting[] => { + (s) => [s.selectedLevel, s.selectedSectionId, s.sections, s.settingId, s.doesMatchFlags, s.preflight], + (selectedLevel, selectedSectionId, sections, settingId, doesMatchFlags, preflight): Setting[] => { let settings: Setting[] = [] if (selectedSectionId) { @@ -140,29 +117,40 @@ export const settingsLogic = kea([ return settings.filter((x) => x.id === settingId) } - return settings - .filter((x) => { - const isFlagConditionMet = !x.flag - ? true // No flag condition - : x.flag.startsWith('!') - ? !featureFlags[FEATURE_FLAGS[x.flag.slice(1)]] // Negated flag condition (!-prefixed) - : featureFlags[FEATURE_FLAGS[x.flag]] // Regular flag condition - if (x.flag && x.features) { - return x.features.some((feat) => hasAvailableFeature(feat)) || isFlagConditionMet - } else if (x.features) { - return x.features.some((feat) => hasAvailableFeature(feat)) - } else if (x.flag) { - return isFlagConditionMet - } - + return settings.filter((x) => { + if (!doesMatchFlags(x)) { + return false + } + if (x.hideOn?.includes(Realm.Cloud) && preflight?.cloud) { + return false + } + return true + }) + }, + ], + doesMatchFlags: [ + (s) => [s.featureFlags], + (featureFlags) => { + return (x: Pick) => { + if (!x.flag) { + // No flag condition return true - }) - .filter((x) => { - if (x.hideOn?.includes(Realm.Cloud) && preflight?.cloud) { + } + const flagsArray = Array.isArray(x.flag) ? x.flag : [x.flag] + for (const flagCondition of flagsArray) { + const flag = ( + flagCondition.startsWith('!') ? 
flagCondition.slice(1) : flagCondition + ) as keyof typeof FEATURE_FLAGS + let isConditionMet = featureFlags[FEATURE_FLAGS[flag]] + if (flagCondition.startsWith('!')) { + isConditionMet = !isConditionMet // Negated flag condition (!-prefixed) + } + if (!isConditionMet) { return false } - return true - }) + } + return true + } }, ], }), diff --git a/frontend/src/scenes/settings/types.ts b/frontend/src/scenes/settings/types.ts index 3b3f17218c8..fa210a263e2 100644 --- a/frontend/src/scenes/settings/types.ts +++ b/frontend/src/scenes/settings/types.ts @@ -1,6 +1,6 @@ import { EitherMembershipLevel, FEATURE_FLAGS } from 'lib/constants' -import { AvailableFeature, Realm } from '~/types' +import { Realm } from '~/types' export type SettingsLogicProps = { logicKey?: string @@ -111,21 +111,16 @@ export type Setting = { /** * Feature flag to gate the setting being shown. * If prefixed with !, the condition is inverted - the setting will only be shown if the is flag false. + * When an array is provided, the setting will be shown if ALL of the conditions are met. */ - flag?: FeatureFlagKey | `!${FeatureFlagKey}` - features?: AvailableFeature[] + flag?: FeatureFlagKey | `!${FeatureFlagKey}` | (FeatureFlagKey | `!${FeatureFlagKey}`)[] hideOn?: Realm[] } -export type SettingSection = { +export interface SettingSection extends Pick { id: SettingSectionId title: string level: SettingLevelId settings: Setting[] - /** - * Feature flag to gate the section being shown. - * If prefixed with !, the condition is inverted - the section will only be shown if the is flag false. - */ - flag?: FeatureFlagKey | `!${FeatureFlagKey}` minimumAccessLevel?: EitherMembershipLevel } diff --git a/hogvm/__tests__/__snapshots__/crypto.hoge b/hogvm/__tests__/__snapshots__/crypto.hoge index e273437821c..19b01bd7a3c 100644 --- a/hogvm/__tests__/__snapshots__/crypto.hoge +++ b/hogvm/__tests__/__snapshots__/crypto.hoge @@ -1,4 +1,5 @@ ["_H", 1, 32, "this is a secure string", 32, "string:", 36, 0, 2, "print", 2, 35, 32, "md5Hex(string):", 36, 0, 2, -"md5Hex", 1, 2, "print", 2, 35, 32, "sha256Hex(string):", 36, 0, 2, "sha256Hex", 1, 2, "print", 2, 35, 32, "1", 32, -"string", 32, "more", 32, "keys", 43, 4, 32, "data:", 36, 1, 2, "print", 2, 35, 32, "sha256HmacChainHex(data):", 36, 1, -2, "sha256HmacChainHex", 1, 2, "print", 2, 35, 35, 35] +"md5Hex", 1, 2, "print", 2, 35, 32, "md5Hex(null):", 31, 2, "md5Hex", 1, 2, "print", 2, 35, 32, "sha256Hex(string):", +36, 0, 2, "sha256Hex", 1, 2, "print", 2, 35, 32, "sha256Hex(null):", 31, 2, "sha256Hex", 1, 2, "print", 2, 35, 32, "1", +32, "string", 32, "more", 32, "keys", 43, 4, 32, "data:", 36, 1, 2, "print", 2, 35, 32, "sha256HmacChainHex(data):", 36, +1, 2, "sha256HmacChainHex", 1, 2, "print", 2, 35, 35, 35] diff --git a/hogvm/__tests__/__snapshots__/crypto.stdout b/hogvm/__tests__/__snapshots__/crypto.stdout index dc98f5fe5e8..919fc57dc55 100644 --- a/hogvm/__tests__/__snapshots__/crypto.stdout +++ b/hogvm/__tests__/__snapshots__/crypto.stdout @@ -1,5 +1,7 @@ string: this is a secure string md5Hex(string): e7b466647ea215dbe59b00c756560911 +md5Hex(null): null sha256Hex(string): 5216c0931310b31737ef30353830c234901283544e934f54eb75f622cfb86c9d +sha256Hex(null): null data: ['1', 'string', 'more', 'keys'] sha256HmacChainHex(data): 826820d7eeca97f26ca18096be85fed346f6fd9cc18d64e72c935bea3450dbd9 diff --git a/hogvm/__tests__/crypto.hog b/hogvm/__tests__/crypto.hog index 10e98eb688b..bfff560e3c7 100644 --- a/hogvm/__tests__/crypto.hog +++ b/hogvm/__tests__/crypto.hog @@ -1,7 +1,9 @@ let string := 
'this is a secure string' print('string:', string) print('md5Hex(string):', md5Hex(string)) +print('md5Hex(null):', md5Hex(null)) print('sha256Hex(string):', sha256Hex(string)) +print('sha256Hex(null):', sha256Hex(null)) let data := ['1', 'string', 'more', 'keys'] print('data:', data) diff --git a/hogvm/python/stl/crypto.py b/hogvm/python/stl/crypto.py index e399c5821e0..77bde95f180 100644 --- a/hogvm/python/stl/crypto.py +++ b/hogvm/python/stl/crypto.py @@ -2,11 +2,15 @@ import hashlib import hmac -def md5Hex(data: str) -> str: +def md5Hex(data: str | None) -> str | None: + if data is None: + return None return hashlib.md5(data.encode()).hexdigest() -def sha256Hex(data: str) -> str: +def sha256Hex(data: str | None) -> str | None: + if data is None: + return None return hashlib.sha256(data.encode()).hexdigest() diff --git a/hogvm/typescript/package.json b/hogvm/typescript/package.json index f48fc844452..669ca2c504c 100644 --- a/hogvm/typescript/package.json +++ b/hogvm/typescript/package.json @@ -1,6 +1,6 @@ { "name": "@posthog/hogvm", - "version": "1.0.59", + "version": "1.0.60", "description": "PostHog Hog Virtual Machine", "types": "dist/index.d.ts", "source": "src/index.ts", diff --git a/hogvm/typescript/src/stl/crypto.ts b/hogvm/typescript/src/stl/crypto.ts index 3de66abd579..3d9b1e92e4a 100644 --- a/hogvm/typescript/src/stl/crypto.ts +++ b/hogvm/typescript/src/stl/crypto.ts @@ -1,6 +1,7 @@ import { ExecOptions } from '../types' -export function sha256Hex(data: string, options?: ExecOptions): string { +export function sha256Hex(data: string | null, options?: ExecOptions): string | null { + if (data === null) { return null } const crypto = options?.external?.crypto if (!crypto) { throw new Error('The crypto module is required for "sha256Hex" to work.') @@ -8,7 +9,8 @@ export function sha256Hex(data: string, options?: ExecOptions): string { return crypto.createHash('sha256').update(data).digest('hex') } -export function md5Hex(data: string, options?: ExecOptions): string { +export function md5Hex(data: string | null, options?: ExecOptions): string | null { + if (data === null) { return null } const crypto = options?.external?.crypto if (!crypto) { throw new Error('The crypto module is required for "md5Hex" to work.') diff --git a/package.json b/package.json index d64adbb2de4..a1407c71269 100644 --- a/package.json +++ b/package.json @@ -76,7 +76,7 @@ "@medv/finder": "^3.1.0", "@microlink/react-json-view": "^1.21.3", "@monaco-editor/react": "4.6.0", - "@posthog/hogvm": "^1.0.59", + "@posthog/hogvm": "^1.0.60", "@posthog/icons": "0.9.1", "@posthog/plugin-scaffold": "^1.4.4", "@react-hook/size": "^2.1.2", @@ -154,7 +154,7 @@ "pmtiles": "^2.11.0", "postcss": "^8.4.31", "postcss-preset-env": "^9.3.0", - "posthog-js": "1.187.0", + "posthog-js": "1.187.2", "posthog-js-lite": "3.0.0", "prettier": "^2.8.8", "prop-types": "^15.7.2", diff --git a/plugin-server/package.json b/plugin-server/package.json index a24ecdbf06d..e3ab517f5eb 100644 --- a/plugin-server/package.json +++ b/plugin-server/package.json @@ -54,7 +54,7 @@ "@maxmind/geoip2-node": "^3.4.0", "@posthog/clickhouse": "^1.7.0", "@posthog/cyclotron": "file:../rust/cyclotron-node", - "@posthog/hogvm": "^1.0.59", + "@posthog/hogvm": "^1.0.60", "@posthog/plugin-scaffold": "1.4.4", "@sentry/node": "^7.49.0", "@sentry/profiling-node": "^0.3.0", diff --git a/plugin-server/pnpm-lock.yaml b/plugin-server/pnpm-lock.yaml index 088161d2bac..8cef4f687b2 100644 --- a/plugin-server/pnpm-lock.yaml +++ b/plugin-server/pnpm-lock.yaml @@ -47,8 +47,8 @@ 
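The null handling added to the hogvm crypto STL above, condensed into a standalone Python snippet; the expected outputs come from the crypto.stdout snapshot in this diff:

```python
import hashlib

def md5_hex(data: str | None) -> str | None:
    if data is None:
        return None  # propagate null instead of raising on .encode()
    return hashlib.md5(data.encode()).hexdigest()

def sha256_hex(data: str | None) -> str | None:
    if data is None:
        return None
    return hashlib.sha256(data.encode()).hexdigest()

print(md5_hex("this is a secure string"))  # e7b466647ea215dbe59b00c756560911
print(md5_hex(None))                       # None (printed as `null` by Hog)
```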
dependencies: specifier: file:../rust/cyclotron-node version: file:../rust/cyclotron-node '@posthog/hogvm': - specifier: ^1.0.59 - version: 1.0.59(luxon@3.4.4) + specifier: ^1.0.60 + version: 1.0.60(luxon@3.4.4) '@posthog/plugin-scaffold': specifier: 1.4.4 version: 1.4.4 @@ -3119,8 +3119,8 @@ packages: engines: {node: '>=12'} dev: false - /@posthog/hogvm@1.0.59(luxon@3.4.4): - resolution: {integrity: sha512-4KJfCXUhK7x5Wm3pheKWDmrbQ0y1lWlLWdVEjocdjSy3wOS8hQQqaFAVEKZs7hfk9pZqvNFh2UPgD4ccpwUQjA==} + /@posthog/hogvm@1.0.60(luxon@3.4.4): + resolution: {integrity: sha512-W0FTorn5FqIaNQCMTXbNi1dJSphe/UEztDTXIhwsWLNsSO7haF3xx8JSp7vowo6R432ExjPPoIFT1gtRVV17kQ==} peerDependencies: luxon: ^3.4.4 dependencies: diff --git a/plugin-server/src/worker/ingestion/event-pipeline/runner.ts b/plugin-server/src/worker/ingestion/event-pipeline/runner.ts index 25b3d77d128..dc5cf6d8df1 100644 --- a/plugin-server/src/worker/ingestion/event-pipeline/runner.ts +++ b/plugin-server/src/worker/ingestion/event-pipeline/runner.ts @@ -259,7 +259,7 @@ export class EventPipelineRunner { event.team_id ) - if (event.event === '$exception' && event.team_id == 2) { + if (event.event === '$exception' && !event.properties?.hasOwnProperty('$sentry_event_id')) { const [exceptionAck] = await this.runStep( produceExceptionSymbolificationEventStep, [this, rawEvent], diff --git a/plugin-server/tests/worker/ingestion/event-pipeline/runner.test.ts b/plugin-server/tests/worker/ingestion/event-pipeline/runner.test.ts index 2fd02a018be..6e54ec8f3c5 100644 --- a/plugin-server/tests/worker/ingestion/event-pipeline/runner.test.ts +++ b/plugin-server/tests/worker/ingestion/event-pipeline/runner.test.ts @@ -397,7 +397,7 @@ describe('EventPipelineRunner', () => { 'prepareEventStep', 'extractHeatmapDataStep', 'createEventStep', - 'emitEventStep', + 'produceExceptionSymbolificationEventStep', ]) }) }) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 3d7b0a4db4a..f789b9f1710 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -50,8 +50,8 @@ dependencies: specifier: 4.6.0 version: 4.6.0(monaco-editor@0.49.0)(react-dom@18.2.0)(react@18.2.0) '@posthog/hogvm': - specifier: ^1.0.59 - version: 1.0.59(luxon@3.5.0) + specifier: ^1.0.60 + version: 1.0.60(luxon@3.5.0) '@posthog/icons': specifier: 0.9.1 version: 0.9.1(react-dom@18.2.0)(react@18.2.0) @@ -284,8 +284,8 @@ dependencies: specifier: ^9.3.0 version: 9.3.0(postcss@8.4.31) posthog-js: - specifier: 1.187.0 - version: 1.187.0 + specifier: 1.187.2 + version: 1.187.2 posthog-js-lite: specifier: 3.0.0 version: 3.0.0 @@ -392,7 +392,7 @@ dependencies: optionalDependencies: fsevents: specifier: ^2.3.2 - version: 2.3.3 + version: 2.3.2 devDependencies: '@babel/core': @@ -5418,8 +5418,8 @@ packages: resolution: {integrity: sha512-50/17A98tWUfQ176raKiOGXuYpLyyVMkxxG6oylzL3BPOlA6ADGdK7EYunSa4I064xerltq9TGXs8HmOk5E+vw==} dev: false - /@posthog/hogvm@1.0.59(luxon@3.5.0): - resolution: {integrity: sha512-4KJfCXUhK7x5Wm3pheKWDmrbQ0y1lWlLWdVEjocdjSy3wOS8hQQqaFAVEKZs7hfk9pZqvNFh2UPgD4ccpwUQjA==} + /@posthog/hogvm@1.0.60(luxon@3.5.0): + resolution: {integrity: sha512-W0FTorn5FqIaNQCMTXbNi1dJSphe/UEztDTXIhwsWLNsSO7haF3xx8JSp7vowo6R432ExjPPoIFT1gtRVV17kQ==} peerDependencies: luxon: ^3.4.4 dependencies: @@ -13142,7 +13142,6 @@ packages: engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} os: [darwin] requiresBuild: true - dev: true optional: true /fsevents@2.3.3: @@ -17789,8 +17788,8 @@ packages: resolution: {integrity: sha512-dyajjnfzZD1tht4N7p7iwf7nBnR1MjVaVu+MKr+7gBgA39bn28wizCIJZztZPtHy4PY0YwtSGgwfBCuG/hnHgA==} dev: false 
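The runner.ts change above stops keying exception symbolification off a hard-coded team id and instead skips events already processed by Sentry. The predicate, sketched in Python for illustration (the helper name is mine, not the pipeline's):

```python
def needs_symbolification(event: dict) -> bool:
    properties = event.get("properties") or {}
    return event.get("event") == "$exception" and "$sentry_event_id" not in properties

assert needs_symbolification({"event": "$exception", "properties": {}})
assert not needs_symbolification(
    {"event": "$exception", "properties": {"$sentry_event_id": "abc123"}}
)
```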
- /posthog-js@1.187.0: - resolution: {integrity: sha512-6AlM7jtpPTc+jncxQAddukUWka1T9ZCCySY+NAAQqy5zXHSvLb5g/dLWdRzhlxWiSSYDmLHBTsAhIngbGQJ6MA==} + /posthog-js@1.187.2: + resolution: {integrity: sha512-IGKsZ7M4AYACm5I6gGGFrv9kR/MOnVYw11XFYCLk363n0nq+ghwenoW1jJVL9gZLGKiMsConUR8rG2DD2OMKyg==} dependencies: core-js: 3.39.0 fflate: 0.4.8 @@ -18407,7 +18406,7 @@ packages: react: '>=15' dependencies: react: 18.2.0 - unlayer-types: 1.157.0 + unlayer-types: 1.167.0 dev: false /react-error-boundary@3.1.4(react@18.2.0): @@ -20954,8 +20953,8 @@ packages: resolution: {integrity: sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==} engines: {node: '>= 10.0.0'} - /unlayer-types@1.157.0: - resolution: {integrity: sha512-D9xnoeQREgUxEkaFG3q90CVicmysu8REVIkQA3alHaTG3JM+LPzdq20vjrd6V4iGuvvUD8L5EDQybSsSRS/k+g==} + /unlayer-types@1.167.0: + resolution: {integrity: sha512-H3Qq6WnC4u8hy2Qt+uueUaJkKdtTwv1V8FV5LwM+ZAD1XSMfySQK/FhpXgAbU+/nxBs8kvlJaK0I6D/64J5zpQ==} dev: false /unpipe@1.0.0: diff --git a/posthog/api/test/__snapshots__/test_decide.ambr b/posthog/api/test/__snapshots__/test_decide.ambr index 39c5cf35574..30fb4248b0b 100644 --- a/posthog/api/test/__snapshots__/test_decide.ambr +++ b/posthog/api/test/__snapshots__/test_decide.ambr @@ -712,6 +712,22 @@ ''' # --- # name: TestDecide.test_flag_with_behavioural_cohorts.5 + ''' + SELECT "posthog_group"."id", + "posthog_group"."team_id", + "posthog_group"."group_key", + "posthog_group"."group_type_index", + "posthog_group"."group_properties", + "posthog_group"."created_at", + "posthog_group"."properties_last_updated_at", + "posthog_group"."properties_last_operation", + "posthog_group"."version" + FROM "posthog_group" + WHERE "posthog_group"."team_id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecide.test_flag_with_behavioural_cohorts.6 ''' SELECT "posthog_cohort"."id", "posthog_cohort"."name", @@ -736,6 +752,22 @@ AND "posthog_cohort"."team_id" = 99999) ''' # --- +# name: TestDecide.test_flag_with_behavioural_cohorts.7 + ''' + SELECT "posthog_group"."id", + "posthog_group"."team_id", + "posthog_group"."group_key", + "posthog_group"."group_type_index", + "posthog_group"."group_properties", + "posthog_group"."created_at", + "posthog_group"."properties_last_updated_at", + "posthog_group"."properties_last_operation", + "posthog_group"."version" + FROM "posthog_group" + WHERE "posthog_group"."team_id" = 99999 + LIMIT 21 + ''' +# --- # name: TestDecide.test_flag_with_regular_cohorts ''' SELECT "posthog_hogfunction"."id", diff --git a/posthog/api/test/test_decide.py b/posthog/api/test/test_decide.py index 99a1a031c08..ecc40e634a4 100644 --- a/posthog/api/test/test_decide.py +++ b/posthog/api/test/test_decide.py @@ -2624,12 +2624,12 @@ class TestDecide(BaseTest, QueryMatchingTest): created_by=self.user, ) - with self.assertNumQueries(5): + with self.assertNumQueries(6): response = self._post_decide(api_version=3, distinct_id="example_id_1") self.assertEqual(response.json()["featureFlags"], {}) self.assertEqual(response.json()["errorsWhileComputingFlags"], True) - with self.assertNumQueries(5): + with self.assertNumQueries(6): response = self._post_decide(api_version=3, distinct_id="another_id") self.assertEqual(response.json()["featureFlags"], {}) self.assertEqual(response.json()["errorsWhileComputingFlags"], True) diff --git a/posthog/batch_exports/service.py b/posthog/batch_exports/service.py index d7a2caebe16..e8e4b1b5377 100644 --- a/posthog/batch_exports/service.py +++ b/posthog/batch_exports/service.py @@ -679,7 
+679,7 @@ def sync_batch_export(batch_export: BatchExport, created: bool): start_at=batch_export.start_at, end_at=batch_export.end_at, intervals=[ScheduleIntervalSpec(every=batch_export.interval_time_delta)], - jitter=(batch_export.interval_time_delta / 12), + jitter=(batch_export.interval_time_delta / 6), time_zone_name=batch_export.team.timezone, ), state=state, diff --git a/posthog/clickhouse/kafka_engine.py b/posthog/clickhouse/kafka_engine.py index abfe456bf6a..4b30047fc7d 100644 --- a/posthog/clickhouse/kafka_engine.py +++ b/posthog/clickhouse/kafka_engine.py @@ -35,6 +35,8 @@ KAFKA_COLUMNS_WITH_PARTITION = """ , _partition UInt64 """ +KAFKA_TIMESTAMP_MS_COLUMN = "_timestamp_ms DateTime64" + def kafka_engine(topic: str, kafka_host: str | None = None, group="group1", serialization="JSONEachRow") -> str: if kafka_host is None: diff --git a/posthog/clickhouse/migrations/0086_events_recent_table.py b/posthog/clickhouse/migrations/0086_events_recent_table.py new file mode 100644 index 00000000000..e672cab01e0 --- /dev/null +++ b/posthog/clickhouse/migrations/0086_events_recent_table.py @@ -0,0 +1,14 @@ +from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions +from posthog.models.event.sql import ( + EVENTS_RECENT_TABLE_JSON_MV_SQL, + EVENTS_RECENT_TABLE_SQL, + KAFKA_EVENTS_RECENT_TABLE_JSON_SQL, + DISTRIBUTED_EVENTS_RECENT_TABLE_SQL, +) + +operations = [ + run_sql_with_exceptions(EVENTS_RECENT_TABLE_SQL()), + run_sql_with_exceptions(KAFKA_EVENTS_RECENT_TABLE_JSON_SQL()), + run_sql_with_exceptions(EVENTS_RECENT_TABLE_JSON_MV_SQL()), + run_sql_with_exceptions(DISTRIBUTED_EVENTS_RECENT_TABLE_SQL()), +] diff --git a/posthog/hogql_queries/insights/trends/test/test_trends_data_warehouse_query.py b/posthog/hogql_queries/insights/trends/test/test_trends_data_warehouse_query.py index 3ceaf1af473..4d450157854 100644 --- a/posthog/hogql_queries/insights/trends/test/test_trends_data_warehouse_query.py +++ b/posthog/hogql_queries/insights/trends/test/test_trends_data_warehouse_query.py @@ -17,8 +17,8 @@ from posthog.schema import ( TrendsQuery, TrendsFilter, ) -from posthog.test.base import BaseTest -from posthog.warehouse.models import DataWarehouseTable, DataWarehouseCredential +from posthog.test.base import BaseTest, _create_event +from posthog.warehouse.models import DataWarehouseTable, DataWarehouseCredential, DataWarehouseJoin from boto3 import resource from botocore.config import Config @@ -268,6 +268,82 @@ class TestTrendsDataWarehouseQuery(ClickhouseTestMixin, BaseTest): assert response.results[3][1] == [0, 0, 0, 1, 0, 0, 0] assert response.results[3][2] == "d" + def test_trends_breakdown_with_event_property(self): + table_name = self.create_parquet_file() + + _create_event( + distinct_id="1", + event="a", + properties={"$feature/prop_1": "a"}, + timestamp="2023-01-01 00:00:00", + team=self.team, + ) + _create_event( + distinct_id="1", + event="b", + properties={"$feature/prop_1": "b"}, + timestamp="2023-01-01 00:00:00", + team=self.team, + ) + _create_event( + distinct_id="1", + event="c", + properties={"$feature/prop_1": "c"}, + timestamp="2023-01-01 00:00:00", + team=self.team, + ) + _create_event( + distinct_id="1", + event="d", + properties={"$feature/prop_1": "d"}, + timestamp="2023-01-01 00:00:00", + team=self.team, + ) + + DataWarehouseJoin.objects.create( + team=self.team, + source_table_name=table_name, + source_table_key="prop_1", + joining_table_name="events", + joining_table_key="event", + field_name="events", + ) + + trends_query = TrendsQuery( + 
kind="TrendsQuery", + dateRange=InsightDateRange(date_from="2023-01-01"), + series=[ + DataWarehouseNode( + id=table_name, + table_name=table_name, + id_field="id", + distinct_id_field="customer_email", + timestamp_field="created", + ) + ], + breakdownFilter=BreakdownFilter( + breakdown_type=BreakdownType.DATA_WAREHOUSE, breakdown="events.properties.$feature/prop_1" + ), + ) + + with freeze_time("2023-01-07"): + response = self.get_response(trends_query=trends_query) + + assert response.columns is not None + assert set(response.columns).issubset({"date", "total", "breakdown_value"}) + assert len(response.results) == 4 + assert response.results[0][1] == [1, 0, 0, 0, 0, 0, 0] + assert response.results[0][2] == "a" + + assert response.results[1][1] == [0, 1, 0, 0, 0, 0, 0] + assert response.results[1][2] == "b" + + assert response.results[2][1] == [0, 0, 1, 0, 0, 0, 0] + assert response.results[2][2] == "c" + + assert response.results[3][1] == [0, 0, 0, 1, 0, 0, 0] + assert response.results[3][2] == "d" + @snapshot_clickhouse_queries def test_trends_breakdown_on_view(self): from posthog.warehouse.models import DataWarehouseSavedQuery diff --git a/posthog/hogql_queries/insights/trends/trends_query_runner.py b/posthog/hogql_queries/insights/trends/trends_query_runner.py index d66695fbac9..668cd8b2afb 100644 --- a/posthog/hogql_queries/insights/trends/trends_query_runner.py +++ b/posthog/hogql_queries/insights/trends/trends_query_runner.py @@ -888,7 +888,16 @@ class TrendsQueryRunner(QueryRunner): if not table_or_view: raise ValueError(f"Table {series.table_name} not found") - field_type = dict(table_or_view.columns)[self.query.breakdownFilter.breakdown]["clickhouse"] + breakdown_key = ( + self.query.breakdownFilter.breakdown[0] + if isinstance(self.query.breakdownFilter.breakdown, list) + else self.query.breakdownFilter.breakdown + ) + + if breakdown_key not in dict(table_or_view.columns): + return False + + field_type = dict(table_or_view.columns)[breakdown_key]["clickhouse"] if field_type.startswith("Nullable("): field_type = field_type.replace("Nullable(", "")[:-1] diff --git a/posthog/hogql_queries/insights/trends/utils.py b/posthog/hogql_queries/insights/trends/utils.py index 09bbaa5b0e1..d6baf201e4d 100644 --- a/posthog/hogql_queries/insights/trends/utils.py +++ b/posthog/hogql_queries/insights/trends/utils.py @@ -26,7 +26,7 @@ def get_properties_chain( raise Exception("group_type_index missing from params") if breakdown_type == "data_warehouse": - return [breakdown_field] + return [*breakdown_field.split(".")] if breakdown_type == "data_warehouse_person_property": return ["person", *breakdown_field.split(".")] diff --git a/posthog/migrations/0521_alter_errortrackingstackframe_context.py b/posthog/migrations/0521_alter_errortrackingstackframe_context.py new file mode 100644 index 00000000000..6034b72334a --- /dev/null +++ b/posthog/migrations/0521_alter_errortrackingstackframe_context.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.15 on 2024-11-19 10:34 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0520_experiment_metrics_secondary"), + ] + + operations = [ + migrations.AlterField( + model_name="errortrackingstackframe", + name="context", + field=models.JSONField(blank=True, null=True), + ), + ] diff --git a/posthog/migrations/max_migration.txt b/posthog/migrations/max_migration.txt index 31b8c212f62..8d2eb0a2493 100644 --- a/posthog/migrations/max_migration.txt +++ b/posthog/migrations/max_migration.txt @@ -1 +1 
@@ -0520_experiment_metrics_secondary +0521_alter_errortrackingstackframe_context diff --git a/posthog/models/error_tracking/error_tracking.py b/posthog/models/error_tracking/error_tracking.py index 7e42f55cf9f..f2e3e227665 100644 --- a/posthog/models/error_tracking/error_tracking.py +++ b/posthog/models/error_tracking/error_tracking.py @@ -76,7 +76,7 @@ class ErrorTrackingStackFrame(UUIDModel): contents = models.JSONField(null=False, blank=False) resolved = models.BooleanField(null=False, blank=False) # The context around the frame, +/- a few lines, if we can get it - context = models.TextField(null=True, blank=True) + context = models.JSONField(null=True, blank=True) class Meta: indexes = [ diff --git a/posthog/models/event/sql.py b/posthog/models/event/sql.py index 89beb3739c2..67f5ac97325 100644 --- a/posthog/models/event/sql.py +++ b/posthog/models/event/sql.py @@ -4,6 +4,8 @@ from posthog.clickhouse.base_sql import COPY_ROWS_BETWEEN_TEAMS_BASE_SQL from posthog.clickhouse.indexes import index_by_kafka_timestamp from posthog.clickhouse.kafka_engine import ( KAFKA_COLUMNS, + KAFKA_COLUMNS_WITH_PARTITION, + KAFKA_TIMESTAMP_MS_COLUMN, STORAGE_POLICY, kafka_engine, trim_quotes_expr, @@ -18,7 +20,7 @@ from posthog.kafka_client.topics import KAFKA_EVENTS_JSON EVENTS_DATA_TABLE = lambda: "sharded_events" WRITABLE_EVENTS_DATA_TABLE = lambda: "writable_events" - +EVENTS_RECENT_DATA_TABLE = lambda: "events_recent" TRUNCATE_EVENTS_TABLE_SQL = ( lambda: f"TRUNCATE TABLE IF EXISTS {EVENTS_DATA_TABLE()} ON CLUSTER '{settings.CLICKHOUSE_CLUSTER}'" ) @@ -185,6 +187,86 @@ FROM {database}.kafka_events_json ) ) + +KAFKA_EVENTS_RECENT_TABLE_JSON_SQL = lambda: ( + EVENTS_TABLE_BASE_SQL + + """ + SETTINGS kafka_skip_broken_messages = 100 +""" +).format( + table_name="kafka_events_recent_json", + cluster=settings.CLICKHOUSE_CLUSTER, + engine=kafka_engine(topic=KAFKA_EVENTS_JSON, group="group1_recent"), + extra_fields="", + materialized_columns="", + indexes="", +) + +EVENTS_RECENT_TABLE_JSON_MV_SQL = ( + lambda: """ +CREATE MATERIALIZED VIEW IF NOT EXISTS events_recent_json_mv ON CLUSTER '{cluster}' +TO {database}.{target_table} +AS SELECT +uuid, +event, +properties, +timestamp, +team_id, +distinct_id, +elements_chain, +created_at, +person_id, +person_created_at, +person_properties, +group0_properties, +group1_properties, +group2_properties, +group3_properties, +group4_properties, +group0_created_at, +group1_created_at, +group2_created_at, +group3_created_at, +group4_created_at, +person_mode, +_timestamp, +_timestamp_ms, +_offset, +_partition +FROM {database}.kafka_events_recent_json +""".format( + target_table=EVENTS_RECENT_DATA_TABLE(), + cluster=settings.CLICKHOUSE_CLUSTER, + database=settings.CLICKHOUSE_DATABASE, + ) +) + +EVENTS_RECENT_TABLE_SQL = lambda: ( + EVENTS_TABLE_BASE_SQL + + """PARTITION BY toStartOfHour(_timestamp) +ORDER BY (team_id, toStartOfHour(_timestamp), event, cityHash64(distinct_id), cityHash64(uuid)) +TTL _timestamp + INTERVAL 7 DAY +{storage_policy} +""" +).format( + table_name=EVENTS_RECENT_DATA_TABLE(), + cluster=settings.CLICKHOUSE_CLUSTER, + engine=ReplacingMergeTree(EVENTS_RECENT_DATA_TABLE(), ver="_timestamp"), + extra_fields=KAFKA_COLUMNS_WITH_PARTITION + INSERTED_AT_COLUMN + f", {KAFKA_TIMESTAMP_MS_COLUMN}", + materialized_columns="", + indexes="", + storage_policy=STORAGE_POLICY(), +) + +DISTRIBUTED_EVENTS_RECENT_TABLE_SQL = lambda: EVENTS_TABLE_BASE_SQL.format( + table_name="distributed_events_recent", + cluster=settings.CLICKHOUSE_CLUSTER, + 
engine=Distributed(data_table=EVENTS_RECENT_DATA_TABLE(), sharding_key="sipHash64(distinct_id)"), + extra_fields=KAFKA_COLUMNS_WITH_PARTITION + INSERTED_AT_COLUMN + f", {KAFKA_TIMESTAMP_MS_COLUMN}", + materialized_columns="", + indexes="", +) + # Distributed engine tables are only created if CLICKHOUSE_REPLICATED # This table is responsible for writing to sharded_events based on a sharding key. diff --git a/posthog/temporal/common/asyncpa.py b/posthog/temporal/common/asyncpa.py index fd9e1bd9e84..533f085c3d4 100644 --- a/posthog/temporal/common/asyncpa.py +++ b/posthog/temporal/common/asyncpa.py @@ -16,7 +16,7 @@ class InvalidMessageFormat(Exception): class AsyncMessageReader: """Asynchronously read PyArrow messages from bytes iterator.""" - def __init__(self, bytes_iter: typing.AsyncIterator[tuple[bytes, bool]]): + def __init__(self, bytes_iter: typing.AsyncIterator[bytes]): self._bytes = bytes_iter self._buffer = bytearray() @@ -64,7 +64,7 @@ class AsyncMessageReader: async def read_until(self, n: int) -> None: """Read from self._bytes until there are at least n bytes in self._buffer.""" while len(self._buffer) < n: - bytes, _ = await anext(self._bytes) + bytes = await anext(self._bytes) self._buffer.extend(bytes) def parse_body_size(self, metadata_flatbuffer: bytearray) -> int: @@ -105,7 +105,7 @@ class AsyncMessageReader: class AsyncRecordBatchReader: """Asynchronously read PyArrow RecordBatches from an iterator of bytes.""" - def __init__(self, bytes_iter: typing.AsyncIterator[tuple[bytes, bool]]) -> None: + def __init__(self, bytes_iter: typing.AsyncIterator[bytes]) -> None: self._reader = AsyncMessageReader(bytes_iter) self._schema: None | pa.Schema = None @@ -137,7 +137,7 @@ class AsyncRecordBatchReader: class AsyncRecordBatchProducer(AsyncRecordBatchReader): - def __init__(self, bytes_iter: typing.AsyncIterator[tuple[bytes, bool]]) -> None: + def __init__(self, bytes_iter: typing.AsyncIterator[bytes]) -> None: super().__init__(bytes_iter) async def produce(self, queue: asyncio.Queue): diff --git a/posthog/temporal/common/clickhouse.py b/posthog/temporal/common/clickhouse.py index c7f39a56471..bc618eb2dbf 100644 --- a/posthog/temporal/common/clickhouse.py +++ b/posthog/temporal/common/clickhouse.py @@ -6,6 +6,7 @@ import json import ssl import typing import uuid +import structlog import aiohttp import pyarrow as pa @@ -14,6 +15,8 @@ from django.conf import settings import posthog.temporal.common.asyncpa as asyncpa +logger = structlog.get_logger() + def encode_clickhouse_data(data: typing.Any, quote_char="'") -> bytes: """Encode data for ClickHouse. @@ -78,6 +81,29 @@ def encode_clickhouse_data(data: typing.Any, quote_char="'") -> bytes: return f"{quote_char}{str_data}{quote_char}".encode() +class ChunkBytesAsyncStreamIterator: + """Async iterator of HTTP chunk bytes. + + Similar to the class provided by aiohttp, but this allows us to control + when to stop iteration. 
+ """ + + def __init__(self, stream: aiohttp.StreamReader) -> None: + self._stream = stream + + def __aiter__(self) -> "ChunkBytesAsyncStreamIterator": + return self + + async def __anext__(self) -> bytes: + data, end_of_chunk = await self._stream.readchunk() + + if data == b"" and end_of_chunk is False and self._stream.at_eof(): + await logger.adebug("At EOF, stopping chunk iteration") + raise StopAsyncIteration + + return data + + class ClickHouseClientNotConnected(Exception): """Exception raised when attempting to run an async query without connecting.""" @@ -386,7 +412,7 @@ class ClickHouseClient: This method makes sense when running with FORMAT ArrowStream, although we currently do not enforce this. """ async with self.apost_query(query, *data, query_parameters=query_parameters, query_id=query_id) as response: - reader = asyncpa.AsyncRecordBatchReader(response.content.iter_chunks()) + reader = asyncpa.AsyncRecordBatchReader(ChunkBytesAsyncStreamIterator(response.content)) async for batch in reader: yield batch @@ -405,7 +431,7 @@ class ClickHouseClient: downstream consumer tasks process them from the queue. """ async with self.apost_query(query, *data, query_parameters=query_parameters, query_id=query_id) as response: - reader = asyncpa.AsyncRecordBatchProducer(response.content.iter_chunks()) + reader = asyncpa.AsyncRecordBatchProducer(ChunkBytesAsyncStreamIterator(response.content)) await reader.produce(queue=queue) async def __aenter__(self): diff --git a/rust/.sqlx/query-085d682315a548d578f63bb48d2f1997fc9cf1fb3436d742e9374ad4f2d55614.json b/rust/.sqlx/query-085d682315a548d578f63bb48d2f1997fc9cf1fb3436d742e9374ad4f2d55614.json index 9329bd2f2da..50533517b91 100644 --- a/rust/.sqlx/query-085d682315a548d578f63bb48d2f1997fc9cf1fb3436d742e9374ad4f2d55614.json +++ b/rust/.sqlx/query-085d682315a548d578f63bb48d2f1997fc9cf1fb3436d742e9374ad4f2d55614.json @@ -12,7 +12,7 @@ "Jsonb", "Bool", "Uuid", - "Text" + "Jsonb" ] }, "nullable": [] diff --git a/rust/.sqlx/query-4b24f800767bc20852115e7406f8dc46c18f4950a7951d2c412dcba4d13fb56b.json b/rust/.sqlx/query-4b24f800767bc20852115e7406f8dc46c18f4950a7951d2c412dcba4d13fb56b.json index 515f058fad9..e4d2abe66f9 100644 --- a/rust/.sqlx/query-4b24f800767bc20852115e7406f8dc46c18f4950a7951d2c412dcba4d13fb56b.json +++ b/rust/.sqlx/query-4b24f800767bc20852115e7406f8dc46c18f4950a7951d2c412dcba4d13fb56b.json @@ -36,7 +36,7 @@ { "ordinal": 6, "name": "context", - "type_info": "Text" + "type_info": "Jsonb" } ], "parameters": { diff --git a/rust/cymbal/src/config.rs b/rust/cymbal/src/config.rs index 3bd5ae0f4c3..8ab3e69ab18 100644 --- a/rust/cymbal/src/config.rs +++ b/rust/cymbal/src/config.rs @@ -46,7 +46,7 @@ pub struct Config { #[envconfig(default = "100000000")] // 100MB - in prod, we should use closer to 1-10GB pub symbol_store_cache_max_bytes: usize, - #[envconfig(default = "http://localhost:19000")] // minio + #[envconfig(default = "http://127.0.0.1:19000")] // minio pub object_storage_endpoint: String, #[envconfig(default = "symbol_sets")] diff --git a/rust/cymbal/src/error.rs b/rust/cymbal/src/error.rs index 37bd6ae57ec..2febd84cb15 100644 --- a/rust/cymbal/src/error.rs +++ b/rust/cymbal/src/error.rs @@ -20,7 +20,7 @@ pub enum UnhandledError { KafkaError(#[from] KafkaError), #[error("Sqlx error: {0}")] SqlxError(#[from] sqlx::Error), - #[error(transparent)] + #[error("S3 error: {0}")] S3Error(#[from] Box), #[error(transparent)] ByteStreamError(#[from] ByteStreamError), // AWS specific bytestream error. 
Idk diff --git a/rust/cymbal/src/frames/records.rs b/rust/cymbal/src/frames/records.rs index 8b969f060f7..ef17a0f991b 100644 --- a/rust/cymbal/src/frames/records.rs +++ b/rust/cymbal/src/frames/records.rs @@ -44,7 +44,7 @@ impl ErrorTrackingStackFrame { E: Executor<'c, Database = sqlx::Postgres>, { let context = if let Some(context) = &self.context { - Some(serde_json::to_string(context)?) + Some(serde_json::to_value(context)?) } else { None }; @@ -66,7 +66,7 @@ impl ErrorTrackingStackFrame { serde_json::to_value(&self.contents)?, self.resolved, Uuid::now_v7(), - context + context, ).execute(e).await?; Ok(()) } @@ -86,7 +86,7 @@ impl ErrorTrackingStackFrame { symbol_set_id: Option, contents: Value, resolved: bool, - context: Option, + context: Option, } let res = sqlx::query_as!( Returned, @@ -109,10 +109,11 @@ impl ErrorTrackingStackFrame { // and so when we load a frame record we need to patch back up the context onto the frame, // since we dropped it when we serialised the frame during saving. let mut frame: Frame = serde_json::from_value(found.contents)?; - let context = if let Some(context) = found.context.as_ref() { + + let context = if let Some(context) = found.context { // We serialise the frame context as a json string, but it's a structure we have to manually // deserialise back into the frame. - Some(serde_json::from_str(context)?) + serde_json::from_value(context)? } else { None }; diff --git a/rust/cymbal/tests/test_migrations/20241101134611_test_migration_for_symbol_set_saving_tests.sql b/rust/cymbal/tests/test_migrations/20241101134611_test_migration_for_symbol_set_saving_tests.sql index 88029256999..b9b73d3a43d 100644 --- a/rust/cymbal/tests/test_migrations/20241101134611_test_migration_for_symbol_set_saving_tests.sql +++ b/rust/cymbal/tests/test_migrations/20241101134611_test_migration_for_symbol_set_saving_tests.sql @@ -19,7 +19,7 @@ CREATE TABLE IF NOT EXISTS posthog_errortrackingstackframe ( symbol_set_id UUID, contents JSONB NOT NULL, resolved BOOLEAN NOT NULL, - context TEXT, + context JSONB, UNIQUE(raw_id, team_id) ); diff --git a/rust/feature-flags/src/v0_endpoint.rs b/rust/feature-flags/src/api/endpoint.rs similarity index 95% rename from rust/feature-flags/src/v0_endpoint.rs rename to rust/feature-flags/src/api/endpoint.rs index 9adfa67e882..e995cfd5dc1 100644 --- a/rust/feature-flags/src/v0_endpoint.rs +++ b/rust/feature-flags/src/api/endpoint.rs @@ -1,8 +1,9 @@ use std::net::IpAddr; use crate::{ - api::{FlagError, FlagsResponse}, - request_handler::{process_request, FlagsQueryParams, RequestContext}, + api::errors::FlagError, + api::handler::{process_request, FlagsQueryParams, RequestContext}, + api::types::FlagsResponse, router, }; // TODO: stream this instead diff --git a/rust/feature-flags/src/api.rs b/rust/feature-flags/src/api/errors.rs similarity index 84% rename from rust/feature-flags/src/api.rs rename to rust/feature-flags/src/api/errors.rs index be21c1c37f5..d2f1de10a3f 100644 --- a/rust/feature-flags/src/api.rs +++ b/rust/feature-flags/src/api/errors.rs @@ -1,60 +1,9 @@ -use std::collections::HashMap; - use axum::http::StatusCode; use axum::response::{IntoResponse, Response}; -use serde::{Deserialize, Serialize}; use thiserror::Error; -use crate::database::CustomDatabaseError; -use crate::redis::CustomRedisError; - -#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)] -pub enum FlagsResponseCode { - Ok = 1, -} - -#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)] -#[serde(untagged)] -pub enum FlagValue { - Boolean(bool), - String(String), -} - 
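Why the cymbal change above swaps serde_json::to_string for to_value now that the context column is JSONB: persisting an already-serialised string into a JSON column double-encodes the value. Illustrated with plain json in Python:

```python
import json

context = {"line": "boom()", "before": ["a()"], "after": ["c()"]}
as_text = json.dumps(context)         # what the old TEXT column held
double_encoded = json.dumps(as_text)  # a JSONB column fed that string
assert json.loads(as_text) == context
assert json.loads(double_encoded) == as_text  # just a string again, not an object
```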
-// TODO the following two types are kinda general, maybe we should move them to a shared module -#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)] -#[serde(untagged)] -pub enum BooleanOrStringObject { - Boolean(bool), - Object(HashMap), -} - -#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)] -#[serde(untagged)] -pub enum BooleanOrBooleanObject { - Boolean(bool), - Object(HashMap), -} - -#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct FlagsResponse { - pub error_while_computing_flags: bool, - pub feature_flags: HashMap, - // TODO support the other fields in the payload - // pub config: HashMap, - // pub toolbar_params: HashMap, - // pub is_authenticated: bool, - // pub supported_compression: Vec, - // pub session_recording: bool, - // pub feature_flag_payloads: HashMap, - // pub capture_performance: BooleanOrBooleanObject, - // #[serde(rename = "autocapture_opt_out")] - // pub autocapture_opt_out: bool, - // pub autocapture_exceptions: BooleanOrStringObject, - // pub surveys: bool, - // pub heatmaps: bool, - // pub site_apps: Vec, -} +use crate::client::database::CustomDatabaseError; +use crate::client::redis::CustomRedisError; #[derive(Error, Debug)] pub enum ClientFacingError { diff --git a/rust/feature-flags/src/request_handler.rs b/rust/feature-flags/src/api/handler.rs similarity index 97% rename from rust/feature-flags/src/request_handler.rs rename to rust/feature-flags/src/api/handler.rs index 5ef43896e64..26c75276785 100644 --- a/rust/feature-flags/src/request_handler.rs +++ b/rust/feature-flags/src/api/handler.rs @@ -1,11 +1,12 @@ use crate::{ - api::{FlagError, FlagsResponse}, - cohort_cache::CohortCacheManager, - database::Client, - flag_definitions::FeatureFlagList, - flag_matching::{FeatureFlagMatcher, GroupTypeMappingCache}, - flag_request::FlagRequest, - geoip::GeoIpClient, + api::errors::FlagError, + api::types::FlagsResponse, + client::database::Client, + client::geoip::GeoIpClient, + cohort::cohort_cache_manager::CohortCacheManager, + flags::flag_matching::{FeatureFlagMatcher, GroupTypeMappingCache}, + flags::flag_models::FeatureFlagList, + flags::flag_request::FlagRequest, router, }; use axum::{extract::State, http::HeaderMap}; @@ -254,10 +255,13 @@ fn decompress_gzip(compressed: Bytes) -> Result { #[cfg(test)] mod tests { use crate::{ - api::FlagValue, + api::types::FlagValue, config::Config, - flag_definitions::{FeatureFlag, FlagFilters, FlagGroupType, OperatorType, PropertyFilter}, - test_utils::{insert_new_team_in_pg, setup_pg_reader_client, setup_pg_writer_client}, + flags::flag_models::{FeatureFlag, FlagFilters, FlagGroupType}, + properties::property_models::{OperatorType, PropertyFilter}, + utils::test_utils::{ + insert_new_team_in_pg, setup_pg_reader_client, setup_pg_writer_client, + }, }; use super::*; diff --git a/rust/feature-flags/src/api/mod.rs b/rust/feature-flags/src/api/mod.rs new file mode 100644 index 00000000000..7ccf71dc5fe --- /dev/null +++ b/rust/feature-flags/src/api/mod.rs @@ -0,0 +1,4 @@ +pub mod endpoint; +pub mod errors; +pub mod handler; +pub mod types; diff --git a/rust/feature-flags/src/api/types.rs b/rust/feature-flags/src/api/types.rs new file mode 100644 index 00000000000..3eb81b7d1ad --- /dev/null +++ b/rust/feature-flags/src/api/types.rs @@ -0,0 +1,21 @@ +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; + +#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)] +pub enum FlagsResponseCode { + Ok = 1, +} + +#[derive(Debug, PartialEq, Eq, Deserialize, 
Serialize)] +#[serde(untagged)] +pub enum FlagValue { + Boolean(bool), + String(String), +} + +#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct FlagsResponse { + pub error_while_computing_flags: bool, + pub feature_flags: HashMap, +} diff --git a/rust/feature-flags/src/database.rs b/rust/feature-flags/src/client/database.rs similarity index 100% rename from rust/feature-flags/src/database.rs rename to rust/feature-flags/src/client/database.rs diff --git a/rust/feature-flags/src/geoip.rs b/rust/feature-flags/src/client/geoip.rs similarity index 100% rename from rust/feature-flags/src/geoip.rs rename to rust/feature-flags/src/client/geoip.rs diff --git a/rust/feature-flags/src/client/mod.rs b/rust/feature-flags/src/client/mod.rs new file mode 100644 index 00000000000..dc34e1e6c12 --- /dev/null +++ b/rust/feature-flags/src/client/mod.rs @@ -0,0 +1,3 @@ +pub mod database; +pub mod geoip; +pub mod redis; diff --git a/rust/feature-flags/src/redis.rs b/rust/feature-flags/src/client/redis.rs similarity index 100% rename from rust/feature-flags/src/redis.rs rename to rust/feature-flags/src/client/redis.rs diff --git a/rust/feature-flags/src/cohort_cache.rs b/rust/feature-flags/src/cohort/cohort_cache_manager.rs similarity index 94% rename from rust/feature-flags/src/cohort_cache.rs rename to rust/feature-flags/src/cohort/cohort_cache_manager.rs index 68894c19f88..54955356593 100644 --- a/rust/feature-flags/src/cohort_cache.rs +++ b/rust/feature-flags/src/cohort/cohort_cache_manager.rs @@ -1,6 +1,6 @@ -use crate::api::FlagError; -use crate::cohort_models::Cohort; -use crate::flag_matching::{PostgresReader, TeamId}; +use crate::api::errors::FlagError; +use crate::cohort::cohort_models::Cohort; +use crate::flags::flag_matching::{PostgresReader, TeamId}; use moka::future::Cache; use std::time::Duration; @@ -74,8 +74,8 @@ impl CohortCacheManager { #[cfg(test)] mod tests { use super::*; - use crate::cohort_models::Cohort; - use crate::test_utils::{ + use crate::cohort::cohort_models::Cohort; + use crate::utils::test_utils::{ insert_cohort_for_team_in_pg, insert_new_team_in_pg, setup_pg_reader_client, setup_pg_writer_client, }; @@ -84,15 +84,15 @@ mod tests { /// Helper function to setup a new team for testing. async fn setup_test_team( - writer_client: Arc, + writer_client: Arc, ) -> Result { - let team = crate::test_utils::insert_new_team_in_pg(writer_client, None).await?; + let team = insert_new_team_in_pg(writer_client, None).await?; Ok(team.id) } /// Helper function to insert a cohort for a team. 
async fn setup_test_cohort( - writer_client: Arc, + writer_client: Arc, team_id: TeamId, name: Option, ) -> Result { diff --git a/rust/feature-flags/src/cohort_models.rs b/rust/feature-flags/src/cohort/cohort_models.rs similarity index 95% rename from rust/feature-flags/src/cohort_models.rs rename to rust/feature-flags/src/cohort/cohort_models.rs index d1099839017..947668b6fdb 100644 --- a/rust/feature-flags/src/cohort_models.rs +++ b/rust/feature-flags/src/cohort/cohort_models.rs @@ -1,4 +1,4 @@ -use crate::flag_definitions::PropertyFilter; +use crate::properties::property_models::PropertyFilter; use serde::{Deserialize, Serialize}; use sqlx::FromRow; diff --git a/rust/feature-flags/src/cohort_operations.rs b/rust/feature-flags/src/cohort/cohort_operations.rs similarity index 97% rename from rust/feature-flags/src/cohort_operations.rs rename to rust/feature-flags/src/cohort/cohort_operations.rs index ea4214ccdc0..b987ae3e225 100644 --- a/rust/feature-flags/src/cohort_operations.rs +++ b/rust/feature-flags/src/cohort/cohort_operations.rs @@ -2,8 +2,11 @@ use std::collections::HashSet; use std::sync::Arc; use tracing::instrument; -use crate::cohort_models::{Cohort, CohortId, CohortProperty, InnerCohortProperty}; -use crate::{api::FlagError, database::Client as DatabaseClient, flag_definitions::PropertyFilter}; +use crate::cohort::cohort_models::{Cohort, CohortId, CohortProperty, InnerCohortProperty}; +use crate::{ + api::errors::FlagError, client::database::Client as DatabaseClient, + properties::property_models::PropertyFilter, +}; impl Cohort { /// Returns a cohort from postgres given a cohort_id and team_id @@ -185,8 +188,8 @@ impl InnerCohortProperty { mod tests { use super::*; use crate::{ - cohort_models::{CohortPropertyType, CohortValues}, - test_utils::{ + cohort::cohort_models::{CohortPropertyType, CohortValues}, + utils::test_utils::{ insert_cohort_for_team_in_pg, insert_new_team_in_pg, setup_pg_reader_client, setup_pg_writer_client, }, diff --git a/rust/feature-flags/src/cohort/mod.rs b/rust/feature-flags/src/cohort/mod.rs new file mode 100644 index 00000000000..bf51554a830 --- /dev/null +++ b/rust/feature-flags/src/cohort/mod.rs @@ -0,0 +1,3 @@ +pub mod cohort_cache_manager; +pub mod cohort_models; +pub mod cohort_operations; diff --git a/rust/feature-flags/src/flag_analytics.rs b/rust/feature-flags/src/flags/flag_analytics.rs similarity index 94% rename from rust/feature-flags/src/flag_analytics.rs rename to rust/feature-flags/src/flags/flag_analytics.rs index 6bdfcb4b2e9..fe65d08a602 100644 --- a/rust/feature-flags/src/flag_analytics.rs +++ b/rust/feature-flags/src/flags/flag_analytics.rs @@ -2,8 +2,8 @@ use anyhow::Result; use std::sync::Arc; use std::time::{SystemTime, UNIX_EPOCH}; -use crate::flag_request::FlagRequestType; -use crate::redis::{Client as RedisClient, CustomRedisError}; +use crate::client::redis::{Client as RedisClient, CustomRedisError}; +use crate::flags::flag_request::FlagRequestType; const CACHE_BUCKET_SIZE: u64 = 60 * 2; // duration in seconds @@ -37,7 +37,7 @@ pub async fn increment_request_count( #[cfg(test)] mod tests { use super::*; - use crate::test_utils::setup_redis_client; + use crate::utils::test_utils::setup_redis_client; #[tokio::test] async fn test_get_team_request_key() { diff --git a/rust/feature-flags/src/feature_flag_match_reason.rs b/rust/feature-flags/src/flags/flag_match_reason.rs similarity index 100% rename from rust/feature-flags/src/feature_flag_match_reason.rs rename to rust/feature-flags/src/flags/flag_match_reason.rs diff --git 
a/rust/feature-flags/src/flag_matching.rs b/rust/feature-flags/src/flags/flag_matching.rs similarity index 99% rename from rust/feature-flags/src/flag_matching.rs rename to rust/feature-flags/src/flags/flag_matching.rs index d9332fce4e4..ea04f6fb00b 100644 --- a/rust/feature-flags/src/flag_matching.rs +++ b/rust/feature-flags/src/flags/flag_matching.rs @@ -1,14 +1,14 @@ -use crate::{ - api::{FlagError, FlagValue, FlagsResponse}, - cohort_cache::CohortCacheManager, - cohort_models::{Cohort, CohortId}, - database::Client as DatabaseClient, - feature_flag_match_reason::FeatureFlagMatchReason, - flag_definitions::{FeatureFlag, FeatureFlagList, FlagGroupType, OperatorType, PropertyFilter}, - metrics_consts::{FLAG_EVALUATION_ERROR_COUNTER, FLAG_HASH_KEY_WRITES_COUNTER}, - metrics_utils::parse_exception_for_prometheus_label, - property_matching::match_property, -}; +use crate::api::errors::FlagError; +use crate::api::types::{FlagValue, FlagsResponse}; +use crate::client::database::Client as DatabaseClient; +use crate::cohort::cohort_cache_manager::CohortCacheManager; +use crate::cohort::cohort_models::{Cohort, CohortId}; +use crate::flags::flag_match_reason::FeatureFlagMatchReason; +use crate::flags::flag_models::{FeatureFlag, FeatureFlagList, FlagGroupType}; +use crate::metrics::metrics_consts::{FLAG_EVALUATION_ERROR_COUNTER, FLAG_HASH_KEY_WRITES_COUNTER}; +use crate::metrics::metrics_utils::parse_exception_for_prometheus_label; +use crate::properties::property_matching::match_property; +use crate::properties::property_models::{OperatorType, PropertyFilter}; use anyhow::Result; use common_metrics::inc; use petgraph::algo::{is_cyclic_directed, toposort}; @@ -1796,11 +1796,11 @@ mod tests { use super::*; use crate::{ - flag_definitions::{ + flags::flag_models::{ FeatureFlagRow, FlagFilters, MultivariateFlagOptions, MultivariateFlagVariant, - OperatorType, }, - test_utils::{ + properties::property_models::OperatorType, + utils::test_utils::{ add_person_to_cohort, get_person_id_by_distinct_id, insert_cohort_for_team_in_pg, insert_flag_for_team_in_pg, insert_new_team_in_pg, insert_person_for_team_in_pg, setup_pg_reader_client, setup_pg_writer_client, diff --git a/rust/feature-flags/src/flags/flag_models.rs b/rust/feature-flags/src/flags/flag_models.rs new file mode 100644 index 00000000000..7c76c2531b7 --- /dev/null +++ b/rust/feature-flags/src/flags/flag_models.rs @@ -0,0 +1,70 @@ +use serde::{Deserialize, Serialize}; + +use crate::properties::property_models::PropertyFilter; + +// TRICKY: This cache data is coming from django-redis. If it ever goes out of sync, we'll bork. +// TODO: Add integration tests across repos to ensure this doesn't happen. 
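The TRICKY comment above is about the Redis contract between django-redis and this service: both sides must build the team flags cache key identically or reads silently miss. A tiny sketch of that key construction (the prefix is the constant defined just below; the helper name is illustrative):

```python
TEAM_FLAGS_CACHE_PREFIX = "posthog:1:team_feature_flags_"

def team_flags_key(team_id: int) -> str:
    # the Django writer and the Rust reader must agree on this exact format
    return f"{TEAM_FLAGS_CACHE_PREFIX}{team_id}"

assert team_flags_key(2) == "posthog:1:team_feature_flags_2"
```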
+pub const TEAM_FLAGS_CACHE_PREFIX: &str = "posthog:1:team_feature_flags_"; + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct FlagGroupType { + pub properties: Option>, + pub rollout_percentage: Option, + pub variant: Option, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct MultivariateFlagVariant { + pub key: String, + pub name: Option, + pub rollout_percentage: f64, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct MultivariateFlagOptions { + pub variants: Vec, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct FlagFilters { + pub groups: Vec, + pub multivariate: Option, + pub aggregation_group_type_index: Option, + pub payloads: Option, + pub super_groups: Option>, +} + +// TODO: see if you can combine these two structs, like we do with cohort models +// this will require not deserializing on read and instead doing it lazily, on-demand +// (which, tbh, is probably a better idea) +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct FeatureFlag { + pub id: i32, + pub team_id: i32, + pub name: Option, + pub key: String, + pub filters: FlagFilters, + #[serde(default)] + pub deleted: bool, + #[serde(default)] + pub active: bool, + #[serde(default)] + pub ensure_experience_continuity: bool, +} + +#[derive(Debug, Serialize, sqlx::FromRow)] +pub struct FeatureFlagRow { + pub id: i32, + pub team_id: i32, + pub name: Option, + pub key: String, + pub filters: serde_json::Value, + pub deleted: bool, + pub active: bool, + pub ensure_experience_continuity: bool, +} + +#[derive(Clone, Debug, Default, Deserialize, Serialize)] +pub struct FeatureFlagList { + pub flags: Vec, +} diff --git a/rust/feature-flags/src/flag_definitions.rs b/rust/feature-flags/src/flags/flag_operations.rs similarity index 94% rename from rust/feature-flags/src/flag_definitions.rs rename to rust/feature-flags/src/flags/flag_operations.rs index d62ecc9e0e0..0bb357b7eca 100644 --- a/rust/feature-flags/src/flag_definitions.rs +++ b/rust/feature-flags/src/flags/flag_operations.rs @@ -1,52 +1,12 @@ -use crate::{ - api::FlagError, cohort_models::CohortId, database::Client as DatabaseClient, - redis::Client as RedisClient, -}; -use serde::{Deserialize, Serialize}; +use crate::api::errors::FlagError; +use crate::client::database::Client as DatabaseClient; +use crate::client::redis::Client as RedisClient; +use crate::cohort::cohort_models::CohortId; +use crate::flags::flag_models::*; +use crate::properties::property_models::PropertyFilter; use std::sync::Arc; use tracing::instrument; -// TRICKY: This cache data is coming from django-redis. If it ever goes out of sync, we'll bork. -// TODO: Add integration tests across repos to ensure this doesn't happen. -pub const TEAM_FLAGS_CACHE_PREFIX: &str = "posthog:1:team_feature_flags_"; - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize)] -#[serde(rename_all = "snake_case")] -pub enum OperatorType { - Exact, - IsNot, - Icontains, - NotIcontains, - Regex, - NotRegex, - Gt, - Lt, - Gte, - Lte, - IsSet, - IsNotSet, - IsDateExact, - IsDateAfter, - IsDateBefore, - In, - NotIn, -} - -#[derive(Debug, Clone, Deserialize, Serialize)] -pub struct PropertyFilter { - pub key: String, - // TODO: Probably need a default for value? - // incase operators like is_set, is_not_set are used - // not guaranteed to have a value, if say created via api - pub value: serde_json::Value, - pub operator: Option, - #[serde(rename = "type")] - // TODO: worth making a enum here to differentiate between cohort and person filters? 
- pub prop_type: String, - pub negation: Option, - pub group_type_index: Option, -} - impl PropertyFilter { /// Checks if the filter is a cohort filter pub fn is_cohort(&self) -> bool { @@ -63,64 +23,6 @@ impl PropertyFilter { } } -#[derive(Debug, Clone, Deserialize, Serialize)] -pub struct FlagGroupType { - pub properties: Option>, - pub rollout_percentage: Option, - pub variant: Option, -} - -#[derive(Debug, Clone, Deserialize, Serialize)] -pub struct MultivariateFlagVariant { - pub key: String, - pub name: Option, - pub rollout_percentage: f64, -} - -#[derive(Debug, Clone, Deserialize, Serialize)] -pub struct MultivariateFlagOptions { - pub variants: Vec, -} - -#[derive(Debug, Clone, Deserialize, Serialize)] -pub struct FlagFilters { - pub groups: Vec, - pub multivariate: Option, - pub aggregation_group_type_index: Option, - pub payloads: Option, - pub super_groups: Option>, -} - -// TODO: see if you can combine these two structs, like we do with cohort models -// this will require not deserializing on read and instead doing it lazily, on-demand -// (which, tbh, is probably a better idea) -#[derive(Debug, Clone, Deserialize, Serialize)] -pub struct FeatureFlag { - pub id: i32, - pub team_id: i32, - pub name: Option, - pub key: String, - pub filters: FlagFilters, - #[serde(default)] - pub deleted: bool, - #[serde(default)] - pub active: bool, - #[serde(default)] - pub ensure_experience_continuity: bool, -} - -#[derive(Debug, Serialize, sqlx::FromRow)] -pub struct FeatureFlagRow { - pub id: i32, - pub team_id: i32, - pub name: Option, - pub key: String, - pub filters: serde_json::Value, - pub deleted: bool, - pub active: bool, - pub ensure_experience_continuity: bool, -} - impl FeatureFlag { pub fn get_group_type_index(&self) -> Option { self.filters.aggregation_group_type_index @@ -146,11 +48,6 @@ impl FeatureFlag { } } -#[derive(Clone, Debug, Default, Deserialize, Serialize)] -pub struct FeatureFlagList { - pub flags: Vec, -} - impl FeatureFlagList { /// Returns feature flags from redis given a team_id #[instrument(skip_all)] @@ -243,14 +140,14 @@ impl FeatureFlagList { #[cfg(test)] mod tests { - use crate::flag_definitions; + use crate::{flags::flag_models::*, properties::property_models::OperatorType}; use rand::Rng; use serde_json::json; use std::time::Instant; use tokio::task; use super::*; - use crate::test_utils::{ + use crate::utils::test_utils::{ insert_flag_for_team_in_pg, insert_flags_for_team_in_redis, insert_new_team_in_pg, insert_new_team_in_redis, setup_invalid_pg_client, setup_pg_reader_client, setup_redis_client, @@ -803,6 +700,7 @@ mod tests { } } } + #[tokio::test] async fn test_flag_with_super_groups() { let redis_client = setup_redis_client(None); @@ -1114,7 +1012,7 @@ mod tests { redis_client .set( - format!("{}{}", flag_definitions::TEAM_FLAGS_CACHE_PREFIX, team.id), + format!("{}{}", TEAM_FLAGS_CACHE_PREFIX, team.id), "not a json".to_string(), ) .await diff --git a/rust/feature-flags/src/flag_request.rs b/rust/feature-flags/src/flags/flag_request.rs similarity index 96% rename from rust/feature-flags/src/flag_request.rs rename to rust/feature-flags/src/flags/flag_request.rs index 1cf64eb879a..89890505c6c 100644 --- a/rust/feature-flags/src/flag_request.rs +++ b/rust/feature-flags/src/flags/flag_request.rs @@ -7,8 +7,11 @@ use serde_json::Value; use tracing::instrument; use crate::{ - api::FlagError, database::Client as DatabaseClient, flag_definitions::FeatureFlagList, - metrics_consts::FLAG_CACHE_HIT_COUNTER, redis::Client as RedisClient, team::Team, + 
api::errors::FlagError, + client::{database::Client as DatabaseClient, redis::Client as RedisClient}, + flags::flag_models::FeatureFlagList, + metrics::metrics_consts::FLAG_CACHE_HIT_COUNTER, + team::team_models::Team, }; #[derive(Debug, Clone, Copy)] @@ -204,14 +207,17 @@ impl FlagRequest { mod tests { use std::collections::HashMap; - use crate::api::FlagError; - use crate::flag_definitions::{ - FeatureFlag, FeatureFlagList, FlagFilters, FlagGroupType, OperatorType, PropertyFilter, - TEAM_FLAGS_CACHE_PREFIX, + use crate::api::errors::FlagError; + use crate::flags::flag_models::{ + FeatureFlag, FeatureFlagList, FlagFilters, FlagGroupType, TEAM_FLAGS_CACHE_PREFIX, + }; + + use crate::flags::flag_request::FlagRequest; + use crate::properties::property_models::{OperatorType, PropertyFilter}; + use crate::team::team_models::Team; + use crate::utils::test_utils::{ + insert_new_team_in_redis, setup_pg_reader_client, setup_redis_client, }; - use crate::flag_request::FlagRequest; - use crate::team::Team; - use crate::test_utils::{insert_new_team_in_redis, setup_pg_reader_client, setup_redis_client}; use bytes::Bytes; use serde_json::json; diff --git a/rust/feature-flags/src/flags/mod.rs b/rust/feature-flags/src/flags/mod.rs new file mode 100644 index 00000000000..0555b993828 --- /dev/null +++ b/rust/feature-flags/src/flags/mod.rs @@ -0,0 +1,6 @@ +pub mod flag_analytics; +pub mod flag_match_reason; +pub mod flag_matching; +pub mod flag_models; +pub mod flag_operations; +pub mod flag_request; diff --git a/rust/feature-flags/src/lib.rs b/rust/feature-flags/src/lib.rs index 67659bfcf9d..9f2fa1d5d68 100644 --- a/rust/feature-flags/src/lib.rs +++ b/rust/feature-flags/src/lib.rs @@ -1,24 +1,13 @@ pub mod api; -pub mod cohort_cache; -pub mod cohort_models; -pub mod cohort_operations; +pub mod client; +pub mod cohort; pub mod config; -pub mod database; -pub mod feature_flag_match_reason; -pub mod flag_analytics; -pub mod flag_definitions; -pub mod flag_matching; -pub mod flag_request; -pub mod geoip; -pub mod metrics_consts; -pub mod metrics_utils; -pub mod property_matching; -pub mod redis; -pub mod request_handler; +pub mod flags; +pub mod metrics; +pub mod properties; pub mod router; pub mod server; pub mod team; -pub mod v0_endpoint; // Test modules don't need to be compiled with main binary // #[cfg(test)] @@ -26,4 +15,4 @@ pub mod v0_endpoint; // or make it a separate feature using cfg(feature = "integration-tests") // and then use this feature only in tests. 
// For now, ok to just include in binary -pub mod test_utils; +pub mod utils; diff --git a/rust/feature-flags/src/metrics_consts.rs b/rust/feature-flags/src/metrics/metrics_consts.rs similarity index 100% rename from rust/feature-flags/src/metrics_consts.rs rename to rust/feature-flags/src/metrics/metrics_consts.rs diff --git a/rust/feature-flags/src/metrics_utils.rs b/rust/feature-flags/src/metrics/metrics_utils.rs similarity index 98% rename from rust/feature-flags/src/metrics_utils.rs rename to rust/feature-flags/src/metrics/metrics_utils.rs index e17b4caf13d..1abd88e9474 100644 --- a/rust/feature-flags/src/metrics_utils.rs +++ b/rust/feature-flags/src/metrics/metrics_utils.rs @@ -1,4 +1,4 @@ -use crate::{api::FlagError, config::TeamIdsToTrack}; +use crate::{api::errors::FlagError, config::TeamIdsToTrack}; pub fn team_id_label_filter( team_ids_to_track: TeamIdsToTrack, diff --git a/rust/feature-flags/src/metrics/mod.rs b/rust/feature-flags/src/metrics/mod.rs new file mode 100644 index 00000000000..3b1364c3aed --- /dev/null +++ b/rust/feature-flags/src/metrics/mod.rs @@ -0,0 +1,2 @@ +pub mod metrics_consts; +pub mod metrics_utils; diff --git a/rust/feature-flags/src/properties/mod.rs b/rust/feature-flags/src/properties/mod.rs new file mode 100644 index 00000000000..2c3ad0067d3 --- /dev/null +++ b/rust/feature-flags/src/properties/mod.rs @@ -0,0 +1,2 @@ +pub mod property_matching; +pub mod property_models; diff --git a/rust/feature-flags/src/property_matching.rs b/rust/feature-flags/src/properties/property_matching.rs similarity index 99% rename from rust/feature-flags/src/property_matching.rs rename to rust/feature-flags/src/properties/property_matching.rs index 84479f13161..3389e82b211 100644 --- a/rust/feature-flags/src/property_matching.rs +++ b/rust/feature-flags/src/properties/property_matching.rs @@ -1,6 +1,6 @@ use std::collections::HashMap; -use crate::flag_definitions::{OperatorType, PropertyFilter}; +use crate::properties::property_models::{OperatorType, PropertyFilter}; use regex::Regex; use serde_json::Value; diff --git a/rust/feature-flags/src/properties/property_models.rs b/rust/feature-flags/src/properties/property_models.rs new file mode 100644 index 00000000000..620c9175348 --- /dev/null +++ b/rust/feature-flags/src/properties/property_models.rs @@ -0,0 +1,38 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize)] +#[serde(rename_all = "snake_case")] +pub enum OperatorType { + Exact, + IsNot, + Icontains, + NotIcontains, + Regex, + NotRegex, + Gt, + Lt, + Gte, + Lte, + IsSet, + IsNotSet, + IsDateExact, + IsDateAfter, + IsDateBefore, + In, + NotIn, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct PropertyFilter { + pub key: String, + // TODO: Probably need a default for value? + // incase operators like is_set, is_not_set are used + // not guaranteed to have a value, if say created via api + pub value: serde_json::Value, + pub operator: Option<OperatorType>, + #[serde(rename = "type")] + // TODO: worth making a enum here to differentiate between cohort and person filters? + pub prop_type: String, + pub negation: Option<bool>, + pub group_type_index: Option<i32>, +}
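The serde attributes on these models carry the wire format: operator names arrive in snake_case, and the JSON key "type" lands on `prop_type`. A small sketch of the expected mapping (hypothetical check, not part of the diff):

```rust
// Hypothetical sketch, assuming the property_models types above are in scope.
use feature_flags::properties::property_models::{OperatorType, PropertyFilter};

fn main() {
    let raw = r#"{
        "key": "email",
        "value": "@posthog.com",
        "operator": "icontains",
        "type": "person"
    }"#;

    let filter: PropertyFilter = serde_json::from_str(raw).expect("valid filter JSON");
    // "icontains" maps to OperatorType::Icontains via rename_all = "snake_case".
    assert_eq!(filter.operator, Some(OperatorType::Icontains));
    // The JSON key "type" lands on prop_type via #[serde(rename = "type")].
    assert_eq!(filter.prop_type, "person");
    // Option fields left out of the payload deserialize to None.
    assert_eq!(filter.negation, None);
}
```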
diff --git a/rust/feature-flags/src/router.rs b/rust/feature-flags/src/router.rs index e34ea31a3c6..46706586a07 100644 --- a/rust/feature-flags/src/router.rs +++ b/rust/feature-flags/src/router.rs @@ -9,13 +9,13 @@ use health::HealthRegistry; use tower::limit::ConcurrencyLimitLayer; use crate::{ - cohort_cache::CohortCacheManager, + api::endpoint, + client::{ + database::Client as DatabaseClient, geoip::GeoIpClient, redis::Client as RedisClient, + }, + cohort::cohort_cache_manager::CohortCacheManager, config::{Config, TeamIdsToTrack}, - database::Client as DatabaseClient, - geoip::GeoIpClient, - metrics_utils::team_id_label_filter, - redis::Client as RedisClient, - v0_endpoint, + metrics::metrics_utils::team_id_label_filter, }; #[derive(Clone)] @@ -56,7 +56,7 @@ where .route("/_liveness", get(move || ready(liveness.get_status()))); let flags_router = Router::new() - .route("/flags", post(v0_endpoint::flags).get(v0_endpoint::flags)) + .route("/flags", post(endpoint::flags).get(endpoint::flags)) .layer(ConcurrencyLimitLayer::new(config.max_concurrency)) .with_state(state); diff --git a/rust/feature-flags/src/server.rs b/rust/feature-flags/src/server.rs index 69ff759ddfc..12a79b4f1b4 100644 --- a/rust/feature-flags/src/server.rs +++ b/rust/feature-flags/src/server.rs @@ -6,11 +6,11 @@ use std::time::Duration; use health::{HealthHandle, HealthRegistry}; use tokio::net::TcpListener; -use crate::cohort_cache::CohortCacheManager; +use crate::client::database::get_pool; +use crate::client::geoip::GeoIpClient; +use crate::client::redis::RedisClient; +use crate::cohort::cohort_cache_manager::CohortCacheManager; use crate::config::Config; -use crate::database::get_pool; -use crate::geoip::GeoIpClient; -use crate::redis::RedisClient; use crate::router; pub async fn serve<F>(config: Config, listener: TcpListener, shutdown: F) diff --git a/rust/feature-flags/src/team/mod.rs b/rust/feature-flags/src/team/mod.rs new file mode 100644 index 00000000000..4e6fe1869f0 --- /dev/null +++ b/rust/feature-flags/src/team/mod.rs @@ -0,0 +1,2 @@ +pub mod team_models; +pub mod team_operations; diff --git a/rust/feature-flags/src/team/team_models.rs b/rust/feature-flags/src/team/team_models.rs new file mode 100644 index 00000000000..29223825618 --- /dev/null +++ b/rust/feature-flags/src/team/team_models.rs @@ -0,0 +1,23 @@ +use serde::{Deserialize, Serialize}; + +// TRICKY: This cache data is coming from django-redis. If it ever goes out of sync, we'll bork. +// TODO: Add integration tests across repos to ensure this doesn't happen. +pub const TEAM_TOKEN_CACHE_PREFIX: &str = "posthog:1:team_token:"; + +#[derive(Clone, Debug, Deserialize, Serialize, sqlx::FromRow)] +pub struct Team { + pub id: i32, + pub name: String, + pub api_token: String, + // TODO: the following fields are used for the `/decide` response, + // but they're not used for flags and they don't live in redis. + // At some point I'll need to differentiate between teams in Redis and teams + // with additional fields in Postgres, since the Postgres team is a superset of the fields + // we use for flags, anyway.
+ // pub surveys_opt_in: bool, + // pub heatmaps_opt_in: bool, + // pub capture_performance_opt_in: bool, + // pub autocapture_web_vitals_opt_in: bool, + // pub autocapture_opt_out: bool, + // pub autocapture_exceptions_opt_in: bool, +} diff --git a/rust/feature-flags/src/team.rs b/rust/feature-flags/src/team/team_operations.rs similarity index 79% rename from rust/feature-flags/src/team.rs rename to rust/feature-flags/src/team/team_operations.rs index f13cf29094b..4f9b706153c 100644 --- a/rust/feature-flags/src/team.rs +++ b/rust/feature-flags/src/team/team_operations.rs @@ -1,34 +1,15 @@ -use serde::{Deserialize, Serialize}; use std::sync::Arc; use tracing::instrument; -use crate::{api::FlagError, database::Client as DatabaseClient, redis::Client as RedisClient}; - -// TRICKY: This cache data is coming from django-redis. If it ever goes out of sync, we'll bork. -// TODO: Add integration tests across repos to ensure this doesn't happen. -pub const TEAM_TOKEN_CACHE_PREFIX: &str = "posthog:1:team_token:"; - -#[derive(Clone, Debug, Deserialize, Serialize, sqlx::FromRow)] -pub struct Team { - pub id: i32, - pub name: String, - pub api_token: String, - // TODO: the following fields are used for the `/decide` response, - // but they're not used for flags and they don't live in redis. - // At some point I'll need to differentiate between teams in Redis and teams - // with additional fields in Postgres, since the Postgres team is a superset of the fields - // we use for flags, anyway. - // pub surveys_opt_in: bool, - // pub heatmaps_opt_in: bool, - // pub capture_performance_opt_in: bool, - // pub autocapture_web_vitals_opt_in: bool, - // pub autocapture_opt_out: bool, - // pub autocapture_exceptions_opt_in: bool, -} +use crate::{ + api::errors::FlagError, + client::database::Client as DatabaseClient, + client::redis::Client as RedisClient, + team::team_models::{Team, TEAM_TOKEN_CACHE_PREFIX}, +}; impl Team { /// Validates a token, and returns a team if it exists. 
- #[instrument(skip_all)] pub async fn from_redis( client: Arc<dyn RedisClient + Send + Sync>, @@ -94,12 +75,9 @@ mod tests { use redis::AsyncCommands; use super::*; - use crate::{ - team, - test_utils::{ - insert_new_team_in_pg, insert_new_team_in_redis, random_string, setup_pg_reader_client, - setup_redis_client, - }, + use crate::utils::test_utils::{ insert_new_team_in_pg, insert_new_team_in_redis, random_string, setup_pg_reader_client, setup_redis_client, }; #[tokio::test] @@ -159,11 +137,7 @@ mod tests { .await .expect("Failed to get redis connection"); conn.set::<String, String, ()>( - format!( - "{}{}", - team::TEAM_TOKEN_CACHE_PREFIX, - team.api_token.clone() - ), + format!("{}{}", TEAM_TOKEN_CACHE_PREFIX, team.api_token.clone()), serialized_team, ) .await diff --git a/rust/feature-flags/src/utils/mod.rs b/rust/feature-flags/src/utils/mod.rs new file mode 100644 index 00000000000..681d26e346c --- /dev/null +++ b/rust/feature-flags/src/utils/mod.rs @@ -0,0 +1 @@ +pub mod test_utils; diff --git a/rust/feature-flags/src/test_utils.rs b/rust/feature-flags/src/utils/test_utils.rs similarity index 96% rename from rust/feature-flags/src/test_utils.rs rename to rust/feature-flags/src/utils/test_utils.rs index 346ed106ea6..ad108d08023 100644 --- a/rust/feature-flags/src/test_utils.rs +++ b/rust/feature-flags/src/utils/test_utils.rs @@ -6,12 +6,14 @@ use std::sync::Arc; use uuid::Uuid; use crate::{ - cohort_models::Cohort, + client::{ + database::{get_pool, Client, CustomDatabaseError}, + redis::{Client as RedisClientTrait, RedisClient}, + }, + cohort::cohort_models::Cohort, config::{Config, DEFAULT_TEST_CONFIG}, - database::{get_pool, Client, CustomDatabaseError}, - flag_definitions::{self, FeatureFlag, FeatureFlagRow}, - redis::{Client as RedisClientTrait, RedisClient}, - team::{self, Team}, + flags::flag_models::{FeatureFlag, FeatureFlagRow, TEAM_FLAGS_CACHE_PREFIX}, + team::team_models::{Team, TEAM_TOKEN_CACHE_PREFIX}, }; use rand::{distributions::Alphanumeric, Rng}; @@ -38,11 +40,7 @@ pub async fn insert_new_team_in_redis( let serialized_team = serde_json::to_string(&team)?; client .set( - format!( - "{}{}", - team::TEAM_TOKEN_CACHE_PREFIX, - team.api_token.clone() - ), + format!("{}{}", TEAM_TOKEN_CACHE_PREFIX, team.api_token.clone()), serialized_team, ) .await?; @@ -82,10 +80,7 @@ pub async fn insert_flags_for_team_in_redis( }; client - .set( - format!("{}{}", flag_definitions::TEAM_FLAGS_CACHE_PREFIX, team_id), - payload, - ) + .set(format!("{}{}", TEAM_FLAGS_CACHE_PREFIX, team_id), payload) .await?; Ok(()) diff --git a/rust/feature-flags/tests/test_flag_matching_consistency.rs b/rust/feature-flags/tests/test_flag_matching_consistency.rs index c632d28bc15..c31ac2094ad 100644 --- a/rust/feature-flags/tests/test_flag_matching_consistency.rs +++ b/rust/feature-flags/tests/test_flag_matching_consistency.rs @@ -1,13 +1,14 @@ use std::sync::Arc; -use feature_flags::cohort_cache::CohortCacheManager; -use feature_flags::feature_flag_match_reason::FeatureFlagMatchReason; /// These tests are common between all libraries doing local evaluation of feature flags. /// This ensures there are no mismatches between implementations.
-use feature_flags::flag_matching::{FeatureFlagMatch, FeatureFlagMatcher}; - -use feature_flags::test_utils::{ - create_flag_from_json, setup_pg_reader_client, setup_pg_writer_client, +use feature_flags::{ + cohort::cohort_cache_manager::CohortCacheManager, + flags::{ + flag_match_reason::FeatureFlagMatchReason, + flag_matching::{FeatureFlagMatch, FeatureFlagMatcher}, + }, + utils::test_utils::{create_flag_from_json, setup_pg_reader_client, setup_pg_writer_client}, }; use serde_json::json; diff --git a/rust/feature-flags/tests/test_flags.rs b/rust/feature-flags/tests/test_flags.rs index 6b6263b4a77..918a73ede6f 100644 --- a/rust/feature-flags/tests/test_flags.rs +++ b/rust/feature-flags/tests/test_flags.rs @@ -7,7 +7,7 @@ use serde_json::{json, Value}; use crate::common::*; use feature_flags::config::DEFAULT_TEST_CONFIG; -use feature_flags::test_utils::{ +use feature_flags::utils::test_utils::{ insert_flags_for_team_in_redis, insert_new_team_in_pg, insert_new_team_in_redis, setup_pg_reader_client, setup_redis_client, };
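Taken together, the moves in this diff change paths, not types: call sites trade flat module imports for the new domain modules split into `*_models` and `*_operations` (with test helpers under `utils`). A before/after sketch of a hypothetical downstream call site (not from this diff):

```rust
// Before the restructure, a consumer imported flat modules:
//   use feature_flags::flag_definitions::{FeatureFlag, FeatureFlagList};
//   use feature_flags::team::Team;
// After it, the same items live under the domain modules:
use feature_flags::flags::flag_models::{FeatureFlag, FeatureFlagList};
use feature_flags::team::team_models::Team;

fn main() {
    // The types themselves are untouched, so this is purely a path migration.
    let list = FeatureFlagList { flags: Vec::<FeatureFlag>::new() };
    let team: Option<Team> = None;
    println!("flags: {}, team cached: {}", list.flags.len(), team.is_some());
}
```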