diff --git a/.vscode/launch.json b/.vscode/launch.json
index 98d4f1cdae0..93a654422db 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -97,7 +97,7 @@
             "console": "integratedTerminal",
             "cwd": "${workspaceFolder}",
             "env": {
-                "SKIP_ASYNC_MIGRATIONS_SETUP": "0",
+                "SKIP_ASYNC_MIGRATIONS_SETUP": "1",
                 "DEBUG": "1",
                 "BILLING_SERVICE_URL": "https://billing.dev.posthog.dev",
                 "SKIP_SERVICE_VERSION_REQUIREMENTS": "1"
diff --git a/bin/hoge b/bin/hoge
index 897de9b9856..26e1043b6f9 100755
--- a/bin/hoge
+++ b/bin/hoge
@@ -3,10 +3,13 @@ set -e
 if [[ "$@" == *".hog"* ]]; then
     exec python3 -m posthog.hogql.cli --compile "$@"
+elif [[ "$@" == *".js"* ]]; then
+    exec python3 -m posthog.hogql.cli --compile "$@"
 else
     echo "$0 - the Hog compilër! 🦔+🕶️= Hoge"
     echo ""
     echo "Usage: bin/hoge <file.hog> [output.hoge]    compile .hog into .hoge"
+    echo "       bin/hoge <file.hog> [output.js]      compile .hog into .js"
     echo "       bin/hog <file.hog>                   run .hog source code"
    echo "       bin/hog <file.hoge>                  run compiled .hoge bytecode"
     exit 1
diff --git a/cypress/e2e/experiments.cy.ts b/cypress/e2e/experiments.cy.ts
index 9e661be3459..5a7d92c3f49 100644
--- a/cypress/e2e/experiments.cy.ts
+++ b/cypress/e2e/experiments.cy.ts
@@ -88,9 +88,7 @@ describe('Experiments', () => {
         // Wait for the goal modal to open and click the confirmation button
         cy.get('.LemonModal__layout').should('be.visible')
         cy.contains('Change experiment goal').should('be.visible')
-        cy.get('.LemonModal__footer').contains('button', 'Save').should('have.attr', 'aria-disabled', 'true')
         cy.get('.LemonModal__content').contains('button', 'Add funnel step').click()
-        cy.get('.LemonModal__footer').contains('button', 'Save').should('not.have.attr', 'aria-disabled', 'true')
         cy.get('.LemonModal__footer').contains('button', 'Save').click()
     }
diff --git a/cypress/e2e/featureFlags.cy.ts b/cypress/e2e/featureFlags.cy.ts
index abb1c40e522..75f07c59cb6 100644
--- a/cypress/e2e/featureFlags.cy.ts
+++ b/cypress/e2e/featureFlags.cy.ts
@@ -308,7 +308,8 @@ describe('Feature Flags', () => {
         cy.get('.operator-value-option').contains('> after').should('not.exist')
     })
-    it('Allow setting multivariant rollout percentage to zero', () => {
+    it('Allows setting multivariant rollout percentage to zero', () => {
+        cy.get('[data-attr=top-bar-name]').should('contain', 'Feature flags')
         // Start creating a multivariant flag
         cy.get('[data-attr=new-feature-flag]').click()
         cy.get('[data-attr=feature-flag-served-value-segmented-button]')
@@ -328,6 +329,15 @@ describe('Feature Flags', () => {
         cy.get('[data-attr=feature-flag-variant-rollout-percentage-input]').click().type(`4.5`).should('have.value', 4)
     })
+    it('Sets URL properly when switching between tabs', () => {
+        cy.get('[data-attr=top-bar-name]').should('contain', 'Feature flags')
+        cy.get('[data-attr=feature-flags-tab-navigation]').contains('History').click()
+        cy.url().should('include', `tab=history`)
+
+        cy.get('[data-attr=feature-flags-tab-navigation]').contains('Overview').click()
+        cy.url().should('include', `tab=overview`)
+    })
+
     it('Renders flags in FlagSelector', () => {
         // Create flag name
         cy.get('[data-attr=top-bar-name]').should('contain', 'Feature flags')
diff --git a/cypress/e2e/surveys.cy.ts b/cypress/e2e/surveys.cy.ts
index 28082edb3de..1cccfb545fc 100644
--- a/cypress/e2e/surveys.cy.ts
+++ b/cypress/e2e/surveys.cy.ts
@@ -269,6 +269,7 @@ describe('Surveys', () => {
         // Set responses limit
         cy.get('.LemonCollapsePanel').contains('Completion conditions').click()
+        cy.get('[data-attr=survey-collection-until-limit]').first().click()
cy.get('[data-attr=survey-responses-limit-input]').focus().type('228').click() // Save the survey @@ -276,7 +277,7 @@ describe('Surveys', () => { cy.get('button[data-attr="launch-survey"]').should('have.text', 'Launch') cy.reload() - cy.contains('The survey will be stopped once 228 responses are received.').should('be.visible') + cy.contains('The survey will be stopped once 100228 responses are received.').should('be.visible') }) it('creates a new survey with branching logic', () => { diff --git a/ee/clickhouse/models/test/test_action.py b/ee/clickhouse/models/test/test_action.py index d4b3a32311a..b9aaf44a4c6 100644 --- a/ee/clickhouse/models/test/test_action.py +++ b/ee/clickhouse/models/test/test_action.py @@ -1,7 +1,7 @@ import dataclasses from posthog.client import sync_execute -from posthog.hogql.bytecode import create_bytecode +from posthog.hogql.compiler.bytecode import create_bytecode from posthog.hogql.hogql import HogQLContext from posthog.hogql.property import action_to_expr from posthog.models.action import Action diff --git a/ee/clickhouse/views/experiments.py b/ee/clickhouse/views/experiments.py index dc4a3170b93..d3a9f2afea3 100644 --- a/ee/clickhouse/views/experiments.py +++ b/ee/clickhouse/views/experiments.py @@ -31,7 +31,6 @@ from posthog.clickhouse.query_tagging import tag_queries from posthog.constants import INSIGHT_TRENDS from posthog.models.experiment import Experiment, ExperimentHoldout, ExperimentSavedMetric from posthog.models.filters.filter import Filter -from posthog.schema import ExperimentFunnelsQuery, ExperimentTrendsQuery from posthog.utils import generate_cache_key, get_safe_cache EXPERIMENT_RESULTS_CACHE_DEFAULT_TTL = 60 * 60 # 1 hour @@ -194,6 +193,7 @@ class ExperimentSerializer(serializers.ModelSerializer): "updated_at", "type", "metrics", + "metrics_secondary", ] read_only_fields = [ "id", @@ -235,36 +235,7 @@ class ExperimentSerializer(serializers.ModelSerializer): return value def validate_metrics(self, value): - # TODO: This isn't correct most probably, we wouldn't have experiment_id inside ExperimentTrendsQuery - # on creation. Not sure how this is supposed to work yet. - if not value: - return value - - if not isinstance(value, list): - raise ValidationError("Metrics must be a list") - - if len(value) > 10: - raise ValidationError("Experiments can have a maximum of 10 metrics") - - for metric in value: - if not isinstance(metric, dict): - raise ValidationError("Metrics must be objects") - if not metric.get("query"): - raise ValidationError("Metric query is required") - - if metric.get("type") not in ["primary", "secondary"]: - raise ValidationError("Metric type must be 'primary' or 'secondary'") - - metric_query = metric["query"] - - if metric_query.get("kind") not in ["ExperimentTrendsQuery", "ExperimentFunnelsQuery"]: - raise ValidationError("Metric query kind must be 'ExperimentTrendsQuery' or 'ExperimentFunnelsQuery'") - - # pydantic models are used to validate the query - if metric_query["kind"] == "ExperimentTrendsQuery": - ExperimentTrendsQuery(**metric_query) - else: - ExperimentFunnelsQuery(**metric_query) + # TODO 2024-11-15: commented code will be addressed when persistent metrics are implemented. 
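While that TODO stands, the removed checks above remain the reference behavior: each metric's query dict was validated by instantiating the matching pydantic model, so malformed payloads raise before the experiment is saved. A minimal sketch of that mechanism, using a stand-in model rather than the real posthog.schema classes:

```python
from typing import Literal, Optional

from pydantic import BaseModel, ValidationError


class ExperimentTrendsQueryStandIn(BaseModel):
    """Stand-in for posthog.schema.ExperimentTrendsQuery (hypothetical fields)."""

    kind: Literal["ExperimentTrendsQuery"]
    experiment_id: Optional[int] = None


def validate_metric(metric: dict) -> None:
    if not isinstance(metric, dict):
        raise ValueError("Metrics must be objects")
    query = metric.get("query")
    if not query:
        raise ValueError("Metric query is required")
    # Instantiating the pydantic model raises ValidationError on a bad payload
    ExperimentTrendsQueryStandIn(**query)


validate_metric({"query": {"kind": "ExperimentTrendsQuery", "experiment_id": 1}})  # passes
try:
    validate_metric({"query": {"kind": "TrendsQuery"}})  # wrong kind
except ValidationError as err:
    print(err)  # pydantic reports the offending field
```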
return value @@ -285,8 +256,8 @@ class ExperimentSerializer(serializers.ModelSerializer): def create(self, validated_data: dict, *args: Any, **kwargs: Any) -> Experiment: is_draft = "start_date" not in validated_data or validated_data["start_date"] is None - if not validated_data.get("filters") and not is_draft: - raise ValidationError("Filters are required when creating a launched experiment") + # if not validated_data.get("filters") and not is_draft: + # raise ValidationError("Filters are required when creating a launched experiment") saved_metrics_data = validated_data.pop("saved_metrics_ids", []) @@ -301,11 +272,6 @@ class ExperimentSerializer(serializers.ModelSerializer): feature_flag_key = validated_data.pop("get_feature_flag_key") - properties = validated_data["filters"].get("properties", []) - - if properties: - raise ValidationError("Experiments do not support global filter properties") - holdout_groups = None if validated_data.get("holdout"): holdout_groups = validated_data["holdout"].filters @@ -315,8 +281,8 @@ class ExperimentSerializer(serializers.ModelSerializer): {"key": "test", "name": "Test Variant", "rollout_percentage": 50}, ] - filters = { - "groups": [{"properties": properties, "rollout_percentage": 100}], + feature_flag_filters = { + "groups": [{"properties": [], "rollout_percentage": 100}], "multivariate": {"variants": variants or default_variants}, "aggregation_group_type_index": aggregation_group_type_index, "holdout_groups": holdout_groups, @@ -326,8 +292,9 @@ class ExperimentSerializer(serializers.ModelSerializer): data={ "key": feature_flag_key, "name": f'Feature Flag for Experiment {validated_data["name"]}', - "filters": filters, + "filters": feature_flag_filters, "active": not is_draft, + "creation_context": "experiments", }, context=self.context, ) @@ -369,13 +336,13 @@ class ExperimentSerializer(serializers.ModelSerializer): return experiment def update(self, instance: Experiment, validated_data: dict, *args: Any, **kwargs: Any) -> Experiment: - if ( - not instance.filters.get("events") - and not instance.filters.get("actions") - and validated_data.get("start_date") - and not validated_data.get("filters") - ): - raise ValidationError("Filters are required when launching an experiment") + # if ( + # not instance.filters.get("events") + # and not instance.filters.get("actions") + # and validated_data.get("start_date") + # and not validated_data.get("filters") + # ): + # raise ValidationError("Filters are required when launching an experiment") update_saved_metrics = "saved_metrics_ids" in validated_data saved_metrics_data = validated_data.pop("saved_metrics_ids", []) or [] @@ -408,6 +375,8 @@ class ExperimentSerializer(serializers.ModelSerializer): "archived", "secondary_metrics", "holdout", + "metrics", + "metrics_secondary", } given_keys = set(validated_data.keys()) extra_keys = given_keys - expected_keys diff --git a/ee/clickhouse/views/test/test_clickhouse_experiments.py b/ee/clickhouse/views/test/test_clickhouse_experiments.py index 1b22bbaa13c..751b65ed082 100644 --- a/ee/clickhouse/views/test/test_clickhouse_experiments.py +++ b/ee/clickhouse/views/test/test_clickhouse_experiments.py @@ -1235,42 +1235,6 @@ class TestExperimentCRUD(APILicensedTest): self.assertIsNotNone(Experiment.objects.get(pk=id)) - def test_cant_add_global_properties_to_new_experiment(self): - ff_key = "a-b-tests" - response = self.client.post( - f"/api/projects/{self.team.id}/experiments/", - { - "name": "Test Experiment", - "description": "", - "start_date": None, - "end_date": 
None, - "feature_flag_key": ff_key, - "parameters": None, - "filters": { - "events": [ - {"order": 0, "id": "$pageview"}, - {"order": 1, "id": "$pageleave"}, - ], - "properties": [ - { - "key": "industry", - "type": "group", - "value": ["technology"], - "operator": "exact", - "group_type_index": 1, - } - ], - "aggregation_group_type_index": 1, - }, - }, - ) - - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.json()["detail"], - "Experiments do not support global filter properties", - ) - def test_creating_updating_experiment_with_group_aggregation(self): ff_key = "a-b-tests" response = self.client.post( @@ -1789,79 +1753,6 @@ class TestExperimentCRUD(APILicensedTest): self.assertEqual(response.json()["name"], "Test Experiment") self.assertEqual(response.json()["feature_flag_key"], ff_key) - def test_create_launched_experiment_without_filters(self) -> None: - ff_key = "a-b-tests" - response = self.client.post( - f"/api/projects/{self.team.id}/experiments/", - { - "name": "Test Experiment", - "description": "", - "start_date": "2021-12-01T10:23", - "end_date": None, - "feature_flag_key": ff_key, - "parameters": None, - "filters": {}, - }, - ) - - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(response.json()["detail"], "Filters are required when creating a launched experiment") - - def test_launch_draft_experiment_without_filters(self) -> None: - ff_key = "a-b-tests" - response = self.client.post( - f"/api/projects/{self.team.id}/experiments/", - { - "name": "Test Experiment", - "description": "", - "start_date": None, - "end_date": None, - "feature_flag_key": ff_key, - "parameters": None, - "filters": {}, - }, - ) - - self.assertEqual(response.status_code, status.HTTP_201_CREATED) - draft_exp = response.json() - - response = self.client.patch( - f"/api/projects/{self.team.id}/experiments/{draft_exp['id']}", - { - "name": "Test Experiment", - "description": "", - "start_date": "2021-12-01T10:23", - "end_date": None, - "feature_flag_key": ff_key, - "parameters": None, - "filters": {}, - }, - ) - - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(response.json()["detail"], "Filters are required when launching an experiment") - - response = self.client.patch( - f"/api/projects/{self.team.id}/experiments/{draft_exp['id']}", - { - "name": "Test Experiment", - "description": "", - "start_date": "2021-12-01T10:23", - "end_date": None, - "feature_flag_key": ff_key, - "parameters": None, - "filters": { - "events": [ - {"order": 0, "id": "$pageview"}, - {"order": 1, "id": "$pageleave"}, - ], - "properties": [], - }, - }, - ) - - self.assertEqual(response.status_code, status.HTTP_200_OK) - class TestExperimentAuxiliaryEndpoints(ClickhouseTestMixin, APILicensedTest): def _generate_experiment(self, start_date="2024-01-01T10:23", extra_parameters=None): diff --git a/ee/hogai/assistant.py b/ee/hogai/assistant.py index 37a171bf036..df200ae869c 100644 --- a/ee/hogai/assistant.py +++ b/ee/hogai/assistant.py @@ -5,6 +5,7 @@ from langchain_core.messages import AIMessageChunk from langfuse.callback import CallbackHandler from langgraph.graph.state import StateGraph from pydantic import BaseModel +from sentry_sdk import capture_exception from ee import settings from ee.hogai.funnels.nodes import ( @@ -15,6 +16,7 @@ from ee.hogai.funnels.nodes import ( ) from ee.hogai.router.nodes import RouterNode from ee.hogai.schema_generator.nodes import SchemaGeneratorNode +from 
ee.hogai.summarizer.nodes import SummarizerNode from ee.hogai.trends.nodes import ( TrendsGeneratorNode, TrendsGeneratorToolsNode, @@ -26,6 +28,8 @@ from posthog.models.team.team import Team from posthog.schema import ( AssistantGenerationStatusEvent, AssistantGenerationStatusType, + AssistantMessage, + FailureMessage, VisualizationMessage, ) @@ -123,7 +127,7 @@ class Assistant: generate_trends_node.router, path_map={ "tools": AssistantNodeName.TRENDS_GENERATOR_TOOLS, - "next": AssistantNodeName.END, + "next": AssistantNodeName.SUMMARIZER, }, ) @@ -160,10 +164,14 @@ class Assistant: generate_trends_node.router, path_map={ "tools": AssistantNodeName.FUNNEL_GENERATOR_TOOLS, - "next": AssistantNodeName.END, + "next": AssistantNodeName.SUMMARIZER, }, ) + summarizer_node = SummarizerNode(self._team) + builder.add_node(AssistantNodeName.SUMMARIZER, summarizer_node.run) + builder.add_edge(AssistantNodeName.SUMMARIZER, AssistantNodeName.END) + return builder.compile() def stream(self, conversation: Conversation) -> Generator[BaseModel, None, None]: @@ -185,33 +193,47 @@ class Assistant: # Send a chunk to establish the connection avoiding the worker's timeout. yield AssistantGenerationStatusEvent(type=AssistantGenerationStatusType.ACK) - for update in generator: - if is_state_update(update): - _, new_state = update - state = new_state + try: + for update in generator: + if is_state_update(update): + _, new_state = update + state = new_state - elif is_value_update(update): - _, state_update = update + elif is_value_update(update): + _, state_update = update - if AssistantNodeName.ROUTER in state_update and "messages" in state_update[AssistantNodeName.ROUTER]: - yield state_update[AssistantNodeName.ROUTER]["messages"][0] - elif intersected_nodes := state_update.keys() & VISUALIZATION_NODES.keys(): - # Reset chunks when schema validation fails. - chunks = AIMessageChunk(content="") + if ( + AssistantNodeName.ROUTER in state_update + and "messages" in state_update[AssistantNodeName.ROUTER] + ): + yield state_update[AssistantNodeName.ROUTER]["messages"][0] + elif intersected_nodes := state_update.keys() & VISUALIZATION_NODES.keys(): + # Reset chunks when schema validation fails. 
+ chunks = AIMessageChunk(content="") - node_name = intersected_nodes.pop() - if "messages" in state_update[node_name]: - yield state_update[node_name]["messages"][0] - elif state_update[node_name].get("intermediate_steps", []): - yield AssistantGenerationStatusEvent(type=AssistantGenerationStatusType.GENERATION_ERROR) - - elif is_message_update(update): - langchain_message, langgraph_state = update[1] - for node_name, viz_node in VISUALIZATION_NODES.items(): - if langgraph_state["langgraph_node"] == node_name and isinstance(langchain_message, AIMessageChunk): - chunks += langchain_message # type: ignore - parsed_message = viz_node.parse_output(chunks.tool_calls[0]["args"]) - if parsed_message: - yield VisualizationMessage( - reasoning_steps=parsed_message.reasoning_steps, answer=parsed_message.answer + node_name = intersected_nodes.pop() + if "messages" in state_update[node_name]: + yield state_update[node_name]["messages"][0] + elif state_update[node_name].get("intermediate_steps", []): + yield AssistantGenerationStatusEvent(type=AssistantGenerationStatusType.GENERATION_ERROR) + elif AssistantNodeName.SUMMARIZER in state_update: + chunks = AIMessageChunk(content="") + yield state_update[AssistantNodeName.SUMMARIZER]["messages"][0] + elif is_message_update(update): + langchain_message, langgraph_state = update[1] + if isinstance(langchain_message, AIMessageChunk): + if langgraph_state["langgraph_node"] in VISUALIZATION_NODES.keys(): + chunks += langchain_message # type: ignore + parsed_message = VISUALIZATION_NODES[langgraph_state["langgraph_node"]].parse_output( + chunks.tool_calls[0]["args"] ) + if parsed_message: + yield VisualizationMessage( + reasoning_steps=parsed_message.reasoning_steps, answer=parsed_message.answer + ) + elif langgraph_state["langgraph_node"] == AssistantNodeName.SUMMARIZER: + chunks += langchain_message # type: ignore + yield AssistantMessage(content=chunks.content) + except Exception as e: + capture_exception(e) + yield FailureMessage() # This is an unhandled error, so we just stop further generation at this point diff --git a/ee/hogai/funnels/test/test_nodes.py b/ee/hogai/funnels/test/test_nodes.py index d7b42f7a872..59ba48ff6fa 100644 --- a/ee/hogai/funnels/test/test_nodes.py +++ b/ee/hogai/funnels/test/test_nodes.py @@ -33,7 +33,9 @@ class TestFunnelsGeneratorNode(ClickhouseTestMixin, APIBaseTest): self.assertEqual( new_state, { - "messages": [VisualizationMessage(answer=self.schema, plan="Plan", reasoning_steps=["step"])], + "messages": [ + VisualizationMessage(answer=self.schema, plan="Plan", reasoning_steps=["step"], done=True) + ], "intermediate_steps": None, }, ) diff --git a/ee/hogai/router/test/test_router.py b/ee/hogai/router/test/test_nodes.py similarity index 100% rename from ee/hogai/router/test/test_router.py rename to ee/hogai/router/test/test_nodes.py diff --git a/ee/hogai/schema_generator/nodes.py b/ee/hogai/schema_generator/nodes.py index f6e41c69027..6470c52c4fe 100644 --- a/ee/hogai/schema_generator/nodes.py +++ b/ee/hogai/schema_generator/nodes.py @@ -101,6 +101,7 @@ class SchemaGeneratorNode(AssistantNode, Generic[Q]): plan=generated_plan, reasoning_steps=message.reasoning_steps, answer=message.answer, + done=True, ) ], "intermediate_steps": None, diff --git a/ee/hogai/schema_generator/test/test_nodes.py b/ee/hogai/schema_generator/test/test_nodes.py index 961bd88de22..25f82e43d44 100644 --- a/ee/hogai/schema_generator/test/test_nodes.py +++ b/ee/hogai/schema_generator/test/test_nodes.py @@ -54,7 +54,9 @@ class 
TestSchemaGeneratorNode(ClickhouseTestMixin, APIBaseTest):
             self.assertEqual(
                 new_state,
                 {
-                    "messages": [VisualizationMessage(answer=self.schema, plan="Plan", reasoning_steps=["step"])],
+                    "messages": [
+                        VisualizationMessage(answer=self.schema, plan="Plan", reasoning_steps=["step"], done=True)
+                    ],
                     "intermediate_steps": None,
                 },
             )
diff --git a/ee/hogai/summarizer/__init__.py b/ee/hogai/summarizer/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/ee/hogai/summarizer/nodes.py b/ee/hogai/summarizer/nodes.py
new file mode 100644
index 00000000000..8d5e8a406f4
--- /dev/null
+++ b/ee/hogai/summarizer/nodes.py
@@ -0,0 +1,95 @@
+import json
+from time import sleep
+from django.conf import settings
+from langchain_core.prompts import ChatPromptTemplate
+from langchain_core.runnables import RunnableConfig
+from langchain_openai import ChatOpenAI
+from django.core.serializers.json import DjangoJSONEncoder
+from rest_framework.exceptions import APIException
+from sentry_sdk import capture_exception
+
+from ee.hogai.summarizer.prompts import SUMMARIZER_SYSTEM_PROMPT, SUMMARIZER_INSTRUCTION_PROMPT
+from ee.hogai.utils import AssistantNode, AssistantNodeName, AssistantState
+from posthog.api.services.query import process_query_dict
+from posthog.clickhouse.client.execute_async import get_query_status
+from posthog.errors import ExposedCHQueryError
+from posthog.hogql.errors import ExposedHogQLError
+from posthog.hogql_queries.query_runner import ExecutionMode
+from posthog.schema import AssistantMessage, FailureMessage, HumanMessage, VisualizationMessage
+
+
+class SummarizerNode(AssistantNode):
+    name = AssistantNodeName.SUMMARIZER
+
+    def run(self, state: AssistantState, config: RunnableConfig):
+        viz_message = state["messages"][-1]
+        if not isinstance(viz_message, VisualizationMessage):
+            raise ValueError("Can only run summarization with a visualization message as the last one in the state")
+        if viz_message.answer is None:
+            raise ValueError("Did not find query in the visualization message")
+
+        try:
+            results_response = process_query_dict(  # type: ignore
+                self._team,  # TODO: Add user
+                viz_message.answer.model_dump(mode="json"),  # We need mode="json" so that the query is serialized into JSON-safe types
+                # Celery doesn't run in tests, so there we use force_blocking instead
+                # This does mean that the waiting logic is not tested
+                execution_mode=ExecutionMode.RECENT_CACHE_CALCULATE_ASYNC_IF_STALE
+                if not settings.TEST
+                else ExecutionMode.CALCULATE_BLOCKING_ALWAYS,
+            ).model_dump(mode="json")
+            if results_response.get("query_status") and not results_response["query_status"]["complete"]:
+                query_id = results_response["query_status"]["id"]
+                for i in range(0, 999):
+                    sleep(i / 2)  # No wait before the first check, then each iteration waits 0.5s longer
+                    query_status = get_query_status(team_id=self._team.pk, query_id=query_id)
+                    if query_status.error:
+                        if query_status.error_message:
+                            raise APIException(query_status.error_message)
+                        else:
+                            raise ValueError("Query failed")
+                    if query_status.complete:
+                        results_response = query_status.results
+                        break
+        except (APIException, ExposedHogQLError, ExposedCHQueryError) as err:
+            err_message = str(err)
+            if isinstance(err, APIException):
+                if isinstance(err.detail, dict):
+                    err_message = ", ".join(f"{key}: {value}" for key, value in err.detail.items())
+                elif isinstance(err.detail, list):
+                    err_message = ", ".join(map(str, err.detail))
+            return {"messages": [FailureMessage(content=f"There was an error running this query: {err_message}")]}
+        except Exception as err:
+            capture_exception(err)
+ return {"messages": [FailureMessage(content="There was an unknown error running this query.")]} + + summarization_prompt = ChatPromptTemplate(self._construct_messages(state), template_format="mustache") + + chain = summarization_prompt | self._model + + message = chain.invoke( + { + "query_kind": viz_message.answer.kind, + "product_description": self._team.project.product_description, + "results": json.dumps(results_response["results"], cls=DjangoJSONEncoder), + }, + config, + ) + + return {"messages": [AssistantMessage(content=str(message.content), done=True)]} + + @property + def _model(self): + return ChatOpenAI(model="gpt-4o", temperature=0.5, streaming=True) # Slightly higher temp than earlier steps + + def _construct_messages(self, state: AssistantState) -> list[tuple[str, str]]: + conversation: list[tuple[str, str]] = [("system", SUMMARIZER_SYSTEM_PROMPT)] + + for message in state.get("messages", []): + if isinstance(message, HumanMessage): + conversation.append(("human", message.content)) + elif isinstance(message, AssistantMessage): + conversation.append(("assistant", message.content)) + + conversation.append(("human", SUMMARIZER_INSTRUCTION_PROMPT)) + return conversation diff --git a/ee/hogai/summarizer/prompts.py b/ee/hogai/summarizer/prompts.py new file mode 100644 index 00000000000..bf2272d9d4c --- /dev/null +++ b/ee/hogai/summarizer/prompts.py @@ -0,0 +1,17 @@ +SUMMARIZER_SYSTEM_PROMPT = """ +Act as an expert product manager. Your task is to summarize query results in a a concise way. +Offer actionable feedback if possible. Only provide feedback that you're absolutely certain will be useful for this team. + +The product being analyzed is described as follows: +{{product_description}}""" + +SUMMARIZER_INSTRUCTION_PROMPT = """ +Here are the {{query_kind}} results for this question: +```json +{{results}} +``` + +Answer my earlier question using the results above. Point out interesting trends or anomalies. +Take into account what you know about my product. If possible, offer actionable feedback, but avoid generic advice. +Limit yourself to a few sentences. The answer needs to be high-impact and relevant for me as a Silicon Valley engineer. 
+""" diff --git a/ee/hogai/summarizer/test/__init__.py b/ee/hogai/summarizer/test/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/ee/hogai/summarizer/test/test_nodes.py b/ee/hogai/summarizer/test/test_nodes.py new file mode 100644 index 00000000000..b0e8cdcd37f --- /dev/null +++ b/ee/hogai/summarizer/test/test_nodes.py @@ -0,0 +1,196 @@ +from unittest.mock import patch + +from django.test import override_settings +from langchain_core.runnables import RunnableLambda +from langchain_core.messages import ( + HumanMessage as LangchainHumanMessage, +) +from ee.hogai.summarizer.nodes import SummarizerNode +from ee.hogai.summarizer.prompts import SUMMARIZER_INSTRUCTION_PROMPT, SUMMARIZER_SYSTEM_PROMPT +from posthog.schema import ( + AssistantMessage, + AssistantTrendsEventsNode, + AssistantTrendsQuery, + FailureMessage, + HumanMessage, + VisualizationMessage, +) +from rest_framework.exceptions import ValidationError +from posthog.test.base import APIBaseTest, ClickhouseTestMixin +from posthog.api.services.query import process_query_dict + + +@override_settings(IN_UNIT_TESTING=True) +class TestSummarizerNode(ClickhouseTestMixin, APIBaseTest): + maxDiff = None + + @patch("ee.hogai.summarizer.nodes.process_query_dict", side_effect=process_query_dict) + def test_node_runs(self, mock_process_query_dict): + node = SummarizerNode(self.team) + with patch.object(SummarizerNode, "_model") as generator_model_mock: + generator_model_mock.return_value = RunnableLambda( + lambda _: LangchainHumanMessage(content="The results indicate foobar.") + ) + new_state = node.run( + { + "messages": [ + HumanMessage(content="Text"), + VisualizationMessage( + answer=AssistantTrendsQuery(series=[AssistantTrendsEventsNode()]), + plan="Plan", + reasoning_steps=["step"], + done=True, + ), + ], + "plan": "Plan", + }, + {}, + ) + mock_process_query_dict.assert_called_once() # Query processing started + self.assertEqual( + new_state, + { + "messages": [ + AssistantMessage(content="The results indicate foobar.", done=True), + ], + }, + ) + + @patch( + "ee.hogai.summarizer.nodes.process_query_dict", + side_effect=ValueError("You have not glibbled the glorp before running this."), + ) + def test_node_handles_internal_error(self, mock_process_query_dict): + node = SummarizerNode(self.team) + with patch.object(SummarizerNode, "_model") as generator_model_mock: + generator_model_mock.return_value = RunnableLambda( + lambda _: LangchainHumanMessage(content="The results indicate foobar.") + ) + new_state = node.run( + { + "messages": [ + HumanMessage(content="Text"), + VisualizationMessage( + answer=AssistantTrendsQuery(series=[AssistantTrendsEventsNode()]), + plan="Plan", + reasoning_steps=["step"], + done=True, + ), + ], + "plan": "Plan", + }, + {}, + ) + mock_process_query_dict.assert_called_once() # Query processing started + self.assertEqual( + new_state, + { + "messages": [ + FailureMessage(content="There was an unknown error running this query."), + ], + }, + ) + + @patch( + "ee.hogai.summarizer.nodes.process_query_dict", + side_effect=ValidationError( + "This query exceeds the capabilities of our picolator. Try de-brolling its flim-flam." 
+ ), + ) + def test_node_handles_exposed_error(self, mock_process_query_dict): + node = SummarizerNode(self.team) + with patch.object(SummarizerNode, "_model") as generator_model_mock: + generator_model_mock.return_value = RunnableLambda( + lambda _: LangchainHumanMessage(content="The results indicate foobar.") + ) + new_state = node.run( + { + "messages": [ + HumanMessage(content="Text"), + VisualizationMessage( + answer=AssistantTrendsQuery(series=[AssistantTrendsEventsNode()]), + plan="Plan", + reasoning_steps=["step"], + done=True, + ), + ], + "plan": "Plan", + }, + {}, + ) + mock_process_query_dict.assert_called_once() # Query processing started + self.assertEqual( + new_state, + { + "messages": [ + FailureMessage( + content=( + "There was an error running this query: This query exceeds the capabilities of our picolator. " + "Try de-brolling its flim-flam." + ) + ), + ], + }, + ) + + def test_node_requires_a_viz_message_in_state(self): + node = SummarizerNode(self.team) + + with self.assertRaisesMessage( + ValueError, "Can only run summarization with a visualization message as the last one in the state" + ): + node.run( + { + "messages": [ + HumanMessage(content="Text"), + ], + "plan": "Plan", + }, + {}, + ) + + def test_node_requires_viz_message_in_state_to_have_query(self): + node = SummarizerNode(self.team) + + with self.assertRaisesMessage(ValueError, "Did not found query in the visualization message"): + node.run( + { + "messages": [ + VisualizationMessage( + answer=None, + plan="Plan", + reasoning_steps=["step"], + done=True, + ), + ], + "plan": "Plan", + }, + {}, + ) + + def test_agent_reconstructs_conversation(self): + self.project.product_description = "Dating app for lonely hedgehogs." + self.project.save() + node = SummarizerNode(self.team) + + history = node._construct_messages( + { + "messages": [ + HumanMessage(content="What's the trends in signups?"), + VisualizationMessage( + answer=AssistantTrendsQuery(series=[AssistantTrendsEventsNode()]), + plan="Plan", + reasoning_steps=["step"], + done=True, + ), + ] + } + ) + self.assertEqual( + history, + [ + ("system", SUMMARIZER_SYSTEM_PROMPT), + ("human", "What's the trends in signups?"), + ("human", SUMMARIZER_INSTRUCTION_PROMPT), + ], + ) diff --git a/ee/hogai/taxonomy_agent/nodes.py b/ee/hogai/taxonomy_agent/nodes.py index 06d3b29df1a..ef5c1953b99 100644 --- a/ee/hogai/taxonomy_agent/nodes.py +++ b/ee/hogai/taxonomy_agent/nodes.py @@ -75,10 +75,8 @@ class TaxonomyAgentPlannerNode(AssistantNode): AgentAction, agent.invoke( { - "react_format": REACT_FORMAT_PROMPT, + "react_format": self._get_react_format_prompt(toolkit), "react_format_reminder": REACT_FORMAT_REMINDER_PROMPT, - "tools": toolkit.render_text_description(), - "tool_names": ", ".join([t["name"] for t in toolkit.tools]), "product_description": self._team.project.product_description, "groups": self._team_group_types, "events": self._events_prompt, @@ -121,6 +119,17 @@ class TaxonomyAgentPlannerNode(AssistantNode): def _model(self) -> ChatOpenAI: return ChatOpenAI(model="gpt-4o", temperature=0.2, streaming=True) + def _get_react_format_prompt(self, toolkit: TaxonomyAgentToolkit) -> str: + return cast( + str, + ChatPromptTemplate.from_template(REACT_FORMAT_PROMPT, template_format="mustache") + .format_messages( + tools=toolkit.render_text_description(), + tool_names=", ".join([t["name"] for t in toolkit.tools]), + )[0] + .content, + ) + @cached_property def _events_prompt(self) -> str: response = TeamTaxonomyQueryRunner(TeamTaxonomyQuery(), self._team).run( diff 
--git a/ee/hogai/taxonomy_agent/test/test_nodes.py b/ee/hogai/taxonomy_agent/test/test_nodes.py index 920dfacfb85..1a5fe25d8bf 100644 --- a/ee/hogai/taxonomy_agent/test/test_nodes.py +++ b/ee/hogai/taxonomy_agent/test/test_nodes.py @@ -22,7 +22,7 @@ from posthog.schema import ( from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person -class TestToolkit(TaxonomyAgentToolkit): +class DummyToolkit(TaxonomyAgentToolkit): def _get_tools(self) -> list[ToolkitTool]: return self._default_tools @@ -36,8 +36,8 @@ class TestTaxonomyAgentPlannerNode(ClickhouseTestMixin, APIBaseTest): def _get_node(self): class Node(TaxonomyAgentPlannerNode): def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: - prompt = ChatPromptTemplate.from_messages([("user", "test")]) - toolkit = TestToolkit(self._team) + prompt: ChatPromptTemplate = ChatPromptTemplate.from_messages([("user", "test")]) + toolkit = DummyToolkit(self._team) return super()._run_with_prompt_and_toolkit(state, prompt, toolkit, config=config) return Node(self.team) @@ -180,13 +180,21 @@ class TestTaxonomyAgentPlannerNode(ClickhouseTestMixin, APIBaseTest): node._events_prompt, ) + def test_format_prompt(self): + node = self._get_node() + self.assertNotIn("Human:", node._get_react_format_prompt(DummyToolkit(self.team))) + self.assertIn("retrieve_event_properties,", node._get_react_format_prompt(DummyToolkit(self.team))) + self.assertIn( + "retrieve_event_properties(event_name: str)", node._get_react_format_prompt(DummyToolkit(self.team)) + ) + @override_settings(IN_UNIT_TESTING=True) class TestTaxonomyAgentPlannerToolsNode(ClickhouseTestMixin, APIBaseTest): def _get_node(self): class Node(TaxonomyAgentPlannerToolsNode): def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: - toolkit = TestToolkit(self._team) + toolkit = DummyToolkit(self._team) return super()._run_with_toolkit(state, toolkit, config=config) return Node(self.team) diff --git a/ee/hogai/trends/test/test_nodes.py b/ee/hogai/trends/test/test_nodes.py index 334d321f5ae..03c2ac85ea7 100644 --- a/ee/hogai/trends/test/test_nodes.py +++ b/ee/hogai/trends/test/test_nodes.py @@ -14,6 +14,8 @@ from posthog.test.base import APIBaseTest, ClickhouseTestMixin @override_settings(IN_UNIT_TESTING=True) class TestTrendsGeneratorNode(ClickhouseTestMixin, APIBaseTest): + maxDiff = None + def setUp(self): self.schema = AssistantTrendsQuery(series=[]) @@ -33,7 +35,9 @@ class TestTrendsGeneratorNode(ClickhouseTestMixin, APIBaseTest): self.assertEqual( new_state, { - "messages": [VisualizationMessage(answer=self.schema, plan="Plan", reasoning_steps=["step"])], + "messages": [ + VisualizationMessage(answer=self.schema, plan="Plan", reasoning_steps=["step"], done=True) + ], "intermediate_steps": None, }, ) diff --git a/ee/hogai/utils.py b/ee/hogai/utils.py index 74e12ee2d75..60fa74fc230 100644 --- a/ee/hogai/utils.py +++ b/ee/hogai/utils.py @@ -50,6 +50,7 @@ class AssistantNodeName(StrEnum): FUNNEL_PLANNER_TOOLS = "funnel_planner_tools" FUNNEL_GENERATOR = "funnel_generator" FUNNEL_GENERATOR_TOOLS = "funnel_generator_tools" + SUMMARIZER = "summarizer" class AssistantNode(ABC): diff --git a/frontend/__snapshots__/components-errors-error-display--anonymous-error-with-stack-trace--dark.png b/frontend/__snapshots__/components-errors-error-display--anonymous-error-with-stack-trace--dark.png index 202f1a5b6cb..50d668e3f3b 100644 Binary files 
a/frontend/__snapshots__/components-errors-error-display--anonymous-error-with-stack-trace--dark.png and b/frontend/__snapshots__/components-errors-error-display--anonymous-error-with-stack-trace--dark.png differ diff --git a/frontend/__snapshots__/components-errors-error-display--anonymous-error-with-stack-trace--light.png b/frontend/__snapshots__/components-errors-error-display--anonymous-error-with-stack-trace--light.png index acf86648a2d..2f226c7c05f 100644 Binary files a/frontend/__snapshots__/components-errors-error-display--anonymous-error-with-stack-trace--light.png and b/frontend/__snapshots__/components-errors-error-display--anonymous-error-with-stack-trace--light.png differ diff --git a/frontend/__snapshots__/components-errors-error-display--importing-module--dark.png b/frontend/__snapshots__/components-errors-error-display--importing-module--dark.png index 80e85acfb3a..67811b05acb 100644 Binary files a/frontend/__snapshots__/components-errors-error-display--importing-module--dark.png and b/frontend/__snapshots__/components-errors-error-display--importing-module--dark.png differ diff --git a/frontend/__snapshots__/components-errors-error-display--importing-module--light.png b/frontend/__snapshots__/components-errors-error-display--importing-module--light.png index bcb070820ba..046a20b8c39 100644 Binary files a/frontend/__snapshots__/components-errors-error-display--importing-module--light.png and b/frontend/__snapshots__/components-errors-error-display--importing-module--light.png differ diff --git a/frontend/__snapshots__/components-errors-error-display--resize-observer-loop-limit-exceeded--dark.png b/frontend/__snapshots__/components-errors-error-display--resize-observer-loop-limit-exceeded--dark.png index ea3e5b099ec..3885c148fec 100644 Binary files a/frontend/__snapshots__/components-errors-error-display--resize-observer-loop-limit-exceeded--dark.png and b/frontend/__snapshots__/components-errors-error-display--resize-observer-loop-limit-exceeded--dark.png differ diff --git a/frontend/__snapshots__/components-errors-error-display--resize-observer-loop-limit-exceeded--light.png b/frontend/__snapshots__/components-errors-error-display--resize-observer-loop-limit-exceeded--light.png index 540d3972b08..a63f72eabf2 100644 Binary files a/frontend/__snapshots__/components-errors-error-display--resize-observer-loop-limit-exceeded--light.png and b/frontend/__snapshots__/components-errors-error-display--resize-observer-loop-limit-exceeded--light.png differ diff --git a/frontend/__snapshots__/components-errors-error-display--safari-script-error--dark.png b/frontend/__snapshots__/components-errors-error-display--safari-script-error--dark.png index fa4a2806613..6b2259c04bc 100644 Binary files a/frontend/__snapshots__/components-errors-error-display--safari-script-error--dark.png and b/frontend/__snapshots__/components-errors-error-display--safari-script-error--dark.png differ diff --git a/frontend/__snapshots__/components-errors-error-display--safari-script-error--light.png b/frontend/__snapshots__/components-errors-error-display--safari-script-error--light.png index ef06a5729bc..43b0d6d315a 100644 Binary files a/frontend/__snapshots__/components-errors-error-display--safari-script-error--light.png and b/frontend/__snapshots__/components-errors-error-display--safari-script-error--light.png differ diff --git a/frontend/__snapshots__/components-playerinspector--default--dark.png b/frontend/__snapshots__/components-playerinspector--default--dark.png new file mode 100644 index 
00000000000..e1d088184e1 Binary files /dev/null and b/frontend/__snapshots__/components-playerinspector--default--dark.png differ diff --git a/frontend/__snapshots__/components-playerinspector--default--light.png b/frontend/__snapshots__/components-playerinspector--default--light.png new file mode 100644 index 00000000000..e9aabac97d7 Binary files /dev/null and b/frontend/__snapshots__/components-playerinspector--default--light.png differ diff --git a/frontend/__snapshots__/scenes-app-errortracking--group-page--dark.png b/frontend/__snapshots__/scenes-app-errortracking--group-page--dark.png index 65789fcd50f..38d5bc497d4 100644 Binary files a/frontend/__snapshots__/scenes-app-errortracking--group-page--dark.png and b/frontend/__snapshots__/scenes-app-errortracking--group-page--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-errortracking--group-page--light.png b/frontend/__snapshots__/scenes-app-errortracking--group-page--light.png index 6afb89461f2..10b0e390935 100644 Binary files a/frontend/__snapshots__/scenes-app-errortracking--group-page--light.png and b/frontend/__snapshots__/scenes-app-errortracking--group-page--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png index 78ea79ea3f7..25ab8cd06fd 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-multi--light.png b/frontend/__snapshots__/scenes-app-insights--trends-line-multi--light.png index c22f4a19b18..fbc4e442860 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-multi--light.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-multi--light.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--empty-thread-loading--dark.png b/frontend/__snapshots__/scenes-app-max-ai--empty-thread-loading--dark.png index eedf3e01916..8c043460ab5 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--empty-thread-loading--dark.png and b/frontend/__snapshots__/scenes-app-max-ai--empty-thread-loading--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--empty-thread-loading--light.png b/frontend/__snapshots__/scenes-app-max-ai--empty-thread-loading--light.png index c19e174167b..a042e88608e 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--empty-thread-loading--light.png and b/frontend/__snapshots__/scenes-app-max-ai--empty-thread-loading--light.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--generation-failure-thread--dark.png b/frontend/__snapshots__/scenes-app-max-ai--generation-failure-thread--dark.png index 84a2a7828cd..69db9b395ec 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--generation-failure-thread--dark.png and b/frontend/__snapshots__/scenes-app-max-ai--generation-failure-thread--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--generation-failure-thread--light.png b/frontend/__snapshots__/scenes-app-max-ai--generation-failure-thread--light.png index 43ad8593f41..4a9c5c33c2c 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--generation-failure-thread--light.png and b/frontend/__snapshots__/scenes-app-max-ai--generation-failure-thread--light.png differ diff --git 
a/frontend/__snapshots__/scenes-app-max-ai--thread--dark.png b/frontend/__snapshots__/scenes-app-max-ai--thread--dark.png index 80aded4c794..3e3689533ff 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--thread--dark.png and b/frontend/__snapshots__/scenes-app-max-ai--thread--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--thread--light.png b/frontend/__snapshots__/scenes-app-max-ai--thread--light.png index 22c88333171..beb2dab935b 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--thread--light.png and b/frontend/__snapshots__/scenes-app-max-ai--thread--light.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--thread-with-failed-generation--dark.png b/frontend/__snapshots__/scenes-app-max-ai--thread-with-failed-generation--dark.png index eccd3471a1f..e22dc0db870 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--thread-with-failed-generation--dark.png and b/frontend/__snapshots__/scenes-app-max-ai--thread-with-failed-generation--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--thread-with-failed-generation--light.png b/frontend/__snapshots__/scenes-app-max-ai--thread-with-failed-generation--light.png index da0efa6c70d..483b9a3ff88 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--thread-with-failed-generation--light.png and b/frontend/__snapshots__/scenes-app-max-ai--thread-with-failed-generation--light.png differ diff --git a/frontend/public/services/brevo.png b/frontend/public/services/brevo.png new file mode 100644 index 00000000000..65dfdbac803 Binary files /dev/null and b/frontend/public/services/brevo.png differ diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts index d91ab7592de..c4e17361415 100644 --- a/frontend/src/lib/api.ts +++ b/frontend/src/lib/api.ts @@ -719,6 +719,10 @@ class ApiRequest { return this.errorTracking().addPathComponent('upload_source_maps') } + public errorTrackingStackFrames(ids: string[]): ApiRequest { + return this.errorTracking().addPathComponent('stack_frames').withQueryString({ ids }) + } + // # Warehouse public dataWarehouseTables(teamId?: TeamType['id']): ApiRequest { return this.projectsDetail(teamId).addPathComponent('warehouse_tables') @@ -1857,6 +1861,10 @@ const api = { async uploadSourceMaps(data: FormData): Promise<{ content: string }> { return await new ApiRequest().errorTrackingUploadSourceMaps().create({ data }) }, + + async fetchStackFrames(ids: string[]): Promise<{ content: string }> { + return await new ApiRequest().errorTrackingStackFrames(ids).get() + }, }, recordings: { diff --git a/frontend/src/lib/components/Errors/ErrorDisplay.scss b/frontend/src/lib/components/Errors/ErrorDisplay.scss new file mode 100644 index 00000000000..11b989d2022 --- /dev/null +++ b/frontend/src/lib/components/Errors/ErrorDisplay.scss @@ -0,0 +1,6 @@ +.ErrorDisplay__stacktrace { + .LemonCollapsePanel__header { + min-height: 2.375rem !important; + padding: 0.25rem !important; + } +} diff --git a/frontend/src/lib/components/Errors/ErrorDisplay.tsx b/frontend/src/lib/components/Errors/ErrorDisplay.tsx index 58770420674..604b80fcf68 100644 --- a/frontend/src/lib/components/Errors/ErrorDisplay.tsx +++ b/frontend/src/lib/components/Errors/ErrorDisplay.tsx @@ -1,71 +1,67 @@ +import './ErrorDisplay.scss' + import { IconFlag } from '@posthog/icons' +import { LemonCollapse } from '@posthog/lemon-ui' import { TitledSnack } from 'lib/components/TitledSnack' import { LemonDivider } from 'lib/lemon-ui/LemonDivider' import { LemonSwitch } from 
'lib/lemon-ui/LemonSwitch' import { LemonTag } from 'lib/lemon-ui/LemonTag/LemonTag' import { Link } from 'lib/lemon-ui/Link' -import posthog from 'posthog-js' import { useState } from 'react' import { EventType } from '~/types' -interface StackFrame { - filename: string - lineno: number - colno: number - function: string - context_line?: string - in_app?: boolean +import { StackFrame } from './stackFrameLogic' + +interface RawStackTrace { + type: 'raw' + frames: StackFrame[] +} +interface ResolvedStackTrace { + type: 'resolved' + frames: StackFrame[] } -interface ExceptionTrace { - stacktrace: { - frames: StackFrame[] - } +interface Exception { + stacktrace: ResolvedStackTrace | RawStackTrace module: string type: string value: string } -function parseToFrames(rawTrace: string): StackFrame[] { - return JSON.parse(rawTrace) +function StackTrace({ frames, showAllFrames }: { frames: StackFrame[]; showAllFrames: boolean }): JSX.Element | null { + const displayFrames = showAllFrames ? frames : frames.filter((f) => f.in_app) + + const panels = displayFrames.map(({ filename, lineno, colno, function: functionName }, index) => { + return { + key: index, + header: ( +
<div className="flex flex-wrap gap-x-1">
+                    <span>{filename}</span>
+                    {functionName ? (
+                        <div className="flex gap-x-0.5">
+                            <span className="text-muted">in</span>
+                            <span>{functionName}</span>
+                        </div>
+                    ) : null}
+                    {lineno && colno ? (
+                        <div className="flex gap-x-0.5">
+                            <span className="text-muted">at line</span>
+                            <span>
+                                {lineno}:{colno}
+                            </span>
+                        </div>
+                    ) : null}
+                </div>
+            ),
+            content: null,
+        }
+    })
+
+    return <LemonCollapse multiple panels={panels} />
 }
-function StackTrace({ rawTrace, showAllFrames }: { rawTrace: string; showAllFrames: boolean }): JSX.Element | null {
-    try {
-        const frames = parseToFrames(rawTrace)
-        return (
-            <>
-                {frames.length ? (
-                    frames.map((frame, index) => {
-                        const { filename, lineno, colno, function: functionName, context_line, in_app } = frame
-
-                        return showAllFrames || in_app ? (
-                            <TitledSnack
-                                key={index}
-                                title={functionName}
-                                value={
-                                    <>
-                                        {filename}:{lineno}:{colno}
-                                        {context_line ? `:${context_line}` : ''}
-                                    </>
-                                }
-                            />
-                        ) : null
-                    })
-                ) : (
-                    <LemonTag>Empty stack trace</LemonTag>
-                )}
-            </>
-        )
-    } catch (e: any) {
-        //very meta
-        posthog.capture('Cannot parse stack trace in Exception event', { tag: 'error-display-stack-trace', e })
-        return <LemonTag>Error parsing stack trace</LemonTag>
-    }
-}
-
-function ChainedStackTraces({ exceptionList }: { exceptionList: ExceptionTrace[] }): JSX.Element {
+function ChainedStackTraces({ exceptionList }: { exceptionList: Exception[] }): JSX.Element {
     const [showAllFrames, setShowAllFrames] = useState(false)
     return (
@@ -89,9 +85,9 @@ function ChainedStackTraces({ exceptionList }: { exceptionList: ExceptionTrace[]
             }
             return (
-                <div key={index} className="flex flex-col gap-1 mt-6">
+                <div key={index} className="ErrorDisplay__stacktrace flex flex-col gap-1 mt-6">
                     <h3 className="mb-0">{value}</h3>
                     <LemonDivider dashed={true} />
-                    <StackTrace rawTrace={stacktrace} showAllFrames={showAllFrames} />
+                    <StackTrace frames={stacktrace.frames} showAllFrames={showAllFrames} />
                </div>
            )
        })}
diff --git a/frontend/src/lib/components/Errors/stackFrameLogic.tsx b/frontend/src/lib/components/Errors/stackFrameLogic.tsx
new file mode 100644
index 00000000000..3852055d12b
--- /dev/null
+++ b/frontend/src/lib/components/Errors/stackFrameLogic.tsx
@@ -0,0 +1,30 @@
+import { kea, path } from 'kea'
+import { loaders } from 'kea-loaders'
+import api from 'lib/api'
+
+import type { stackFrameLogicType } from './stackFrameLogicType'
+
+export interface StackFrame {
+    filename: string
+    lineno: number
+    colno: number
+    function: string
+    in_app?: boolean
+}
+
+export const stackFrameLogic = kea<stackFrameLogicType>([
+    path(['components', 'Errors', 'stackFrameLogic']),
+    loaders(({ values }) => ({
+        stackFrames: [
+            {} as Record<string, StackFrame>,
+            {
+                loadFrames: async ({ frameIds }: { frameIds: string[] }) => {
+                    const loadedFrameIds = Object.keys(values.stackFrames)
+                    // Only fetch frames that haven't been loaded yet
+                    const ids = frameIds.filter((id) => !loadedFrameIds.includes(id))
+                    await api.errorTracking.fetchStackFrames(ids)
+                    return {}
+                },
+            },
+        ],
+    })),
+])
diff --git a/frontend/src/lib/constants.tsx b/frontend/src/lib/constants.tsx
index 881e9691b4f..b3edb1d6971 100644
--- a/frontend/src/lib/constants.tsx
+++ b/frontend/src/lib/constants.tsx
@@ -169,6 +169,7 @@ export const FEATURE_FLAGS = {
     SURVEYS_EVENTS: 'surveys-events', // owner: #team-feature-success
     SURVEYS_ACTIONS: 'surveys-actions', // owner: #team-feature-success
     SURVEYS_RECURRING: 'surveys-recurring', // owner: #team-feature-success
+    SURVEYS_ADAPTIVE_COLLECTION: 'surveys-adaptive-collection', // owner: #team-feature-success
     YEAR_IN_HOG: 'year-in-hog', // owner: #team-replay
     SESSION_REPLAY_EXPORT_MOBILE_DATA: 'session-replay-export-mobile-data', // owner: #team-replay
     DISCUSSIONS: 'discussions', // owner: #team-replay
diff --git a/frontend/src/lib/hooks/useUploadFiles.ts b/frontend/src/lib/hooks/useUploadFiles.ts
index f486c506495..62d09217cbd 100644
--- a/frontend/src/lib/hooks/useUploadFiles.ts
+++ b/frontend/src/lib/hooks/useUploadFiles.ts
@@ -1,5 +1,5 @@
 import api from 'lib/api'
-import { useEffect, useState } from 'react'
+import { useEffect, useRef, useState } from 'react'
 import { MediaUploadResponse } from '~/types'
@@ -47,14 +47,17 @@ export function useUploadFiles({
 } {
     const [uploading, setUploading] = useState(false)
     const [filesToUpload, setFilesToUpload] = useState<File[]>([])
+    const uploadInProgressRef = useRef(false)
+
     useEffect(() => {
         const uploadFiles = async (): Promise<void> => {
-            if (filesToUpload.length === 0) {
+            if (filesToUpload.length === 0 || uploadInProgressRef.current) {
                 setUploading(false)
                 return
             }
             try {
+                uploadInProgressRef.current = true
                 setUploading(true)
                 const file: File = filesToUpload[0]
                 const media = await uploadFile(file)
@@ -63,6 +66,7 @@
                 const errorDetail = (error as any).detail || 'unknown error'
                 onError(errorDetail)
             } finally {
+                uploadInProgressRef.current = false
                 setUploading(false)
                 setFilesToUpload([])
             }
diff --git a/frontend/src/lib/lemon-ui/LemonCollapse/LemonCollapse.scss b/frontend/src/lib/lemon-ui/LemonCollapse/LemonCollapse.scss
index 6e7dfba7a76..5a27fde4136 100644
--- a/frontend/src/lib/lemon-ui/LemonCollapse/LemonCollapse.scss
+++ b/frontend/src/lib/lemon-ui/LemonCollapse/LemonCollapse.scss
@@ -32,6 +32,11 @@
         &.LemonButton:active {
             transform: inherit;
         }
+
+        &--disabled:hover {
+            cursor: default;
+            background-color: var(--bg-light) !important;
+        }
     }
     .LemonCollapsePanel__body {
diff --git a/frontend/src/lib/lemon-ui/LemonCollapse/LemonCollapse.tsx b/frontend/src/lib/lemon-ui/LemonCollapse/LemonCollapse.tsx
index
640bac2ab6d..36e67c77447 100644 --- a/frontend/src/lib/lemon-ui/LemonCollapse/LemonCollapse.tsx +++ b/frontend/src/lib/lemon-ui/LemonCollapse/LemonCollapse.tsx @@ -116,18 +116,28 @@ function LemonCollapsePanel({ return (
-            <LemonButton
-                onClick={() => {
-                    onHeaderClick && onHeaderClick()
-                    onChange(!isExpanded)
-                }}
-                icon={isExpanded ? <IconCollapse /> : <IconExpand />}
-                className="LemonCollapsePanel__header"
-                {...(dataAttr ? { 'data-attr': dataAttr } : {})}
-                size={size}
-            >
-                {header}
-            </LemonButton>
+            {content ? (
+                <LemonButton
+                    onClick={() => {
+                        onHeaderClick && onHeaderClick()
+                        onChange(!isExpanded)
+                    }}
+                    icon={isExpanded ? <IconCollapse /> : <IconExpand />}
+                    className="LemonCollapsePanel__header"
+                    {...(dataAttr ? { 'data-attr': dataAttr } : {})}
+                    size={size}
+                >
+                    {header}
+                </LemonButton>
+            ) : (
+                <div className="LemonCollapsePanel__header LemonCollapsePanel__header--disabled">
+                    {header}
+                </div>
+            )}
             {(status) => (
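The queries schema hunk that follows threads a done flag through every assistant message type. The reason is spelled out in the AssistantMessage doc comment: streaming currently re-sends the whole message on every token, so consumers need an explicit terminal marker. A small self-contained Python sketch of that consumer-side pattern (the chunk shape is hypothetical):

```python
from dataclasses import dataclass
from typing import Iterator


@dataclass
class AssistantChunk:
    """Hypothetical wire shape: full content so far, plus a terminal flag."""

    content: str
    done: bool = False


def stream_message(full_text: str) -> Iterator[AssistantChunk]:
    # Until streaming is optimized, every update carries the whole prefix...
    for i in range(1, len(full_text) + 1):
        yield AssistantChunk(content=full_text[:i])
    # ...so the consumer needs `done` to know which update is final.
    yield AssistantChunk(content=full_text, done=True)


for update in stream_message("Hi!"):
    if update.done:
        print("final message:", update.content)
```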
diff --git a/frontend/src/queries/schema.ts b/frontend/src/queries/schema.ts
 export interface ExperimentFunnelsQuery extends DataNode<ExperimentFunnelsQueryResponse> {
     kind: NodeKind.ExperimentFunnelsQuery
+    name?: string
+    experiment_id?: integer
     funnels_query: FunnelsQuery
-    experiment_id: integer
 }
 export interface ExperimentTrendsQuery extends DataNode<ExperimentTrendsQueryResponse> {
     kind: NodeKind.ExperimentTrendsQuery
+    name?: string
+    experiment_id?: integer
     count_query: TrendsQuery
     // Defaults to $feature_flag_called if not specified
     // https://github.com/PostHog/posthog/blob/master/posthog/hogql_queries/experiments/experiment_trends_query_runner.py
     exposure_query?: TrendsQuery
-    experiment_id: integer
 }
 /**
@@ -2478,11 +2480,18 @@ export enum AssistantMessageType {
 export interface HumanMessage {
     type: AssistantMessageType.Human
     content: string
+    /** Human messages are only appended when done. */
+    done: true
 }
 export interface AssistantMessage {
     type: AssistantMessageType.Assistant
     content: string
+    /**
+     * We only need this "done" value to tell when the particular message is finished during its streaming.
+     * It won't be necessary when we optimize streaming to NOT send the entire message every time a character is added.
+     */
+    done?: boolean
 }
 export interface VisualizationMessage {
@@ -2490,16 +2499,20 @@
     plan?: string
     reasoning_steps?: string[] | null
     answer?: AssistantTrendsQuery | AssistantFunnelsQuery
+    done?: boolean
 }
 export interface FailureMessage {
     type: AssistantMessageType.Failure
     content?: string
+    done: true
 }
 export interface RouterMessage {
     type: AssistantMessageType.Router
     content: string
+    /** Router messages are not streamed, so they can only be done. */
+    done: true
 }
 export type RootAssistantMessage =
diff --git a/frontend/src/scenes/authentication/signup/signupForm/signupLogic.ts b/frontend/src/scenes/authentication/signup/signupForm/signupLogic.ts
index 1b458e909e0..d1257595c38 100644
--- a/frontend/src/scenes/authentication/signup/signupForm/signupLogic.ts
+++ b/frontend/src/scenes/authentication/signup/signupForm/signupLogic.ts
@@ -67,7 +67,7 @@ export const signupLogic = kea<signupLogicType>([
                 password: !values.preflight?.demo
                     ? !password
                         ? 'Please enter your password to continue'
-                        : values.validatedPassword.feedback
+                        : values.validatedPassword.feedback || undefined
                     : undefined,
             }),
             submit: async () => {
diff --git a/frontend/src/scenes/billing/Billing.tsx b/frontend/src/scenes/billing/Billing.tsx
index fd06e8a6785..5e950359b5f 100644
--- a/frontend/src/scenes/billing/Billing.tsx
+++ b/frontend/src/scenes/billing/Billing.tsx
@@ -21,6 +21,7 @@
 import { BillingCTAHero } from './BillingCTAHero'
 import { billingLogic } from './billingLogic'
 import { BillingProduct } from './BillingProduct'
 import { CreditCTAHero } from './CreditCTAHero'
+import { PaymentEntryModal } from './PaymentEntryModal'
 import { UnsubscribeCard } from './UnsubscribeCard'
 export const scene: SceneExport = {
@@ -82,6 +83,8 @@ export function Billing(): JSX.Element {
     const platformAndSupportProduct = products?.find((product) => product.type === 'platform_and_support')
     return (
+            <PaymentEntryModal />
+
             {showLicenseDirectInput && (
                 <>
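The PaymentEntryModal changes below replace an eagerly created module-level stripePromise with an import() that only runs once the modal is opened. The same load-on-first-use idea in a minimal Python sketch (the stdlib json module stands in for the heavy SDK):

```python
import importlib
from types import ModuleType
from typing import Optional

_sdk: Optional[ModuleType] = None


def load_sdk() -> ModuleType:
    """Import the dependency on first use only, then reuse the cached module."""
    global _sdk
    if _sdk is None:
        _sdk = importlib.import_module("json")  # stand-in for a heavy third-party SDK
    return _sdk


assert load_sdk() is load_sdk()  # loaded once, reused afterwards
```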
diff --git a/frontend/src/scenes/billing/PaymentEntryModal.tsx b/frontend/src/scenes/billing/PaymentEntryModal.tsx
index c4580450167..38fc535191f 100644
--- a/frontend/src/scenes/billing/PaymentEntryModal.tsx
+++ b/frontend/src/scenes/billing/PaymentEntryModal.tsx
@@ -1,12 +1,13 @@
 import { LemonButton, LemonModal, Spinner } from '@posthog/lemon-ui'
 import { Elements, PaymentElement, useElements, useStripe } from '@stripe/react-stripe-js'
-import { loadStripe } from '@stripe/stripe-js'
 import { useActions, useValues } from 'kea'
-import { useEffect } from 'react'
+import { WavingHog } from 'lib/components/hedgehogs'
+import { useEffect, useState } from 'react'
+import { urls } from 'scenes/urls'
 import { paymentEntryLogic } from './paymentEntryLogic'
-const stripePromise = loadStripe(window.STRIPE_PUBLIC_KEY!)
+const stripeJs = async (): Promise<typeof import('@stripe/stripe-js')> => await import('@stripe/stripe-js')
 export const PaymentForm = (): JSX.Element => {
     const { error, isLoading } = useValues(paymentEntryLogic)
@@ -34,13 +35,17 @@
             setLoading(false)
             setError(result.error.message)
         } else {
-            pollAuthorizationStatus()
+            pollAuthorizationStatus(result.paymentIntent.id)
         }
     }
     return (
         <div>
             <PaymentElement />
+            <div>
+                Your card will not be charged, but we will place a $0.50 hold on it to verify your card; the hold
+                will be released in 7 days.
+            </div>
             {error && <div>{error}</div>}
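The hunk above swaps the module-scope `loadStripe` call for a dynamic import, so the Stripe SDK is only fetched when it is actually needed. A minimal sketch of that lazy-load pattern, assuming `window.STRIPE_PUBLIC_KEY` is injected as in the diff:

import type { Stripe } from '@stripe/stripe-js'

let stripePromise: Promise<Stripe | null> | undefined

export function getStripe(): Promise<Stripe | null> {
    // The import() only runs on first use, keeping @stripe/stripe-js out of the
    // initial bundle; subsequent callers share the cached promise.
    if (!stripePromise) {
        stripePromise = import('@stripe/stripe-js').then(({ loadStripe }) =>
            loadStripe((window as any).STRIPE_PUBLIC_KEY as string)
        )
    }
    return stripePromise
}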
@@ -58,21 +63,38 @@ interface PaymentEntryModalProps {
     redirectPath?: string | null
 }

-export const PaymentEntryModal = ({ redirectPath = null }: PaymentEntryModalProps): JSX.Element | null => {
+export const PaymentEntryModal = ({
+    redirectPath = urls.organizationBilling(),
+}: PaymentEntryModalProps): JSX.Element => {
     const { clientSecret, paymentEntryModalOpen } = useValues(paymentEntryLogic)
     const { hidePaymentEntryModal, initiateAuthorization } = useActions(paymentEntryLogic)
+    const [stripePromise, setStripePromise] = useState(null)

     useEffect(() => {
-        initiateAuthorization(redirectPath)
-    }, [redirectPath])
+        // Only load Stripe.js when the modal is opened
+        if (paymentEntryModalOpen && !stripePromise) {
+            const loadStripeJs = async (): Promise<void> => {
+                const { loadStripe } = await stripeJs()
+                const publicKey = window.STRIPE_PUBLIC_KEY!
+                setStripePromise(await loadStripe(publicKey))
+            }
+            void loadStripeJs()
+        }
+    }, [paymentEntryModalOpen, stripePromise])
+
+    useEffect(() => {
+        if (paymentEntryModalOpen) {
+            initiateAuthorization(redirectPath)
+        }
+    }, [paymentEntryModalOpen, initiateAuthorization, redirectPath])

     return (
             {clientSecret ? (
@@ -80,9 +102,13 @@ export const PaymentEntryModal = ({ redirectPath = null }: PaymentEntryModalProp
                 ) : (
-                    <div>
-                        <Spinner />
-                    </div>
+                    <div>
+                        <p>
+                            We're contacting the Hedgehogs for approval.
+                        </p>
+                        <div>
+                            <Spinner />
+                            <WavingHog />
+                        </div>
+                    </div>
                 )}
diff --git a/frontend/src/scenes/billing/paymentEntryLogic.ts b/frontend/src/scenes/billing/paymentEntryLogic.ts
index ebedbfe8b8a..ad2b84d0f80 100644
--- a/frontend/src/scenes/billing/paymentEntryLogic.ts
+++ b/frontend/src/scenes/billing/paymentEntryLogic.ts
@@ -12,7 +12,7 @@ export const paymentEntryLogic = kea({
     setLoading: (loading) => ({ loading }),
     setError: (error) => ({ error }),
     initiateAuthorization: (redirectPath: string | null) => ({ redirectPath }),
-    pollAuthorizationStatus: true,
+    pollAuthorizationStatus: (paymentIntentId?: string) => ({ paymentIntentId }),
     setAuthorizationStatus: (status: string | null) => ({ status }),
     showPaymentEntryModal: true,
     hidePaymentEntryModal: true,
@@ -73,7 +73,7 @@
         }
     },

-    pollAuthorizationStatus: async () => {
+    pollAuthorizationStatus: async ({ paymentIntentId }) => {
         const pollInterval = 2000 // Poll every 2 seconds
         const maxAttempts = 30 // Max 1 minute of polling (30 * 2 seconds)
         let attempts = 0
@@ -81,9 +81,9 @@
         const poll = async (): Promise<void> => {
             try {
                 const urlParams = new URLSearchParams(window.location.search)
-                const paymentIntentId = urlParams.get('payment_intent')
+                const searchPaymentIntentId = urlParams.get('payment_intent')
                 const response = await api.create('api/billing/activate/authorize/status', {
-                    payment_intent_id: paymentIntentId,
+                    payment_intent_id: paymentIntentId || searchPaymentIntentId,
                 })

                 const status = response.status
diff --git a/frontend/src/scenes/data-warehouse/editor/QueryWindow.tsx b/frontend/src/scenes/data-warehouse/editor/QueryWindow.tsx
index c0dac586cce..c2e9ed092af 100644
--- a/frontend/src/scenes/data-warehouse/editor/QueryWindow.tsx
+++ b/frontend/src/scenes/data-warehouse/editor/QueryWindow.tsx
@@ -60,7 +60,7 @@ export function QueryWindow(): JSX.Element {
                 onQueryInputChange={runQuery}
                 onSave={saveAsView}
                 saveDisabledReason={
-                    hasErrors ? error ?? 'Query has errors' : !isValidView ? 'All fields must have an alias' : ''
+                    hasErrors ? error ?? 'Query has errors' : !isValidView ? 'Some fields may need an alias' : ''
                 }
             />
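The paymentEntryLogic change above threads the payment intent id through `pollAuthorizationStatus` instead of relying only on the `payment_intent` URL parameter, which exists solely in redirect-based flows. A sketch of that fallback logic; `checkStatus` stands in for the `api/billing/activate/authorize/status` call:

type AuthorizationStatus = 'pending' | 'success' | 'failed'

async function pollAuthorizationStatus(
    checkStatus: (paymentIntentId: string) => Promise<AuthorizationStatus>,
    paymentIntentId?: string,
    { intervalMs = 2000, maxAttempts = 30 } = {} // ~1 minute of polling, as in the diff
): Promise<AuthorizationStatus | 'timed_out'> {
    // Prefer the id handed back by stripe.confirmPayment(); fall back to the URL
    // parameter for redirect-based flows.
    const id = paymentIntentId ?? new URLSearchParams(window.location.search).get('payment_intent')
    if (!id) {
        return 'failed'
    }
    for (let attempt = 0; attempt < maxAttempts; attempt++) {
        const status = await checkStatus(id)
        if (status !== 'pending') {
            return status
        }
        await new Promise((resolve) => setTimeout(resolve, intervalMs))
    }
    return 'timed_out'
}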
diff --git a/frontend/src/scenes/experiments/Experiment.stories.tsx b/frontend/src/scenes/experiments/Experiment.stories.tsx index 77d3637ba42..737b7f2973c 100644 --- a/frontend/src/scenes/experiments/Experiment.stories.tsx +++ b/frontend/src/scenes/experiments/Experiment.stories.tsx @@ -117,6 +117,7 @@ const MOCK_FUNNEL_EXPERIMENT: Experiment = { filter_test_accounts: true, }, metrics: [], + metrics_secondary: [], archived: false, created_by: { id: 1, @@ -174,6 +175,7 @@ const MOCK_TREND_EXPERIMENT: Experiment = { }, }, metrics: [], + metrics_secondary: [], parameters: { feature_flag_variants: [ { @@ -281,6 +283,7 @@ const MOCK_WEB_EXPERIMENT_MANY_VARIANTS: Experiment = { }, }, metrics: [], + metrics_secondary: [], parameters: { feature_flag_variants: [ { @@ -403,6 +406,7 @@ const MOCK_TREND_EXPERIMENT_MANY_VARIANTS: Experiment = { }, }, metrics: [], + metrics_secondary: [], parameters: { feature_flag_variants: [ { diff --git a/frontend/src/scenes/experiments/ExperimentForm.tsx b/frontend/src/scenes/experiments/ExperimentForm.tsx index 803e08e2031..cb7c1762cbc 100644 --- a/frontend/src/scenes/experiments/ExperimentForm.tsx +++ b/frontend/src/scenes/experiments/ExperimentForm.tsx @@ -17,14 +17,8 @@ import { experimentLogic } from './experimentLogic' const ExperimentFormFields = (): JSX.Element => { const { experiment, featureFlags, groupTypes, aggregationLabel, dynamicFeatureFlagKey } = useValues(experimentLogic) - const { - addExperimentGroup, - removeExperimentGroup, - setExperiment, - setNewExperimentInsight, - createExperiment, - setExperimentType, - } = useActions(experimentLogic) + const { addExperimentGroup, removeExperimentGroup, setExperiment, createExperiment, setExperimentType } = + useActions(experimentLogic) const { webExperimentsAvailable } = useValues(experimentsLogic) return ( @@ -130,7 +124,6 @@ const ExperimentFormFields = (): JSX.Element => { aggregation_group_type_index: groupTypeIndex ?? 
undefined, }, }) - setNewExperimentInsight() }} options={[ { value: -1, label: 'Persons' }, diff --git a/frontend/src/scenes/experiments/ExperimentView/CumulativeExposuresChart.tsx b/frontend/src/scenes/experiments/ExperimentView/CumulativeExposuresChart.tsx index 95853426f53..575eb84c527 100644 --- a/frontend/src/scenes/experiments/ExperimentView/CumulativeExposuresChart.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/CumulativeExposuresChart.tsx @@ -1,17 +1,17 @@ import { IconInfo } from '@posthog/icons' import { Tooltip } from '@posthog/lemon-ui' import { useValues } from 'kea' +import { FEATURE_FLAGS } from 'lib/constants' +import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { InsightEmptyState } from 'scenes/insights/EmptyStates' import { InsightViz } from '~/queries/nodes/InsightViz/InsightViz' import { queryFromFilters } from '~/queries/nodes/InsightViz/utils' -import { InsightQueryNode, InsightVizNode, NodeKind } from '~/queries/schema' +import { CachedExperimentTrendsQueryResponse, InsightQueryNode, InsightVizNode, NodeKind } from '~/queries/schema' import { _TrendsExperimentResults, BaseMathType, ChartDisplayType, - Experiment, - ExperimentResults, InsightType, PropertyFilterType, PropertyOperator, @@ -20,68 +20,113 @@ import { import { experimentLogic } from '../experimentLogic' import { transformResultFilters } from '../utils' -const getCumulativeExposuresQuery = ( - experiment: Experiment, - experimentResults: ExperimentResults['result'] -): InsightVizNode => { - const experimentInsightType = experiment.filters?.insight || InsightType.TRENDS +export function CumulativeExposuresChart(): JSX.Element { + const { experiment, experimentResults, getMetricType } = useValues(experimentLogic) + const { featureFlags } = useValues(featureFlagLogic) + + const metricIdx = 0 + const metricType = getMetricType(metricIdx) const variants = experiment.parameters?.feature_flag_variants?.map((variant) => variant.key) || [] if (experiment.holdout) { variants.push(`holdout-${experiment.holdout.id}`) } - // Trends Experiment - if (experimentInsightType === InsightType.TRENDS && experiment.parameters?.custom_exposure_filter) { - const trendResults = experimentResults as _TrendsExperimentResults - const queryFilters = { - ...trendResults.exposure_filters, - display: ChartDisplayType.ActionsLineGraphCumulative, - } as _TrendsExperimentResults['exposure_filters'] - return queryFromFilters(transformResultFilters(queryFilters)) - } - return { - kind: NodeKind.InsightVizNode, - source: { - kind: NodeKind.TrendsQuery, - dateRange: { - date_from: experiment.start_date, - date_to: experiment.end_date, - }, - interval: 'day', - trendsFilter: { - display: ChartDisplayType.ActionsLineGraphCumulative, - showLegend: false, - smoothingIntervals: 1, - }, - series: [ - { - kind: NodeKind.EventsNode, - event: - experimentInsightType === InsightType.TRENDS - ? 
'$feature_flag_called' - : experiment.filters?.events?.[0]?.name, - math: BaseMathType.UniqueUsers, - properties: [ + let query + + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + if (metricType === InsightType.TRENDS) { + query = { + kind: NodeKind.InsightVizNode, + source: (experimentResults as CachedExperimentTrendsQueryResponse).exposure_query, + } + } else { + query = { + kind: NodeKind.InsightVizNode, + source: { + kind: NodeKind.TrendsQuery, + dateRange: { + date_from: experiment.start_date, + date_to: experiment.end_date, + }, + interval: 'day', + trendsFilter: { + display: ChartDisplayType.ActionsLineGraphCumulative, + showLegend: false, + smoothingIntervals: 1, + }, + series: [ { - key: `$feature/${experiment.feature_flag_key}`, - value: variants, - operator: PropertyOperator.Exact, - type: PropertyFilterType.Event, + kind: NodeKind.EventsNode, + event: experiment.filters?.events?.[0]?.name, + math: BaseMathType.UniqueUsers, + properties: [ + { + key: `$feature/${experiment.feature_flag_key}`, + value: variants, + operator: PropertyOperator.Exact, + type: PropertyFilterType.Event, + }, + ], }, ], + breakdownFilter: { + breakdown: `$feature/${experiment.feature_flag_key}`, + breakdown_type: 'event', + }, }, - ], - breakdownFilter: { - breakdown: `$feature/${experiment.feature_flag_key}`, - breakdown_type: 'event', - }, - }, + } + } + } else { + if (metricType === InsightType.TRENDS && experiment.parameters?.custom_exposure_filter) { + const trendResults = experimentResults as _TrendsExperimentResults + const queryFilters = { + ...trendResults.exposure_filters, + display: ChartDisplayType.ActionsLineGraphCumulative, + } as _TrendsExperimentResults['exposure_filters'] + query = queryFromFilters(transformResultFilters(queryFilters)) + } else { + query = { + kind: NodeKind.InsightVizNode, + source: { + kind: NodeKind.TrendsQuery, + dateRange: { + date_from: experiment.start_date, + date_to: experiment.end_date, + }, + interval: 'day', + trendsFilter: { + display: ChartDisplayType.ActionsLineGraphCumulative, + showLegend: false, + smoothingIntervals: 1, + }, + series: [ + { + kind: NodeKind.EventsNode, + event: + metricType === InsightType.TRENDS + ? '$feature_flag_called' + : experiment.filters?.events?.[0]?.name, + math: BaseMathType.UniqueUsers, + properties: [ + { + key: `$feature/${experiment.feature_flag_key}`, + value: variants, + operator: PropertyOperator.Exact, + type: PropertyFilterType.Event, + }, + ], + }, + ], + breakdownFilter: { + breakdown: `$feature/${experiment.feature_flag_key}`, + breakdown_type: 'event', + }, + }, + } + } } -} - -export function CumulativeExposuresChart(): JSX.Element { - const { experiment, experimentResults } = useValues(experimentLogic) return (
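The CumulativeExposuresChart rewrite above gates query construction on the EXPERIMENTS_HOGQL flag: on the new path the exposure query arrives ready-made on the cached query-runner response, while the legacy path rebuilds a cumulative trends query from stored filters. The branching, reduced to its skeleton (names simplified, not the actual component code):

interface InsightVizNodeLike {
    kind: 'InsightVizNode'
    source: unknown
}

function buildExposuresQuery(
    hogqlMigrationEnabled: boolean,
    responseExposureQuery: unknown,
    buildLegacyQuery: () => InsightVizNodeLike
): InsightVizNodeLike {
    if (hogqlMigrationEnabled && responseExposureQuery) {
        // New path: the experiment query runner already returns the exposure query
        // alongside the cached results, so it can be rendered directly.
        return { kind: 'InsightVizNode', source: responseExposureQuery }
    }
    // Legacy path: rebuild a cumulative trends query from the experiment's saved filters.
    return buildLegacyQuery()
}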
@@ -94,7 +139,7 @@ export function CumulativeExposuresChart(): JSX.Element { {experiment.start_date ? ( ), showTable: true, }} setQuery={() => {}} diff --git a/frontend/src/scenes/experiments/ExperimentView/DataCollection.tsx b/frontend/src/scenes/experiments/ExperimentView/DataCollection.tsx index 214d38aa209..b6a69aeabab 100644 --- a/frontend/src/scenes/experiments/ExperimentView/DataCollection.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/DataCollection.tsx @@ -19,7 +19,7 @@ export function DataCollection(): JSX.Element { const { experimentId, experiment, - experimentInsightType, + getMetricType, funnelResultsPersonsTotal, actualRunningTime, minimumDetectableEffect, @@ -27,11 +27,13 @@ export function DataCollection(): JSX.Element { const { openExperimentCollectionGoalModal } = useActions(experimentLogic) + const metricType = getMetricType(0) + const recommendedRunningTime = experiment?.parameters?.recommended_running_time || 1 const recommendedSampleSize = experiment?.parameters?.recommended_sample_size || 100 const experimentProgressPercent = - experimentInsightType === InsightType.FUNNELS + metricType === InsightType.FUNNELS ? (funnelResultsPersonsTotal / recommendedSampleSize) * 100 : (actualRunningTime / recommendedRunningTime) * 100 @@ -83,7 +85,7 @@ export function DataCollection(): JSX.Element { size="large" percent={experimentProgressPercent} /> - {experimentInsightType === InsightType.TRENDS && ( + {metricType === InsightType.TRENDS && (
Completed  @@ -103,7 +105,7 @@ export function DataCollection(): JSX.Element {
)} - {experimentInsightType === InsightType.FUNNELS && ( + {metricType === InsightType.FUNNELS && (
@@ -170,11 +172,19 @@ export function DataCollection(): JSX.Element { } export function DataCollectionGoalModal({ experimentId }: { experimentId: Experiment['id'] }): JSX.Element { - const { isExperimentCollectionGoalModalOpen, goalInsightDataLoading } = useValues(experimentLogic({ experimentId })) + const { + isExperimentCollectionGoalModalOpen, + getMetricType, + trendMetricInsightLoading, + funnelMetricInsightLoading, + } = useValues(experimentLogic({ experimentId })) const { closeExperimentCollectionGoalModal, updateExperimentCollectionGoal } = useActions( experimentLogic({ experimentId }) ) + const isInsightLoading = + getMetricType(0) === InsightType.TRENDS ? trendMetricInsightLoading : funnelMetricInsightLoading + return ( } > - {goalInsightDataLoading ? ( + {isInsightLoading ? (
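DataCollection above replaces the experiment-wide `experimentInsightType` with a per-metric `getMetricType(idx)` accessor: with `metrics` and `metrics_secondary`, the insight type is now a property of each metric rather than of the experiment. Roughly, the accessor boils down to a kind check (types abbreviated; the real selector also handles the legacy filters path behind the feature flag):

enum InsightType {
    TRENDS = 'TRENDS',
    FUNNELS = 'FUNNELS',
}

interface ExperimentMetricLike {
    kind: 'ExperimentTrendsQuery' | 'ExperimentFunnelsQuery'
}

function getMetricType(metrics: ExperimentMetricLike[], metricIdx: number): InsightType {
    return metrics[metricIdx]?.kind === 'ExperimentTrendsQuery' ? InsightType.TRENDS : InsightType.FUNNELS
}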
diff --git a/frontend/src/scenes/experiments/ExperimentView/DataCollectionCalculator.tsx b/frontend/src/scenes/experiments/ExperimentView/DataCollectionCalculator.tsx index 91f2d8a0c7a..95938242c14 100644 --- a/frontend/src/scenes/experiments/ExperimentView/DataCollectionCalculator.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/DataCollectionCalculator.tsx @@ -9,9 +9,8 @@ import { insightLogic } from 'scenes/insights/insightLogic' import { Query } from '~/queries/Query/Query' import { ExperimentIdType, InsightType } from '~/types' -import { EXPERIMENT_INSIGHT_ID } from '../constants' +import { MetricInsightId } from '../constants' import { experimentLogic } from '../experimentLogic' - interface ExperimentCalculatorProps { experimentId: ExperimentIdType } @@ -108,20 +107,25 @@ function TrendCalculation({ experimentId }: ExperimentCalculatorProps): JSX.Elem } export function DataCollectionCalculator({ experimentId }: ExperimentCalculatorProps): JSX.Element { - const { experimentInsightType, minimumDetectableEffect, experiment, conversionMetrics } = useValues( + const { getMetricType, minimumDetectableEffect, experiment, conversionMetrics } = useValues( experimentLogic({ experimentId }) ) const { setExperiment } = useActions(experimentLogic({ experimentId })) + const metricType = getMetricType(0) + // :KLUDGE: need these to mount the Query component to load the insight */ - const insightLogicInstance = insightLogic({ dashboardItemId: EXPERIMENT_INSIGHT_ID, syncWithUrl: false }) + const insightLogicInstance = insightLogic({ + dashboardItemId: metricType === InsightType.FUNNELS ? MetricInsightId.Funnels : MetricInsightId.Trends, + syncWithUrl: false, + }) const { insightProps } = useValues(insightLogicInstance) const { query } = useValues(insightDataLogic(insightProps)) const funnelConversionRate = conversionMetrics?.totalRate * 100 || 0 let sliderMaxValue = 0 - if (experimentInsightType === InsightType.FUNNELS) { + if (metricType === InsightType.FUNNELS) { if (100 - funnelConversionRate < 50) { sliderMaxValue = 100 - funnelConversionRate } else { @@ -204,7 +208,7 @@ export function DataCollectionCalculator({ experimentId }: ExperimentCalculatorP The calculations are based on the events received in the last 14 days. This event count may differ from what was considered in earlier estimates. - {experimentInsightType === InsightType.TRENDS ? ( + {getMetricType(0) === InsightType.TRENDS ? 
( ) : ( diff --git a/frontend/src/scenes/experiments/ExperimentView/ExperimentView.tsx b/frontend/src/scenes/experiments/ExperimentView/ExperimentView.tsx index c8a5ffa9138..8225391583f 100644 --- a/frontend/src/scenes/experiments/ExperimentView/ExperimentView.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/ExperimentView.tsx @@ -17,7 +17,7 @@ import { import { CumulativeExposuresChart } from './CumulativeExposuresChart' import { DataCollection } from './DataCollection' import { DistributionModal, DistributionTable } from './DistributionTable' -import { ExperimentExposureModal, ExperimentGoalModal, Goal } from './Goal' +import { Goal } from './Goal' import { Info } from './Info' import { Overview } from './Overview' import { ReleaseConditionsModal, ReleaseConditionsTable } from './ReleaseConditionsTable' @@ -26,7 +26,6 @@ import { SecondaryMetricsTable } from './SecondaryMetricsTable' const ResultsTab = (): JSX.Element => { const { experiment, experimentResults } = useValues(experimentLogic) - const { updateExperimentSecondaryMetrics } = useActions(experimentLogic) const hasResultsInsight = experimentResults && experimentResults.insight @@ -50,12 +49,7 @@ const ResultsTab = (): JSX.Element => { )} )} - updateExperimentSecondaryMetrics(metrics)} - initialMetrics={experiment.secondary_metrics} - defaultAggregationType={experiment.parameters?.aggregation_group_type_index} - /> +
     )
 }
@@ -126,8 +120,6 @@ export function ExperimentView(): JSX.Element {
                         />
                     )}
-                    <ExperimentGoalModal experimentId={experimentId} />
-                    <ExperimentExposureModal experimentId={experimentId} />
diff --git a/frontend/src/scenes/experiments/ExperimentView/Goal.tsx b/frontend/src/scenes/experiments/ExperimentView/Goal.tsx
index 776cd61c16d..1bf11115771 100644
--- a/frontend/src/scenes/experiments/ExperimentView/Goal.tsx
+++ b/frontend/src/scenes/experiments/ExperimentView/Goal.tsx
@@ -1,29 +1,84 @@
-import '../Experiment.scss'
-
 import { IconInfo, IconPlus } from '@posthog/icons'
-import { LemonButton, LemonDivider, LemonModal, Tooltip } from '@posthog/lemon-ui'
+import { LemonButton, LemonDivider, Tooltip } from '@posthog/lemon-ui'
 import { useActions, useValues } from 'kea'
-import { Field, Form } from 'kea-forms'
 import { InsightLabel } from 'lib/components/InsightLabel'
 import { PropertyFilterButton } from 'lib/components/PropertyFilters/components/PropertyFilterButton'
+import { EXPERIMENT_DEFAULT_DURATION, FEATURE_FLAGS } from 'lib/constants'
+import { dayjs } from 'lib/dayjs'
+import { useState } from 'react'

-import { ActionFilter as ActionFilterType, AnyPropertyFilter, Experiment, FilterType, InsightType } from '~/types'
+import { ExperimentFunnelsQuery, ExperimentTrendsQuery, FunnelsQuery, NodeKind, TrendsQuery } from '~/queries/schema'
+import { ActionFilter, AnyPropertyFilter, ChartDisplayType, Experiment, FilterType, InsightType } from '~/types'

-import { EXPERIMENT_EXPOSURE_INSIGHT_ID, EXPERIMENT_INSIGHT_ID } from '../constants'
-import { experimentLogic } from '../experimentLogic'
-import { MetricSelector } from '../MetricSelector'
+import { experimentLogic, getDefaultFilters, getDefaultFunnelsMetric } from '../experimentLogic'
+import { PrimaryMetricModal } from '../Metrics/PrimaryMetricModal'
+import { PrimaryTrendsExposureModal } from '../Metrics/PrimaryTrendsExposureModal'

-export function MetricDisplay({ filters }: { filters?: FilterType }): JSX.Element {
-    const experimentInsightType = filters?.insight || InsightType.TRENDS
+export function MetricDisplayTrends({ query }: { query: TrendsQuery | undefined }): JSX.Element {
+    const event = query?.series?.[0] as unknown as ActionFilter
+
+    if (!event) {
+        return <></>
+    }

     return (
         <>
-            {([...(filters?.events || []), ...(filters?.actions || [])] as ActionFilterType[])
+            <div>
+                <b>
+                    <InsightLabel action={event} showCountedByTag={true} hideIcon showEventName />
+                </b>
+                <div>
+                    {event.properties?.map((prop: AnyPropertyFilter) => (
+                        <PropertyFilterButton key={prop.key} item={prop} />
+                    ))}
+                </div>
+            </div>
+        </>
+    )
+}
+
+export function MetricDisplayFunnels({ query }: { query: FunnelsQuery }): JSX.Element {
+    return (
+        <>
+            {(query.series || []).map((event: any, idx: number) => (
+                <div key={idx}>
+                    <div>
+                        {idx + 1}
+                    </div>
+                    <b>
+                        <InsightLabel action={event} hideIcon showEventName />
+                    </b>
+                    <div>
+                        {event.properties?.map((prop: AnyPropertyFilter) => (
+                            <PropertyFilterButton key={prop.key} item={prop} />
+                        ))}
+                    </div>
+                </div>
+            ))}
+        </>
+    )
+}
+
+// :FLAG: CLEAN UP AFTER MIGRATION
+export function MetricDisplayOld({ filters }: { filters?: FilterType }): JSX.Element {
+    const metricType = filters?.insight || InsightType.TRENDS
+
+    return (
+        <>
+            {([...(filters?.events || []), ...(filters?.actions || [])] as ActionFilter[])
                 .sort((a, b) => (a.order || 0) - (b.order || 0))
-                .map((event: ActionFilterType, idx: number) => (
+                .map((event: ActionFilter, idx: number) => (
- {experimentInsightType === InsightType.FUNNELS && ( + {metricType === InsightType.FUNNELS && (
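Goal.tsx above splits the old filter-based MetricDisplay into MetricDisplayTrends and MetricDisplayFunnels, chosen by the metric query's kind, with MetricDisplayOld kept for the legacy path until the migration flag is removed. A sketch of the dispatch on the discriminated union (shapes abbreviated):

interface TrendsMetric {
    kind: 'ExperimentTrendsQuery'
    count_query: unknown
}

interface FunnelsMetric {
    kind: 'ExperimentFunnelsQuery'
    funnels_query: unknown
}

type ExperimentMetric = TrendsMetric | FunnelsMetric

function displayQueryFor(metric: ExperimentMetric): unknown {
    // The discriminant narrows the union, so each branch can safely reach the
    // query field specific to its metric type.
    return metric.kind === 'ExperimentTrendsQuery' ? metric.count_query : metric.funnels_query
}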
@@ -53,8 +108,19 @@ export function MetricDisplay({ filters }: { filters?: FilterType }): JSX.Elemen } export function ExposureMetric({ experimentId }: { experimentId: Experiment['id'] }): JSX.Element { - const { experiment } = useValues(experimentLogic({ experimentId })) - const { openExperimentExposureModal, updateExperimentExposure } = useActions(experimentLogic({ experimentId })) + const { experiment, featureFlags } = useValues(experimentLogic({ experimentId })) + const { updateExperimentExposure, loadExperiment, setExperiment } = useActions(experimentLogic({ experimentId })) + const [isModalOpen, setIsModalOpen] = useState(false) + + const metricIdx = 0 + + // :FLAG: CLEAN UP AFTER MIGRATION + let hasCustomExposure = false + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + hasCustomExposure = !!(experiment.metrics[metricIdx] as ExperimentTrendsQuery).exposure_query + } else { + hasCustomExposure = !!experiment.parameters?.custom_exposure_filter + } return ( <> @@ -66,154 +132,117 @@ export function ExposureMetric({ experimentId }: { experimentId: Experiment['id'
- {experiment.parameters?.custom_exposure_filter ? ( - + {/* :FLAG: CLEAN UP AFTER MIGRATION */} + {featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL] ? ( + hasCustomExposure ? ( + + ) : ( + Default via $feature_flag_called events + ) + ) : hasCustomExposure ? ( + ) : ( Default via $feature_flag_called events )}
- + { + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + if (!hasCustomExposure) { + setExperiment({ + ...experiment, + metrics: experiment.metrics.map((metric, idx) => + idx === metricIdx + ? { + ...metric, + exposure_query: { + kind: NodeKind.TrendsQuery, + series: [ + { + kind: NodeKind.EventsNode, + name: '$pageview', + event: '$pageview', + }, + ], + interval: 'day', + dateRange: { + date_from: dayjs() + .subtract(EXPERIMENT_DEFAULT_DURATION, 'day') + .format('YYYY-MM-DDTHH:mm'), + date_to: dayjs().endOf('d').format('YYYY-MM-DDTHH:mm'), + explicitDate: true, + }, + trendsFilter: { + display: ChartDisplayType.ActionsLineGraph, + }, + filterTestAccounts: true, + }, + } + : metric + ), + }) + } + } else { + if (!hasCustomExposure) { + setExperiment({ + ...experiment, + parameters: { + ...experiment.parameters, + custom_exposure_filter: getDefaultFilters(InsightType.TRENDS, undefined), + }, + }) + } + } + setIsModalOpen(true) + }} + className="mr-2" + > Change exposure metric - {experiment.parameters?.custom_exposure_filter && ( + {hasCustomExposure && ( updateExperimentExposure(null)} + onClick={() => { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + setExperiment({ + ...experiment, + metrics: experiment.metrics.map((metric, idx) => + idx === metricIdx ? { ...metric, exposure_query: undefined } : metric + ), + }) + } + updateExperimentExposure(null) + }} > Reset )}
+ { + setIsModalOpen(false) + loadExperiment() + }} + /> ) } -export function ExperimentGoalModal({ experimentId }: { experimentId: Experiment['id'] }): JSX.Element { - const { experiment, isExperimentGoalModalOpen, experimentLoading, goalInsightDataLoading, experimentInsightType } = - useValues(experimentLogic({ experimentId })) - const { closeExperimentGoalModal, updateExperimentGoal, setNewExperimentInsight } = useActions( - experimentLogic({ experimentId }) - ) - - const experimentFiltersLength = - (experiment.filters?.events?.length || 0) + (experiment.filters?.actions?.length || 0) - - return ( - - - Cancel - - { - updateExperimentGoal(experiment.filters) - }} - type="primary" - loading={experimentLoading} - data-attr="create-annotation-submit" - > - Save - -
- } - > - - - - - - - ) -} - -export function ExperimentExposureModal({ experimentId }: { experimentId: Experiment['id'] }): JSX.Element { - const { experiment, isExperimentExposureModalOpen, experimentLoading } = useValues( - experimentLogic({ experimentId }) - ) - const { closeExperimentExposureModal, updateExperimentExposure, setExperimentExposureInsight } = useActions( - experimentLogic({ experimentId }) - ) - - return ( - - - Cancel - - { - if (experiment.parameters.custom_exposure_filter) { - updateExperimentExposure(experiment.parameters.custom_exposure_filter) - } - }} - type="primary" - loading={experimentLoading} - data-attr="create-annotation-submit" - > - Save - -
- } - > -
- - - -
- - ) -} - export function Goal(): JSX.Element { - const { experiment, experimentId, experimentInsightType, experimentMathAggregationForTrends, hasGoalSet } = + const { experiment, experimentId, getMetricType, experimentMathAggregationForTrends, hasGoalSet, featureFlags } = useValues(experimentLogic) - const { openExperimentGoalModal } = useActions(experimentLogic({ experimentId })) + const { setExperiment, loadExperiment } = useActions(experimentLogic) + const [isModalOpen, setIsModalOpen] = useState(false) + const metricType = getMetricType(0) return (
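ExposureMetric above seeds a default custom exposure query, a unique-users `$pageview` trend over the default experiment window, before opening the modal. A self-contained sketch of that default; `EXPERIMENT_DEFAULT_DURATION` is assumed to be 14 days, and the object shape mirrors the TrendsQuery literal in the hunk above:

const EXPERIMENT_DEFAULT_DURATION = 14 // days; assumed value of the lib/constants export

function defaultExposureQuery(now: Date = new Date()): Record<string, unknown> {
    const from = new Date(now.getTime() - EXPERIMENT_DEFAULT_DURATION * 24 * 60 * 60 * 1000)
    // The diff formats dates with dayjs as YYYY-MM-DDTHH:mm; this sketch approximates in UTC.
    const fmt = (d: Date): string => d.toISOString().slice(0, 16)
    return {
        kind: 'TrendsQuery',
        series: [{ kind: 'EventsNode', name: '$pageview', event: '$pageview' }],
        interval: 'day',
        dateRange: { date_from: fmt(from), date_to: fmt(now), explicitDate: true },
        trendsFilter: { display: 'ActionsLineGraph' },
        filterTestAccounts: true,
    }
}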
@@ -224,8 +253,8 @@ export function Goal(): JSX.Element { title={ <> {' '} - This {experimentInsightType === InsightType.FUNNELS ? 'funnel' : 'trend'}{' '} - {experimentInsightType === InsightType.FUNNELS + This {metricType === InsightType.FUNNELS ? 'funnel' : 'trend'}{' '} + {metricType === InsightType.FUNNELS ? 'experiment measures conversion at each stage.' : 'experiment tracks the count of a single metric.'} @@ -245,7 +274,20 @@ export function Goal(): JSX.Element { type="secondary" size="small" data-attr="add-experiment-goal" - onClick={openExperimentGoalModal} + onClick={() => { + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + setExperiment({ + ...experiment, + metrics: [getDefaultFunnelsMetric()], + }) + } else { + setExperiment({ + ...experiment, + filters: getDefaultFilters(InsightType.FUNNELS, undefined), + }) + } + setIsModalOpen(true) + }} > Add goal @@ -254,14 +296,27 @@ export function Goal(): JSX.Element {
- {experimentInsightType === InsightType.FUNNELS ? 'Conversion goal steps' : 'Trend goal'} + {metricType === InsightType.FUNNELS ? 'Conversion goal steps' : 'Trend goal'}
- - + {/* :FLAG: CLEAN UP AFTER MIGRATION */} + {featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL] ? ( + metricType === InsightType.FUNNELS ? ( + + ) : ( + + ) + ) : ( + + )} + setIsModalOpen(true)}> Change goal
- {experimentInsightType === InsightType.TRENDS && !experimentMathAggregationForTrends() && ( + {metricType === InsightType.TRENDS && !experimentMathAggregationForTrends() && ( <>
@@ -273,6 +328,14 @@ export function Goal(): JSX.Element { )}
)} + { + setIsModalOpen(false) + loadExperiment() + }} + />
) } diff --git a/frontend/src/scenes/experiments/ExperimentView/SecondaryMetricsTable.tsx b/frontend/src/scenes/experiments/ExperimentView/SecondaryMetricsTable.tsx index 066144566b0..40a1e8133be 100644 --- a/frontend/src/scenes/experiments/ExperimentView/SecondaryMetricsTable.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/SecondaryMetricsTable.tsx @@ -1,131 +1,36 @@ -import '../Experiment.scss' - import { IconInfo, IconPencil, IconPlus } from '@posthog/icons' -import { LemonButton, LemonInput, LemonModal, LemonTable, LemonTableColumns, Tooltip } from '@posthog/lemon-ui' +import { LemonButton, LemonTable, LemonTableColumns, Tooltip } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' -import { Form } from 'kea-forms' import { EntityFilterInfo } from 'lib/components/EntityFilterInfo' +import { FEATURE_FLAGS } from 'lib/constants' import { IconAreaChart } from 'lib/lemon-ui/icons' -import { LemonField } from 'lib/lemon-ui/LemonField' import { capitalizeFirstLetter } from 'lib/utils' +import { useState } from 'react' -import { InsightType } from '~/types' +import { Experiment, InsightType } from '~/types' -import { SECONDARY_METRIC_INSIGHT_ID } from '../constants' -import { experimentLogic, TabularSecondaryMetricResults } from '../experimentLogic' -import { MetricSelector } from '../MetricSelector' -import { MAX_SECONDARY_METRICS, secondaryMetricsLogic, SecondaryMetricsProps } from '../secondaryMetricsLogic' -import { ResultsQuery, VariantTag } from './components' +import { + experimentLogic, + getDefaultFilters, + getDefaultFunnelsMetric, + TabularSecondaryMetricResults, +} from '../experimentLogic' +import { SecondaryMetricChartModal } from '../Metrics/SecondaryMetricChartModal' +import { SecondaryMetricModal } from '../Metrics/SecondaryMetricModal' +import { VariantTag } from './components' -export function SecondaryMetricsModal({ - onMetricsChange, - initialMetrics, - experimentId, - defaultAggregationType, -}: SecondaryMetricsProps): JSX.Element { - const logic = secondaryMetricsLogic({ onMetricsChange, initialMetrics, experimentId, defaultAggregationType }) - const { - secondaryMetricModal, - isModalOpen, - showResults, - isSecondaryMetricModalSubmitting, - existingModalSecondaryMetric, - metricIdx, - } = useValues(logic) +const MAX_SECONDARY_METRICS = 10 - const { deleteMetric, closeModal, saveSecondaryMetric, setPreviewInsight } = useActions(logic) - const { secondaryMetricResults, isExperimentRunning } = useValues(experimentLogic({ experimentId })) - const targetResults = secondaryMetricResults && secondaryMetricResults[metricIdx] - - return ( - - Close - - ) : ( - <> - {existingModalSecondaryMetric && ( - deleteMetric(metricIdx)} - > - Delete - - )} -
- - Cancel - - - {existingModalSecondaryMetric ? 'Save' : 'Create'} - -
- - ) - } - > - {showResults ? ( - - ) : ( -
- - - - - - -
- )} -
- ) -} - -export function SecondaryMetricsTable({ - onMetricsChange, - initialMetrics, - experimentId, - defaultAggregationType, -}: SecondaryMetricsProps): JSX.Element { - const logic = secondaryMetricsLogic({ onMetricsChange, initialMetrics, experimentId, defaultAggregationType }) - const { metrics } = useValues(logic) - - const { openModalToCreateSecondaryMetric, openModalToEditSecondaryMetric } = useActions(logic) +export function SecondaryMetricsTable({ experimentId }: { experimentId: Experiment['id'] }): JSX.Element { + const [isEditModalOpen, setIsEditModalOpen] = useState(false) + const [isChartModalOpen, setIsChartModalOpen] = useState(false) + const [modalMetricIdx, setModalMetricIdx] = useState(null) const { experimentResults, secondaryMetricResultsLoading, experiment, + getSecondaryMetricType, secondaryMetricResults, tabularSecondaryMetricResults, countDataForVariant, @@ -134,7 +39,38 @@ export function SecondaryMetricsTable({ credibleIntervalForVariant, experimentMathAggregationForTrends, getHighestProbabilityVariant, + featureFlags, } = useValues(experimentLogic({ experimentId })) + const { loadExperiment } = useActions(experimentLogic({ experimentId })) + + const openEditModal = (idx: number): void => { + setModalMetricIdx(idx) + setIsEditModalOpen(true) + } + + const closeEditModal = (): void => { + setIsEditModalOpen(false) + setModalMetricIdx(null) + loadExperiment() + } + + const openChartModal = (idx: number): void => { + setModalMetricIdx(idx) + setIsChartModalOpen(true) + } + + const closeChartModal = (): void => { + setIsChartModalOpen(false) + setModalMetricIdx(null) + } + + // :FLAG: CLEAN UP AFTER MIGRATION + let metrics + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + metrics = experiment.metrics_secondary + } else { + metrics = experiment.secondary_metrics + } const columns: LemonTableColumns = [ { @@ -156,14 +92,15 @@ export function SecondaryMetricsTable({ }, ] - experiment.secondary_metrics?.forEach((metric, idx) => { + metrics?.forEach((metric, idx) => { const targetResults = secondaryMetricResults?.[idx] const winningVariant = getHighestProbabilityVariant(targetResults || null) + const metricType = getSecondaryMetricType(idx) const Header = (): JSX.Element => (
-
{capitalizeFirstLetter(metric.name)}
+
{capitalizeFirstLetter(metric.name || '')}
} - onClick={() => openModalToEditSecondaryMetric(metric, idx, true)} + onClick={() => openChartModal(idx)} disabledReason={ targetResults && targetResults.insight ? undefined @@ -183,7 +120,7 @@ export function SecondaryMetricsTable({ type="secondary" size="xsmall" icon={} - onClick={() => openModalToEditSecondaryMetric(metric, idx, false)} + onClick={() => openEditModal(idx)} />
@@ -191,7 +128,7 @@ export function SecondaryMetricsTable({
) - if (metric.filters.insight === InsightType.TRENDS) { + if (metricType === InsightType.TRENDS) { columns.push({ title:
, children: [ @@ -230,7 +167,11 @@ export function SecondaryMetricsTable({ if (item.variant === 'control') { return Baseline } - const credibleInterval = credibleIntervalForVariant(targetResults || null, item.variant) + const credibleInterval = credibleIntervalForVariant( + targetResults || null, + item.variant, + metricType + ) if (!credibleInterval) { return <>— } @@ -281,7 +222,11 @@ export function SecondaryMetricsTable({ return Baseline } - const credibleInterval = credibleIntervalForVariant(targetResults || null, item.variant) + const credibleInterval = credibleIntervalForVariant( + targetResults || null, + item.variant, + metricType + ) if (!credibleInterval) { return <>— } @@ -332,18 +277,11 @@ export function SecondaryMetricsTable({
{metrics && metrics.length > 0 && (
- = MAX_SECONDARY_METRICS - ? `You can only add up to ${MAX_SECONDARY_METRICS} secondary metrics.` - : undefined - } - > - Add metric - +
)}
@@ -365,24 +303,76 @@ export function SecondaryMetricsTable({ Add up to {MAX_SECONDARY_METRICS} secondary metrics to monitor side effects of your experiment.
- } - type="secondary" - size="small" - onClick={openModalToCreateSecondaryMetric} - > - Add metric - +
)}
- + ) } + +const AddSecondaryMetricButton = ({ + experimentId, + metrics, + openEditModal, +}: { + experimentId: Experiment['id'] + metrics: any + openEditModal: (metricIdx: number) => void +}): JSX.Element => { + const { experiment, featureFlags } = useValues(experimentLogic({ experimentId })) + const { setExperiment } = useActions(experimentLogic({ experimentId })) + return ( + } + type="secondary" + size="small" + onClick={() => { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + const newMetricsSecondary = [...experiment.metrics_secondary, getDefaultFunnelsMetric()] + setExperiment({ + metrics_secondary: newMetricsSecondary, + }) + openEditModal(newMetricsSecondary.length - 1) + } else { + const newSecondaryMetrics = [ + ...experiment.secondary_metrics, + { + name: '', + filters: getDefaultFilters(InsightType.FUNNELS, undefined), + }, + ] + setExperiment({ + secondary_metrics: newSecondaryMetrics, + }) + openEditModal(newSecondaryMetrics.length - 1) + } + }} + disabledReason={ + metrics.length >= MAX_SECONDARY_METRICS + ? `You can only add up to ${MAX_SECONDARY_METRICS} secondary metrics.` + : undefined + } + > + Add metric + + ) +} diff --git a/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx b/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx index d4049eedd49..b859dae72e0 100644 --- a/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx @@ -11,8 +11,6 @@ import posthog from 'posthog-js' import { urls } from 'scenes/urls' import { - _FunnelExperimentResults, - _TrendsExperimentResults, FilterLogicalOperator, FunnelExperimentVariant, InsightType, @@ -33,13 +31,15 @@ export function SummaryTable(): JSX.Element { experiment, experimentResults, tabularExperimentResults, - experimentInsightType, + getMetricType, exposureCountDataForVariant, conversionRateForVariant, experimentMathAggregationForTrends, countDataForVariant, getHighestProbabilityVariant, + credibleIntervalForVariant, } = useValues(experimentLogic) + const metricType = getMetricType(0) if (!experimentResults) { return <> @@ -61,7 +61,7 @@ export function SummaryTable(): JSX.Element { }, ] - if (experimentInsightType === InsightType.TRENDS) { + if (metricType === InsightType.TRENDS) { columns.push({ key: 'counts', title: ( @@ -163,22 +163,11 @@ export function SummaryTable(): JSX.Element { return Baseline } - const credibleInterval = (experimentResults as _TrendsExperimentResults)?.credible_intervals?.[ - variant.key - ] + const credibleInterval = credibleIntervalForVariant(experimentResults || null, variant.key, metricType) if (!credibleInterval) { return <>— } - - const controlVariant = (experimentResults.variants as TrendExperimentVariant[]).find( - ({ key }) => key === 'control' - ) as TrendExperimentVariant - const controlMean = controlVariant.count / controlVariant.absolute_exposure - - // Calculate the percentage difference between the credible interval bounds of the variant and the control's mean. - // This represents the range in which the true percentage change relative to the control is likely to fall. - const lowerBound = ((credibleInterval[0] - controlMean) / controlMean) * 100 - const upperBound = ((credibleInterval[1] - controlMean) / controlMean) * 100 + const [lowerBound, upperBound] = credibleInterval return (
{`[${lowerBound > 0 ? '+' : ''}${lowerBound.toFixed(2)}%, ${ @@ -189,7 +178,7 @@ export function SummaryTable(): JSX.Element { }) } - if (experimentInsightType === InsightType.FUNNELS) { + if (metricType === InsightType.FUNNELS) { columns.push({ key: 'conversionRate', title: 'Conversion rate', @@ -248,27 +237,11 @@ export function SummaryTable(): JSX.Element { return Baseline } - const credibleInterval = (experimentResults as _FunnelExperimentResults)?.credible_intervals?.[ - item.key - ] + const credibleInterval = credibleIntervalForVariant(experimentResults || null, item.key, metricType) if (!credibleInterval) { return <>— } - - const controlVariant = (experimentResults.variants as FunnelExperimentVariant[]).find( - ({ key }) => key === 'control' - ) as FunnelExperimentVariant - const controlConversionRate = - controlVariant.success_count / (controlVariant.success_count + controlVariant.failure_count) - - if (!controlConversionRate) { - return <>— - } - - // Calculate the percentage difference between the credible interval bounds of the variant and the control's conversion rate. - // This represents the range in which the true percentage change relative to the control is likely to fall. - const lowerBound = ((credibleInterval[0] - controlConversionRate) / controlConversionRate) * 100 - const upperBound = ((credibleInterval[1] - controlConversionRate) / controlConversionRate) * 100 + const [lowerBound, upperBound] = credibleInterval return (
{`[${lowerBound > 0 ? '+' : ''}${lowerBound.toFixed(2)}%, ${ diff --git a/frontend/src/scenes/experiments/ExperimentView/VariantScreenshot.tsx b/frontend/src/scenes/experiments/ExperimentView/VariantScreenshot.tsx index 3580428d041..81286363c3b 100644 --- a/frontend/src/scenes/experiments/ExperimentView/VariantScreenshot.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/VariantScreenshot.tsx @@ -1,4 +1,4 @@ -import { IconUpload, IconX } from '@posthog/icons' +import { IconX } from '@posthog/icons' import { LemonButton, LemonDivider, LemonFileInput, LemonModal, LemonSkeleton, lemonToast } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { useUploadFiles } from 'lib/hooks/useUploadFiles' @@ -17,20 +17,34 @@ export function VariantScreenshot({ const { experiment } = useValues(experimentLogic) const { updateExperimentVariantImages, reportExperimentVariantScreenshotUploaded } = useActions(experimentLogic) - const [mediaId, setMediaId] = useState(experiment.parameters?.variant_screenshot_media_ids?.[variantKey] || null) - const [isLoadingImage, setIsLoadingImage] = useState(true) - const [isModalOpen, setIsModalOpen] = useState(false) + const getInitialMediaIds = (): string[] => { + const variantImages = experiment.parameters?.variant_screenshot_media_ids?.[variantKey] + if (!variantImages) { + return [] + } + + return Array.isArray(variantImages) ? variantImages : [variantImages] + } + + const [mediaIds, setMediaIds] = useState(getInitialMediaIds()) + const [loadingImages, setLoadingImages] = useState>({}) + const [selectedImageIndex, setSelectedImageIndex] = useState(null) const { setFilesToUpload, filesToUpload, uploading } = useUploadFiles({ onUpload: (_, __, id) => { - setMediaId(id) - if (id) { + if (id && mediaIds.length < 5) { + const newMediaIds = [...mediaIds, id] + setMediaIds(newMediaIds) + const updatedVariantImages = { ...experiment.parameters?.variant_screenshot_media_ids, - [variantKey]: id, + [variantKey]: newMediaIds, } + updateExperimentVariantImages(updatedVariantImages) reportExperimentVariantScreenshotUploaded(experiment.id) + } else if (mediaIds.length >= 5) { + lemonToast.error('Maximum of 5 images allowed') } }, onError: (detail) => { @@ -38,64 +52,107 @@ export function VariantScreenshot({ }, }) + const handleImageLoad = (mediaId: string): void => { + setLoadingImages((prev) => ({ ...prev, [mediaId]: false })) + } + + const handleImageError = (mediaId: string): void => { + setLoadingImages((prev) => ({ ...prev, [mediaId]: false })) + } + + const handleDelete = (indexToDelete: number): void => { + const newMediaIds = mediaIds.filter((_, index) => index !== indexToDelete) + setMediaIds(newMediaIds) + + const updatedVariantImages = { + ...experiment.parameters?.variant_screenshot_media_ids, + [variantKey]: newMediaIds, + } + + updateExperimentVariantImages(updatedVariantImages) + } + + const getThumbnailWidth = (): string => { + const totalItems = mediaIds.length < 5 ? mediaIds.length + 1 : mediaIds.length + switch (totalItems) { + case 1: + return 'w-20' + case 2: + return 'w-20' + case 3: + return 'w-16' + case 4: + return 'w-14' + case 5: + return 'w-12' + default: + return 'w-20' + } + } + + const widthClass = getThumbnailWidth() + return (
- {!mediaId ? ( - - - Upload a preview of this variant's UI - - } - /> - ) : ( -
-
-
setIsModalOpen(true)} className="cursor-zoom-in relative"> -
- {isLoadingImage && } - setIsLoadingImage(false)} - onLoad={() => setIsLoadingImage(false)} - /> -
-
- } - onClick={(e) => { - e.stopPropagation() - setMediaId(null) - const updatedVariantImages = { - ...experiment.parameters?.variant_screenshot_media_ids, - } - delete updatedVariantImages[variantKey] - updateExperimentVariantImages(updatedVariantImages) - }} - size="small" - tooltip="Remove" - tooltipPlacement="right" - noPadding - className="group-hover:flex hidden absolute right-0 top-0" - /> +
+ {mediaIds.map((mediaId, index) => ( +
+
+
setSelectedImageIndex(index)} className="cursor-zoom-in relative"> +
+ {loadingImages[mediaId] && } + handleImageError(mediaId)} + onLoad={() => handleImageLoad(mediaId)} + /> +
+
+ } + onClick={(e) => { + e.stopPropagation() + handleDelete(index) + }} + size="small" + tooltip="Remove" + tooltipPlacement="right" + noPadding + className="group-hover:flex hidden absolute right-0 top-0" + /> +
-
- )} + ))} + + {mediaIds.length < 5 && ( +
+ + + +
+ } + /> +
+ )} +
+ setIsModalOpen(false)} + isOpen={selectedImageIndex !== null} + onClose={() => setSelectedImageIndex(null)} title={
- Screenshot + Screenshot {selectedImageIndex !== null ? selectedImageIndex + 1 : ''} {rolloutPercentage !== undefined && ( @@ -104,12 +161,20 @@ export function VariantScreenshot({
} > - {`Screenshot: + {selectedImageIndex !== null && mediaIds[selectedImageIndex] && ( + {`Screenshot + )}
) } + +export default VariantScreenshot diff --git a/frontend/src/scenes/experiments/ExperimentView/components.tsx b/frontend/src/scenes/experiments/ExperimentView/components.tsx index ddcd2bbd14b..fd8751da6c2 100644 --- a/frontend/src/scenes/experiments/ExperimentView/components.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/components.tsx @@ -153,7 +153,6 @@ export function ResultsQuery({ } as InsightVizNode, result: newQueryResults?.insight, disable_baseline: true, - last_refresh: newQueryResults?.last_refresh, }, doNotLoad: true, }, @@ -265,6 +264,8 @@ export function ExploreButton({ icon = }: { icon?: JSX.Element } export function ResultsHeader(): JSX.Element { + const { experimentResults } = useValues(experimentLogic) + return (
@@ -275,9 +276,7 @@ export function ResultsHeader(): JSX.Element {
-
- -
+
{experimentResults && }
) @@ -691,7 +690,7 @@ export function ShipVariantModal({ experimentId }: { experimentId: Experiment['i export function ActionBanner(): JSX.Element { const { experiment, - experimentInsightType, + getMetricType, experimentResults, experimentLoading, experimentResultsLoading, @@ -708,6 +707,9 @@ export function ActionBanner(): JSX.Element { const { archiveExperiment } = useActions(experimentLogic) const { aggregationLabel } = useValues(groupsModel) + + const metricType = getMetricType(0) + const aggregationTargetName = experiment.filters.aggregation_group_type_index != null ? aggregationLabel(experiment.filters.aggregation_group_type_index).plural @@ -766,7 +768,7 @@ export function ActionBanner(): JSX.Element { // Results insignificant, but a large enough sample/running time has been achieved // Further collection unlikely to change the result -> recommmend cutting the losses if ( - experimentInsightType === InsightType.FUNNELS && + metricType === InsightType.FUNNELS && funnelResultsPersonsTotal > Math.max(recommendedSampleSize, 500) && dayjs().diff(experiment.start_date, 'day') > 2 // at least 2 days running ) { @@ -778,7 +780,7 @@ export function ActionBanner(): JSX.Element { ) } - if (experimentInsightType === InsightType.TRENDS && actualRunningTime > Math.max(recommendedRunningTime, 7)) { + if (metricType === InsightType.TRENDS && actualRunningTime > Math.max(recommendedRunningTime, 7)) { return ( Your experiment has been running long enough, but the results are still inconclusive. Continuing the @@ -807,7 +809,7 @@ export function ActionBanner(): JSX.Element { // Win probability only slightly over 0.9 and the recommended sample/time just met -> proceed with caution if ( - experimentInsightType === InsightType.FUNNELS && + metricType === InsightType.FUNNELS && funnelResultsPersonsTotal < recommendedSampleSize + 50 && winProbability < 0.93 ) { @@ -821,7 +823,7 @@ export function ActionBanner(): JSX.Element { } if ( - experimentInsightType === InsightType.TRENDS && + metricType === InsightType.TRENDS && actualRunningTime < recommendedRunningTime + 2 && winProbability < 0.93 ) { diff --git a/frontend/src/scenes/experiments/ExperimentWorkflow.tsx b/frontend/src/scenes/experiments/ExperimentWorkflow.tsx deleted file mode 100644 index 763c367eb71..00000000000 --- a/frontend/src/scenes/experiments/ExperimentWorkflow.tsx +++ /dev/null @@ -1,73 +0,0 @@ -import './Experiment.scss' - -import { IconCheckCircle } from '@posthog/icons' -import clsx from 'clsx' -import { IconRadioButtonUnchecked } from 'lib/lemon-ui/icons' -import { useState } from 'react' - -export function ExperimentWorkflow(): JSX.Element { - const [workflowValidateStepCompleted, setWorkflowValidateStepCompleted] = useState(false) - const [workflowLaunchStepCompleted, setWorkflowLaunchStepCompleted] = useState(false) - - return ( - <> -
-
Experiment workflow
-
-
-
-
- - Create experiment -
-
Set variants, select participants, and add secondary metrics
-
-
-
-
setWorkflowValidateStepCompleted(!workflowValidateStepCompleted)} - > -
- {workflowValidateStepCompleted ? ( - - ) : ( - - )} - Validate experiment -
-
- Once you've written your code, it's a good idea to test that each variant behaves as - you'd expect. -
-
-
-
-
setWorkflowLaunchStepCompleted(!workflowLaunchStepCompleted)} - > -
- {workflowLaunchStepCompleted ? ( - - ) : ( - - )} - Launch experiment -
-
- Run your experiment, monitor results, and decide when to terminate your experiment. -
-
-
-
-
- - ) -} diff --git a/frontend/src/scenes/experiments/MetricSelector.tsx b/frontend/src/scenes/experiments/MetricSelector.tsx deleted file mode 100644 index 446505d6772..00000000000 --- a/frontend/src/scenes/experiments/MetricSelector.tsx +++ /dev/null @@ -1,212 +0,0 @@ -import './Experiment.scss' - -import { IconInfo } from '@posthog/icons' -import { LemonSelect, Link } from '@posthog/lemon-ui' -import { BindLogic, useActions, useValues } from 'kea' -import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' -import { EXPERIMENT_DEFAULT_DURATION } from 'lib/constants' -import { LemonBanner } from 'lib/lemon-ui/LemonBanner' -import { Tooltip } from 'lib/lemon-ui/Tooltip' -import { useEffect } from 'react' -import { Attribution } from 'scenes/insights/EditorFilters/AttributionFilter' -import { SamplingFilter } from 'scenes/insights/EditorFilters/SamplingFilter' -import { ActionFilter } from 'scenes/insights/filters/ActionFilter/ActionFilter' -import { MathAvailability } from 'scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow' -import { AggregationSelect } from 'scenes/insights/filters/AggregationSelect' -import { insightDataLogic } from 'scenes/insights/insightDataLogic' -import { insightLogic } from 'scenes/insights/insightLogic' -import { insightVizDataLogic } from 'scenes/insights/insightVizDataLogic' -import { FunnelConversionWindowFilter } from 'scenes/insights/views/Funnels/FunnelConversionWindowFilter' - -import { actionsAndEventsToSeries } from '~/queries/nodes/InsightQuery/utils/filtersToQueryNode' -import { queryNodeToFilter } from '~/queries/nodes/InsightQuery/utils/queryNodeToFilter' -import { InsightTestAccountFilter } from '~/queries/nodes/InsightViz/filters/InsightTestAccountFilter' -import { Query } from '~/queries/Query/Query' -import { FunnelsQuery, InsightQueryNode, TrendsQuery } from '~/queries/schema' -import { EditorFilterProps, FilterType, InsightLogicProps, InsightShortId, InsightType } from '~/types' - -export interface MetricSelectorProps { - dashboardItemId: InsightShortId - setPreviewInsight: (filters?: Partial) => void - showDateRangeBanner?: boolean - forceTrendExposureMetric?: boolean -} - -export function MetricSelector({ - dashboardItemId, - setPreviewInsight, - showDateRangeBanner, - forceTrendExposureMetric, -}: MetricSelectorProps): JSX.Element { - // insightLogic - const logic = insightLogic({ dashboardItemId, syncWithUrl: false }) - const { insightProps } = useValues(logic) - - // insightDataLogic - const { query } = useValues(insightDataLogic(insightProps)) - - // insightVizDataLogic - const { isTrends } = useValues(insightVizDataLogic(insightProps)) - - useEffect(() => { - if (forceTrendExposureMetric && !isTrends) { - setPreviewInsight({ insight: InsightType.TRENDS }) - } - }, [forceTrendExposureMetric, isTrends]) - - return ( - <> -
- Insight Type - { - val && setPreviewInsight({ insight: val }) - }} - options={[ - { value: InsightType.TRENDS, label: Trends }, - { value: InsightType.FUNNELS, label: Funnels }, - ]} - disabledReason={forceTrendExposureMetric ? 'Exposure metric can only be a trend graph' : undefined} - /> -
- -
- -
-
- - - - {showDateRangeBanner && ( - - Preview insights are generated based on {EXPERIMENT_DEFAULT_DURATION} days of data. This can cause a - mismatch between the preview and the actual results. - - )} - -
- - - -
- - ) -} - -export function ExperimentInsightCreator({ insightProps }: { insightProps: InsightLogicProps }): JSX.Element { - // insightVizDataLogic - const { isTrends, series, querySource } = useValues(insightVizDataLogic(insightProps)) - const { updateQuerySource } = useActions(insightVizDataLogic(insightProps)) - - // calculated properties - const filterSteps = series || [] - const isStepsEmpty = filterSteps.length === 0 - - return ( - <> - ): void => { - updateQuerySource({ - series: actionsAndEventsToSeries( - payload as any, - true, - isTrends ? MathAvailability.All : MathAvailability.None - ), - } as TrendsQuery | FunnelsQuery) - }} - typeKey={`experiment-${isTrends ? InsightType.TRENDS : InsightType.FUNNELS}-${ - insightProps.dashboardItemId - }-metric`} - mathAvailability={isTrends ? undefined : MathAvailability.None} - hideDeleteBtn={isTrends || filterSteps.length === 1} - buttonCopy={isTrends ? 'Add graph series' : 'Add funnel step'} - showSeriesIndicator={isTrends || !isStepsEmpty} - entitiesLimit={isTrends ? 1 : undefined} - seriesIndicatorType={isTrends ? undefined : 'numeric'} - sortable={isTrends ? undefined : true} - showNestedArrow={isTrends ? undefined : true} - showNumericalPropsOnly={isTrends} - actionsTaxonomicGroupTypes={[ - TaxonomicFilterGroupType.Events, - TaxonomicFilterGroupType.Actions, - TaxonomicFilterGroupType.DataWarehouse, - ]} - propertiesTaxonomicGroupTypes={[ - TaxonomicFilterGroupType.EventProperties, - TaxonomicFilterGroupType.PersonProperties, - TaxonomicFilterGroupType.EventFeatureFlags, - TaxonomicFilterGroupType.Cohorts, - TaxonomicFilterGroupType.Elements, - TaxonomicFilterGroupType.SessionProperties, - TaxonomicFilterGroupType.HogQLExpression, - TaxonomicFilterGroupType.DataWarehouseProperties, - TaxonomicFilterGroupType.DataWarehousePersonProperties, - ]} - /> -
- {!isTrends && ( - <> -
- Aggregating by - -
- - - - )} - -
- - ) -} - -export function AttributionSelect({ insightProps }: EditorFilterProps): JSX.Element { - return ( -
-
- Attribution type - -
- When breaking down funnels, it's possible that the same properties don't exist on every - event. For example, if you want to break down by browser on a funnel that contains both - frontend and backend events. -
-
- In this case, you can choose from which step the properties should be selected from by - modifying the attribution type. There are four modes to choose from: -
-
-                                    • First touchpoint: the first property value seen in any of the steps is chosen.
-                                    • Last touchpoint: the last property value seen from all steps is chosen.
-                                    • All steps: the property value must be seen in all steps to be considered in the
-                                      funnel.
-                                    • Specific step: only the property value seen at the selected step is chosen.
-
- Read more in the{' '} - - documentation. - -
-
- } - > - - -
- -
- ) -} diff --git a/frontend/src/scenes/experiments/Metrics/PrimaryGoalFunnels.tsx b/frontend/src/scenes/experiments/Metrics/PrimaryGoalFunnels.tsx new file mode 100644 index 00000000000..aefca698f5a --- /dev/null +++ b/frontend/src/scenes/experiments/Metrics/PrimaryGoalFunnels.tsx @@ -0,0 +1,309 @@ +import { LemonLabel } from '@posthog/lemon-ui' +import { LemonInput } from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' +import { TestAccountFilterSwitch } from 'lib/components/TestAccountFiltersSwitch' +import { EXPERIMENT_DEFAULT_DURATION, FEATURE_FLAGS } from 'lib/constants' +import { LemonBanner } from 'lib/lemon-ui/LemonBanner' +import { ActionFilter } from 'scenes/insights/filters/ActionFilter/ActionFilter' +import { MathAvailability } from 'scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow' +import { getHogQLValue } from 'scenes/insights/filters/AggregationSelect' +import { teamLogic } from 'scenes/teamLogic' + +import { actionsAndEventsToSeries, filtersToQueryNode } from '~/queries/nodes/InsightQuery/utils/filtersToQueryNode' +import { queryNodeToFilter } from '~/queries/nodes/InsightQuery/utils/queryNodeToFilter' +import { Query } from '~/queries/Query/Query' +import { ExperimentFunnelsQuery, NodeKind } from '~/queries/schema' +import { BreakdownAttributionType, FilterType, FunnelsFilterType } from '~/types' + +import { experimentLogic } from '../experimentLogic' +import { + commonActionFilterProps, + FunnelAggregationSelect, + FunnelAttributionSelect, + FunnelConversionWindowFilter, +} from './Selectors' +export function PrimaryGoalFunnels(): JSX.Element { + const { currentTeam } = useValues(teamLogic) + const { experiment, isExperimentRunning, featureFlags } = useValues(experimentLogic) + const { setExperiment, setFunnelsMetric } = useActions(experimentLogic) + const hasFilters = (currentTeam?.test_account_filters || []).length > 0 + + const metricIdx = 0 + const currentMetric = experiment.metrics[metricIdx] as ExperimentFunnelsQuery + + return ( + <> +
+            <div>
+                <LemonLabel>Name (optional)</LemonLabel>
+                {featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL] && (
+                    <LemonInput
+                        value={currentMetric.name}
+                        onChange={(newName) => {
+                            setFunnelsMetric({
+                                metricIdx,
+                                name: newName,
+                            })
+                        }}
+                    />
+                )}
+            </div>
+ { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + return queryNodeToFilter(currentMetric.funnels_query) + } + return experiment.filters + })()} + setFilters={({ actions, events, data_warehouse }: Partial): void => { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + const series = actionsAndEventsToSeries( + { actions, events, data_warehouse } as any, + true, + MathAvailability.None + ) + + setFunnelsMetric({ + metricIdx, + series, + }) + } else { + if (actions?.length) { + setExperiment({ + filters: { + ...experiment.filters, + actions, + events: undefined, + data_warehouse: undefined, + }, + }) + } else if (events?.length) { + setExperiment({ + filters: { + ...experiment.filters, + events, + actions: undefined, + data_warehouse: undefined, + }, + }) + } else if (data_warehouse?.length) { + setExperiment({ + filters: { + ...experiment.filters, + data_warehouse, + actions: undefined, + events: undefined, + }, + }) + } + } + }} + typeKey="experiment-metric" + mathAvailability={MathAvailability.None} + buttonCopy="Add funnel step" + showSeriesIndicator={true} + seriesIndicatorType="numeric" + sortable={true} + showNestedArrow={true} + {...commonActionFilterProps} + /> +
+ { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + return getHogQLValue( + currentMetric.funnels_query.aggregation_group_type_index ?? undefined, + currentMetric.funnels_query.funnelsFilter?.funnelAggregateByHogQL ?? undefined + ) + } + return getHogQLValue( + experiment.filters.aggregation_group_type_index, + (experiment.filters as FunnelsFilterType).funnel_aggregate_by_hogql + ) + })()} + onChange={(value) => { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + setFunnelsMetric({ + metricIdx, + funnelAggregateByHogQL: value, + }) + } else { + setExperiment({ + filters: { + ...experiment.filters, + funnel_aggregate_by_hogql: value, + }, + }) + } + }} + /> + { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + return currentMetric.funnels_query?.funnelsFilter?.funnelWindowInterval + } + return (experiment.filters as FunnelsFilterType).funnel_window_interval + })()} + funnelWindowIntervalUnit={(() => { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + return currentMetric.funnels_query?.funnelsFilter?.funnelWindowIntervalUnit + } + return (experiment.filters as FunnelsFilterType).funnel_window_interval_unit + })()} + onFunnelWindowIntervalChange={(funnelWindowInterval) => { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + setFunnelsMetric({ + metricIdx, + funnelWindowInterval: funnelWindowInterval, + }) + } else { + setExperiment({ + filters: { + ...experiment.filters, + funnel_window_interval: funnelWindowInterval, + }, + }) + } + }} + onFunnelWindowIntervalUnitChange={(funnelWindowIntervalUnit) => { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + setFunnelsMetric({ + metricIdx, + funnelWindowIntervalUnit: funnelWindowIntervalUnit || undefined, + }) + } else { + setExperiment({ + filters: { + ...experiment.filters, + funnel_window_interval_unit: funnelWindowIntervalUnit || undefined, + }, + }) + } + }} + /> + { + // :FLAG: CLEAN UP AFTER MIGRATION + let breakdownAttributionType + let breakdownAttributionValue + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + breakdownAttributionType = + currentMetric.funnels_query?.funnelsFilter?.breakdownAttributionType + breakdownAttributionValue = + currentMetric.funnels_query?.funnelsFilter?.breakdownAttributionValue + } else { + breakdownAttributionType = (experiment.filters as FunnelsFilterType) + .breakdown_attribution_type + breakdownAttributionValue = (experiment.filters as FunnelsFilterType) + .breakdown_attribution_value + } + + const currentValue: BreakdownAttributionType | `${BreakdownAttributionType.Step}/${number}` = + !breakdownAttributionType + ? BreakdownAttributionType.FirstTouch + : breakdownAttributionType === BreakdownAttributionType.Step + ? `${breakdownAttributionType}/${breakdownAttributionValue || 0}` + : breakdownAttributionType + + return currentValue + })()} + onChange={(value) => { + const [breakdownAttributionType, breakdownAttributionValue] = (value || '').split('/') + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + setFunnelsMetric({ + metricIdx, + breakdownAttributionType: breakdownAttributionType as BreakdownAttributionType, + breakdownAttributionValue: breakdownAttributionValue + ? 
parseInt(breakdownAttributionValue) + : undefined, + }) + } else { + setExperiment({ + filters: { + ...experiment.filters, + breakdown_attribution_type: breakdownAttributionType as BreakdownAttributionType, + breakdown_attribution_value: breakdownAttributionValue + ? parseInt(breakdownAttributionValue) + : 0, + }, + }) + } + }} + stepsLength={(() => { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + return currentMetric.funnels_query?.series?.length + } + return Math.max( + experiment.filters.actions?.length ?? 0, + experiment.filters.events?.length ?? 0, + experiment.filters.data_warehouse?.length ?? 0 + ) + })()} + /> + { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + const val = (experiment.metrics[0] as ExperimentFunnelsQuery).funnels_query + ?.filterTestAccounts + return hasFilters ? !!val : false + } + return hasFilters ? !!experiment.filters.filter_test_accounts : false + })()} + onChange={(checked: boolean) => { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + setFunnelsMetric({ + metricIdx, + filterTestAccounts: checked, + }) + } else { + setExperiment({ + filters: { + ...experiment.filters, + filter_test_accounts: checked, + }, + }) + } + }} + fullWidth + /> +
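// A minimal sketch (illustrative helper, not part of this diff) of the legacy
// stepsLength fallback computed just above: without the HogQL flag, the funnel
// step count is inferred from whichever legacy filter list is populated.
function legacyStepsLength(filters: Partial<FilterType>): number {
    return Math.max(
        filters.actions?.length ?? 0,
        filters.events?.length ?? 0,
        filters.data_warehouse?.length ?? 0
    )
}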
+ {isExperimentRunning && ( + + Preview insights are generated based on {EXPERIMENT_DEFAULT_DURATION} days of data. This can cause a + mismatch between the preview and the actual results. + + )} +
+ {/* :FLAG: CLEAN UP AFTER MIGRATION */} + { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + return currentMetric.funnels_query + } + return filtersToQueryNode(experiment.filters) + })(), + showTable: false, + showLastComputation: true, + showLastComputationRefresh: false, + }} + readOnly + /> +
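// A minimal sketch (illustrative helper, not part of this diff) of the
// flag-gated source selection these preview queries repeat, assuming
// InsightQueryNode from '~/queries/schema': with the HogQL flag on, the
// preview reads the new metric query; otherwise it converts the legacy filters.
function previewQuerySource(
    hogqlEnabled: boolean,
    metricQuery: InsightQueryNode,
    legacyFilters: Partial<FilterType>
): InsightQueryNode {
    return hogqlEnabled ? metricQuery : filtersToQueryNode(legacyFilters)
}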
+ + ) +} diff --git a/frontend/src/scenes/experiments/Metrics/PrimaryGoalTrends.tsx b/frontend/src/scenes/experiments/Metrics/PrimaryGoalTrends.tsx new file mode 100644 index 00000000000..0ce1cb72e33 --- /dev/null +++ b/frontend/src/scenes/experiments/Metrics/PrimaryGoalTrends.tsx @@ -0,0 +1,160 @@ +import { LemonInput, LemonLabel } from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' +import { TestAccountFilterSwitch } from 'lib/components/TestAccountFiltersSwitch' +import { EXPERIMENT_DEFAULT_DURATION, FEATURE_FLAGS } from 'lib/constants' +import { LemonBanner } from 'lib/lemon-ui/LemonBanner' +import { ActionFilter } from 'scenes/insights/filters/ActionFilter/ActionFilter' +import { MathAvailability } from 'scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow' +import { teamLogic } from 'scenes/teamLogic' + +import { actionsAndEventsToSeries, filtersToQueryNode } from '~/queries/nodes/InsightQuery/utils/filtersToQueryNode' +import { queryNodeToFilter } from '~/queries/nodes/InsightQuery/utils/queryNodeToFilter' +import { Query } from '~/queries/Query/Query' +import { ExperimentTrendsQuery, NodeKind } from '~/queries/schema' +import { FilterType } from '~/types' + +import { experimentLogic } from '../experimentLogic' +import { commonActionFilterProps } from './Selectors' + +export function PrimaryGoalTrends(): JSX.Element { + const { experiment, isExperimentRunning, featureFlags } = useValues(experimentLogic) + const { setExperiment, setTrendsMetric } = useActions(experimentLogic) + const { currentTeam } = useValues(teamLogic) + const hasFilters = (currentTeam?.test_account_filters || []).length > 0 + + const metricIdx = 0 + const currentMetric = experiment.metrics[metricIdx] as ExperimentTrendsQuery + + return ( + <> +
+ Name (optional) + {featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL] && ( + { + setTrendsMetric({ + metricIdx, + name: newName, + }) + }} + /> + )} +
+ { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + return queryNodeToFilter(currentMetric.count_query) + } + return experiment.filters + })()} + setFilters={({ actions, events, data_warehouse }: Partial): void => { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + const series = actionsAndEventsToSeries( + { actions, events, data_warehouse } as any, + true, + MathAvailability.All + ) + + setTrendsMetric({ + metricIdx, + series, + }) + } else { + if (actions?.length) { + setExperiment({ + filters: { + ...experiment.filters, + actions, + events: undefined, + data_warehouse: undefined, + }, + }) + } else if (events?.length) { + setExperiment({ + filters: { + ...experiment.filters, + events, + actions: undefined, + data_warehouse: undefined, + }, + }) + } else if (data_warehouse?.length) { + setExperiment({ + filters: { + ...experiment.filters, + data_warehouse, + actions: undefined, + events: undefined, + }, + }) + } + } + }} + typeKey="experiment-metric" + buttonCopy="Add graph series" + showSeriesIndicator={true} + entitiesLimit={1} + showNumericalPropsOnly={true} + {...commonActionFilterProps} + /> +
+ { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + const val = currentMetric.count_query?.filterTestAccounts + return hasFilters ? !!val : false + } + return hasFilters ? !!experiment.filters.filter_test_accounts : false + })()} + onChange={(checked: boolean) => { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + setTrendsMetric({ + metricIdx, + filterTestAccounts: checked, + }) + } else { + setExperiment({ + filters: { + ...experiment.filters, + filter_test_accounts: checked, + }, + }) + } + }} + fullWidth + /> +
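// A minimal sketch (illustrative helper, not part of this diff) of the
// checked-state rule used by the test-account switch above: the toggle can
// only read as "on" when the team has test-account filters configured at all.
function isTestAccountFilterChecked(hasTeamFilters: boolean, filterTestAccounts?: boolean): boolean {
    return hasTeamFilters ? !!filterTestAccounts : false
}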
+ {isExperimentRunning && ( + + Preview insights are generated based on {EXPERIMENT_DEFAULT_DURATION} days of data. This can cause a + mismatch between the preview and the actual results. + + )} +
+ {/* :FLAG: CLEAN UP AFTER MIGRATION */} + { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + return currentMetric.count_query + } + return filtersToQueryNode(experiment.filters) + })(), + showTable: false, + showLastComputation: true, + showLastComputationRefresh: false, + }} + readOnly + /> +
+ + ) +} diff --git a/frontend/src/scenes/experiments/Metrics/PrimaryGoalTrendsExposure.tsx b/frontend/src/scenes/experiments/Metrics/PrimaryGoalTrendsExposure.tsx new file mode 100644 index 00000000000..4ebe43c30e9 --- /dev/null +++ b/frontend/src/scenes/experiments/Metrics/PrimaryGoalTrendsExposure.tsx @@ -0,0 +1,157 @@ +import { useActions, useValues } from 'kea' +import { TestAccountFilterSwitch } from 'lib/components/TestAccountFiltersSwitch' +import { EXPERIMENT_DEFAULT_DURATION, FEATURE_FLAGS } from 'lib/constants' +import { LemonBanner } from 'lib/lemon-ui/LemonBanner' +import { ActionFilter } from 'scenes/insights/filters/ActionFilter/ActionFilter' +import { MathAvailability } from 'scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow' +import { teamLogic } from 'scenes/teamLogic' + +import { actionsAndEventsToSeries, filtersToQueryNode } from '~/queries/nodes/InsightQuery/utils/filtersToQueryNode' +import { queryNodeToFilter } from '~/queries/nodes/InsightQuery/utils/queryNodeToFilter' +import { Query } from '~/queries/Query/Query' +import { ExperimentTrendsQuery, InsightQueryNode, NodeKind } from '~/queries/schema' +import { FilterType } from '~/types' + +import { experimentLogic } from '../experimentLogic' +import { commonActionFilterProps } from './Selectors' + +export function PrimaryGoalTrendsExposure(): JSX.Element { + const { experiment, isExperimentRunning, featureFlags } = useValues(experimentLogic) + const { setExperiment, setTrendsExposureMetric } = useActions(experimentLogic) + const { currentTeam } = useValues(teamLogic) + const hasFilters = (currentTeam?.test_account_filters || []).length > 0 + const currentMetric = experiment.metrics[0] as ExperimentTrendsQuery + + return ( + <> + { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + return queryNodeToFilter(currentMetric.exposure_query as InsightQueryNode) + } + return experiment.parameters.custom_exposure_filter as FilterType + })()} + setFilters={({ actions, events, data_warehouse }: Partial): void => { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + const series = actionsAndEventsToSeries( + { actions, events, data_warehouse } as any, + true, + MathAvailability.All + ) + + setTrendsExposureMetric({ + metricIdx: 0, + series, + }) + } else { + if (actions?.length) { + setExperiment({ + parameters: { + ...experiment.parameters, + custom_exposure_filter: { + ...experiment.parameters.custom_exposure_filter, + actions, + events: undefined, + data_warehouse: undefined, + }, + }, + }) + } else if (events?.length) { + setExperiment({ + parameters: { + ...experiment.parameters, + custom_exposure_filter: { + ...experiment.parameters.custom_exposure_filter, + events, + actions: undefined, + data_warehouse: undefined, + }, + }, + }) + } else if (data_warehouse?.length) { + setExperiment({ + parameters: { + ...experiment.parameters, + custom_exposure_filter: { + ...experiment.parameters.custom_exposure_filter, + data_warehouse, + actions: undefined, + events: undefined, + }, + }, + }) + } + } + }} + typeKey="experiment-metric" + buttonCopy="Add graph series" + showSeriesIndicator={true} + entitiesLimit={1} + showNumericalPropsOnly={true} + {...commonActionFilterProps} + /> +
+ { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + const val = currentMetric.exposure_query?.filterTestAccounts + return hasFilters ? !!val : false + } + return hasFilters + ? !!(experiment.parameters.custom_exposure_filter as FilterType).filter_test_accounts + : false + })()} + onChange={(checked: boolean) => { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + setTrendsExposureMetric({ + metricIdx: 0, + filterTestAccounts: checked, + }) + } else { + setExperiment({ + parameters: { + ...experiment.parameters, + custom_exposure_filter: { + ...experiment.parameters.custom_exposure_filter, + filter_test_accounts: checked, + }, + }, + }) + } + }} + fullWidth + /> +
+ {isExperimentRunning && ( + + Preview insights are generated based on {EXPERIMENT_DEFAULT_DURATION} days of data. This can cause a + mismatch between the preview and the actual results. + + )} +
+ {/* :FLAG: CLEAN UP AFTER MIGRATION */} + { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + return currentMetric.exposure_query + } + return filtersToQueryNode(experiment.parameters.custom_exposure_filter as FilterType) + })(), + showTable: false, + showLastComputation: true, + showLastComputationRefresh: false, + }} + readOnly + /> +
+ + ) +} diff --git a/frontend/src/scenes/experiments/Metrics/PrimaryMetricModal.tsx b/frontend/src/scenes/experiments/Metrics/PrimaryMetricModal.tsx new file mode 100644 index 00000000000..14fd6c7d4e9 --- /dev/null +++ b/frontend/src/scenes/experiments/Metrics/PrimaryMetricModal.tsx @@ -0,0 +1,98 @@ +import { LemonButton, LemonModal, LemonSelect } from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' +import { FEATURE_FLAGS } from 'lib/constants' + +import { ExperimentFunnelsQuery } from '~/queries/schema' +import { Experiment, InsightType } from '~/types' + +import { experimentLogic, getDefaultFilters, getDefaultFunnelsMetric, getDefaultTrendsMetric } from '../experimentLogic' +import { PrimaryGoalFunnels } from '../Metrics/PrimaryGoalFunnels' +import { PrimaryGoalTrends } from '../Metrics/PrimaryGoalTrends' + +export function PrimaryMetricModal({ + experimentId, + isOpen, + onClose, +}: { + experimentId: Experiment['id'] + isOpen: boolean + onClose: () => void +}): JSX.Element { + const { experiment, experimentLoading, getMetricType, featureFlags } = useValues(experimentLogic({ experimentId })) + const { updateExperimentGoal, setExperiment } = useActions(experimentLogic({ experimentId })) + + const metricIdx = 0 + const metricType = getMetricType(metricIdx) + + let funnelStepsLength = 0 + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL] && metricType === InsightType.FUNNELS) { + const metric = experiment.metrics[metricIdx] as ExperimentFunnelsQuery + funnelStepsLength = metric?.funnels_query?.series?.length || 0 + } else { + funnelStepsLength = (experiment.filters?.events?.length || 0) + (experiment.filters?.actions?.length || 0) + } + + return ( + + + Cancel + + { + updateExperimentGoal(experiment.filters) + }} + type="primary" + loading={experimentLoading} + data-attr="create-annotation-submit" + > + Save + +
+ } + > +
+ Metric type + { + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + setExperiment({ + ...experiment, + metrics: [ + ...experiment.metrics.slice(0, metricIdx), + newMetricType === InsightType.TRENDS + ? getDefaultTrendsMetric() + : getDefaultFunnelsMetric(), + ...experiment.metrics.slice(metricIdx + 1), + ], + }) + } else { + setExperiment({ + ...experiment, + filters: getDefaultFilters(newMetricType, undefined), + }) + } + }} + options={[ + { value: InsightType.TRENDS, label: Trends }, + { value: InsightType.FUNNELS, label: Funnels }, + ]} + /> +
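// A minimal sketch (illustrative helper, not part of this diff) of the
// replacement done in onChange above: switching the metric type swaps a fresh
// default query into metrics[metricIdx] while leaving the other metrics intact.
function replaceMetricAt<T>(metrics: T[], metricIdx: number, next: T): T[] {
    return [...metrics.slice(0, metricIdx), next, ...metrics.slice(metricIdx + 1)]
}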
+ {metricType === InsightType.TRENDS ? : } + + ) +} diff --git a/frontend/src/scenes/experiments/Metrics/PrimaryTrendsExposureModal.tsx b/frontend/src/scenes/experiments/Metrics/PrimaryTrendsExposureModal.tsx new file mode 100644 index 00000000000..7c4f49c114a --- /dev/null +++ b/frontend/src/scenes/experiments/Metrics/PrimaryTrendsExposureModal.tsx @@ -0,0 +1,57 @@ +import { LemonButton, LemonModal } from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' +import { FEATURE_FLAGS } from 'lib/constants' + +import { Experiment } from '~/types' + +import { experimentLogic } from '../experimentLogic' +import { PrimaryGoalTrendsExposure } from '../Metrics/PrimaryGoalTrendsExposure' + +export function PrimaryTrendsExposureModal({ + experimentId, + isOpen, + onClose, +}: { + experimentId: Experiment['id'] + isOpen: boolean + onClose: () => void +}): JSX.Element { + const { experiment, experimentLoading, featureFlags } = useValues(experimentLogic({ experimentId })) + const { updateExperimentExposure, updateExperiment } = useActions(experimentLogic({ experimentId })) + + return ( + + + Cancel + + { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + updateExperiment({ + metrics: experiment.metrics, + }) + } else { + updateExperimentExposure(experiment.parameters.custom_exposure_filter ?? null) + } + }} + type="primary" + loading={experimentLoading} + data-attr="create-annotation-submit" + > + Save + +
+ } + > + + + ) +} diff --git a/frontend/src/scenes/experiments/Metrics/SecondaryGoalFunnels.tsx b/frontend/src/scenes/experiments/Metrics/SecondaryGoalFunnels.tsx new file mode 100644 index 00000000000..a0e903fdeab --- /dev/null +++ b/frontend/src/scenes/experiments/Metrics/SecondaryGoalFunnels.tsx @@ -0,0 +1,391 @@ +import { LemonLabel } from '@posthog/lemon-ui' +import { LemonInput } from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' +import { TestAccountFilterSwitch } from 'lib/components/TestAccountFiltersSwitch' +import { EXPERIMENT_DEFAULT_DURATION, FEATURE_FLAGS } from 'lib/constants' +import { LemonBanner } from 'lib/lemon-ui/LemonBanner' +import { ActionFilter } from 'scenes/insights/filters/ActionFilter/ActionFilter' +import { MathAvailability } from 'scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow' +import { getHogQLValue } from 'scenes/insights/filters/AggregationSelect' +import { teamLogic } from 'scenes/teamLogic' + +import { actionsAndEventsToSeries, filtersToQueryNode } from '~/queries/nodes/InsightQuery/utils/filtersToQueryNode' +import { queryNodeToFilter } from '~/queries/nodes/InsightQuery/utils/queryNodeToFilter' +import { Query } from '~/queries/Query/Query' +import { ExperimentFunnelsQuery, NodeKind } from '~/queries/schema' +import { BreakdownAttributionType, FilterType, FunnelsFilterType } from '~/types' + +import { experimentLogic } from '../experimentLogic' +import { + commonActionFilterProps, + FunnelAggregationSelect, + FunnelAttributionSelect, + FunnelConversionWindowFilter, +} from './Selectors' + +export function SecondaryGoalFunnels({ metricIdx }: { metricIdx: number }): JSX.Element { + const { currentTeam } = useValues(teamLogic) + const { experiment, isExperimentRunning, featureFlags } = useValues(experimentLogic) + const { setExperiment, setFunnelsMetric } = useActions(experimentLogic) + const hasFilters = (currentTeam?.test_account_filters || []).length > 0 + const currentMetric = experiment.metrics_secondary[metricIdx] as ExperimentFunnelsQuery + + return ( + <> +
+ Name (optional) + { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + return currentMetric.name + } + return experiment.secondary_metrics[metricIdx].name + })()} + onChange={(newName) => { + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + setFunnelsMetric({ + metricIdx, + name: newName, + isSecondary: true, + }) + } else { + setExperiment({ + secondary_metrics: experiment.secondary_metrics.map((metric, idx) => + idx === metricIdx ? { ...metric, name: newName } : metric + ), + }) + } + }} + /> +
+ { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + return queryNodeToFilter(currentMetric.funnels_query) + } + return experiment.secondary_metrics[metricIdx].filters + })()} + setFilters={({ actions, events, data_warehouse }: Partial): void => { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + const series = actionsAndEventsToSeries( + { actions, events, data_warehouse } as any, + true, + MathAvailability.None + ) + + setFunnelsMetric({ + metricIdx, + series, + isSecondary: true, + }) + } else { + if (actions?.length) { + setExperiment({ + secondary_metrics: experiment.secondary_metrics.map((metric, idx) => + idx === metricIdx + ? { + ...metric, + filters: { + ...metric.filters, + actions, + events: undefined, + data_warehouse: undefined, + }, + } + : metric + ), + }) + } else if (events?.length) { + setExperiment({ + secondary_metrics: experiment.secondary_metrics.map((metric, idx) => + idx === metricIdx + ? { + ...metric, + filters: { + ...metric.filters, + events, + actions: undefined, + data_warehouse: undefined, + }, + } + : metric + ), + }) + } else if (data_warehouse?.length) { + setExperiment({ + secondary_metrics: experiment.secondary_metrics.map((metric, idx) => + idx === metricIdx + ? { + ...metric, + filters: { + ...metric.filters, + data_warehouse, + actions: undefined, + events: undefined, + }, + } + : metric + ), + }) + } + } + }} + typeKey="experiment-metric" + mathAvailability={MathAvailability.None} + buttonCopy="Add funnel step" + showSeriesIndicator={true} + seriesIndicatorType="numeric" + sortable={true} + showNestedArrow={true} + {...commonActionFilterProps} + /> +
+ { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + return getHogQLValue( + currentMetric.funnels_query.aggregation_group_type_index ?? undefined, + currentMetric.funnels_query.funnelsFilter?.funnelAggregateByHogQL ?? undefined + ) + } + return getHogQLValue( + experiment.secondary_metrics[metricIdx].filters.aggregation_group_type_index, + (experiment.secondary_metrics[metricIdx].filters as FunnelsFilterType) + .funnel_aggregate_by_hogql + ) + })()} + onChange={(value) => { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + setFunnelsMetric({ + metricIdx, + funnelAggregateByHogQL: value, + isSecondary: true, + }) + } else { + setExperiment({ + secondary_metrics: experiment.secondary_metrics.map((metric, idx) => + idx === metricIdx + ? { + ...metric, + filters: { + ...metric.filters, + funnel_aggregate_by_hogql: value, + }, + } + : metric + ), + }) + } + }} + /> + { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + return currentMetric.funnels_query?.funnelsFilter?.funnelWindowInterval + } + return (experiment.secondary_metrics[metricIdx].filters as FunnelsFilterType) + .funnel_window_interval + })()} + funnelWindowIntervalUnit={(() => { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + return currentMetric.funnels_query?.funnelsFilter?.funnelWindowIntervalUnit + } + return (experiment.secondary_metrics[metricIdx].filters as FunnelsFilterType) + .funnel_window_interval_unit + })()} + onFunnelWindowIntervalChange={(funnelWindowInterval) => { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + setFunnelsMetric({ + metricIdx, + funnelWindowInterval: funnelWindowInterval, + isSecondary: true, + }) + } else { + setExperiment({ + secondary_metrics: experiment.secondary_metrics.map((metric, idx) => + idx === metricIdx + ? { + ...metric, + filters: { + ...metric.filters, + funnel_window_interval: funnelWindowInterval, + }, + } + : metric + ), + }) + } + }} + onFunnelWindowIntervalUnitChange={(funnelWindowIntervalUnit) => { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + setFunnelsMetric({ + metricIdx, + funnelWindowIntervalUnit: funnelWindowIntervalUnit || undefined, + isSecondary: true, + }) + } else { + setExperiment({ + secondary_metrics: experiment.secondary_metrics.map((metric, idx) => + idx === metricIdx + ? { + ...metric, + filters: { + ...metric.filters, + funnel_window_interval_unit: funnelWindowIntervalUnit || undefined, + }, + } + : metric + ), + }) + } + }} + /> + { + // :FLAG: CLEAN UP AFTER MIGRATION + let breakdownAttributionType + let breakdownAttributionValue + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + breakdownAttributionType = + currentMetric.funnels_query?.funnelsFilter?.breakdownAttributionType + breakdownAttributionValue = + currentMetric.funnels_query?.funnelsFilter?.breakdownAttributionValue + } else { + breakdownAttributionType = ( + experiment.secondary_metrics[metricIdx].filters as FunnelsFilterType + ).breakdown_attribution_type + breakdownAttributionValue = ( + experiment.secondary_metrics[metricIdx].filters as FunnelsFilterType + ).breakdown_attribution_value + } + + const currentValue: BreakdownAttributionType | `${BreakdownAttributionType.Step}/${number}` = + !breakdownAttributionType + ? BreakdownAttributionType.FirstTouch + : breakdownAttributionType === BreakdownAttributionType.Step + ? 
`${breakdownAttributionType}/${breakdownAttributionValue || 0}` + : breakdownAttributionType + + return currentValue + })()} + onChange={(value) => { + const [breakdownAttributionType, breakdownAttributionValue] = (value || '').split('/') + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + setFunnelsMetric({ + metricIdx, + breakdownAttributionType: breakdownAttributionType as BreakdownAttributionType, + breakdownAttributionValue: breakdownAttributionValue + ? parseInt(breakdownAttributionValue) + : undefined, + isSecondary: true, + }) + } else { + setExperiment({ + secondary_metrics: experiment.secondary_metrics.map((metric, idx) => + idx === metricIdx + ? { + ...metric, + filters: { + ...metric.filters, + breakdown_attribution_type: + breakdownAttributionType as BreakdownAttributionType, + breakdown_attribution_value: breakdownAttributionValue + ? parseInt(breakdownAttributionValue) + : 0, + }, + } + : metric + ), + }) + } + }} + stepsLength={(() => { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + return currentMetric.funnels_query?.series?.length + } + return Math.max( + experiment.secondary_metrics[metricIdx].filters.actions?.length ?? 0, + experiment.secondary_metrics[metricIdx].filters.events?.length ?? 0, + experiment.secondary_metrics[metricIdx].filters.data_warehouse?.length ?? 0 + ) + })()} + /> + { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + const val = (experiment.metrics_secondary[metricIdx] as ExperimentFunnelsQuery) + .funnels_query?.filterTestAccounts + return hasFilters ? !!val : false + } + return hasFilters + ? !!experiment.secondary_metrics[metricIdx].filters.filter_test_accounts + : false + })()} + onChange={(checked: boolean) => { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + setFunnelsMetric({ + metricIdx, + filterTestAccounts: checked, + isSecondary: true, + }) + } else { + setExperiment({ + secondary_metrics: experiment.secondary_metrics.map((metric, idx) => + idx === metricIdx + ? { + ...metric, + filters: { + ...metric.filters, + filter_test_accounts: checked, + }, + } + : metric + ), + }) + } + }} + fullWidth + /> +
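// A minimal sketch (illustrative helper and type, not part of this diff) of
// the immutable update the legacy branches of this file repeat: patch one
// secondary metric's filters without mutating the rest of the array.
type LegacySecondaryMetric = { name: string; filters: Record<string, any> }

function patchSecondaryMetricFilters(
    metrics: LegacySecondaryMetric[],
    metricIdx: number,
    patch: Record<string, any>
): LegacySecondaryMetric[] {
    return metrics.map((metric, idx) =>
        idx === metricIdx ? { ...metric, filters: { ...metric.filters, ...patch } } : metric
    )
}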
+ {isExperimentRunning && ( + + Preview insights are generated based on {EXPERIMENT_DEFAULT_DURATION} days of data. This can cause a + mismatch between the preview and the actual results. + + )} +
+ {/* :FLAG: CLEAN UP AFTER MIGRATION */} + { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + return currentMetric.funnels_query + } + return filtersToQueryNode(experiment.secondary_metrics[metricIdx].filters) + })(), + showTable: false, + showLastComputation: true, + showLastComputationRefresh: false, + }} + readOnly + /> +
+ + ) +} diff --git a/frontend/src/scenes/experiments/Metrics/SecondaryGoalTrends.tsx b/frontend/src/scenes/experiments/Metrics/SecondaryGoalTrends.tsx new file mode 100644 index 00000000000..20aae645e6e --- /dev/null +++ b/frontend/src/scenes/experiments/Metrics/SecondaryGoalTrends.tsx @@ -0,0 +1,204 @@ +import { LemonLabel } from '@posthog/lemon-ui' +import { LemonInput } from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' +import { TestAccountFilterSwitch } from 'lib/components/TestAccountFiltersSwitch' +import { EXPERIMENT_DEFAULT_DURATION, FEATURE_FLAGS } from 'lib/constants' +import { LemonBanner } from 'lib/lemon-ui/LemonBanner' +import { ActionFilter } from 'scenes/insights/filters/ActionFilter/ActionFilter' +import { MathAvailability } from 'scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow' +import { teamLogic } from 'scenes/teamLogic' + +import { actionsAndEventsToSeries, filtersToQueryNode } from '~/queries/nodes/InsightQuery/utils/filtersToQueryNode' +import { queryNodeToFilter } from '~/queries/nodes/InsightQuery/utils/queryNodeToFilter' +import { Query } from '~/queries/Query/Query' +import { ExperimentTrendsQuery, NodeKind } from '~/queries/schema' +import { FilterType } from '~/types' + +import { experimentLogic } from '../experimentLogic' +import { commonActionFilterProps } from './Selectors' + +export function SecondaryGoalTrends({ metricIdx }: { metricIdx: number }): JSX.Element { + const { experiment, isExperimentRunning, featureFlags } = useValues(experimentLogic) + const { setExperiment, setTrendsMetric } = useActions(experimentLogic) + const { currentTeam } = useValues(teamLogic) + const hasFilters = (currentTeam?.test_account_filters || []).length > 0 + const currentMetric = experiment.metrics_secondary[metricIdx] as ExperimentTrendsQuery + + return ( + <> +
+ Name (optional) + { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + return currentMetric.name + } + return experiment.secondary_metrics[metricIdx].name + })()} + onChange={(newName) => { + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + setTrendsMetric({ + metricIdx, + name: newName, + isSecondary: true, + }) + } else { + setExperiment({ + secondary_metrics: experiment.secondary_metrics.map((metric, idx) => + idx === metricIdx ? { ...metric, name: newName } : metric + ), + }) + } + }} + /> +
+ { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + return queryNodeToFilter(currentMetric.count_query) + } + return experiment.secondary_metrics[metricIdx].filters + })()} + setFilters={({ actions, events, data_warehouse }: Partial): void => { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + const series = actionsAndEventsToSeries( + { actions, events, data_warehouse } as any, + true, + MathAvailability.All + ) + + setTrendsMetric({ + metricIdx, + series, + isSecondary: true, + }) + } else { + if (actions?.length) { + setExperiment({ + secondary_metrics: experiment.secondary_metrics.map((metric, idx) => + idx === metricIdx + ? { + ...metric, + filters: { + ...metric.filters, + actions, + events: undefined, + data_warehouse: undefined, + }, + } + : metric + ), + }) + } else if (events?.length) { + setExperiment({ + secondary_metrics: experiment.secondary_metrics.map((metric, idx) => + idx === metricIdx + ? { + ...metric, + filters: { + ...metric.filters, + events, + actions: undefined, + data_warehouse: undefined, + }, + } + : metric + ), + }) + } else if (data_warehouse?.length) { + setExperiment({ + secondary_metrics: experiment.secondary_metrics.map((metric, idx) => + idx === metricIdx + ? { + ...metric, + filters: { + ...metric.filters, + data_warehouse, + actions: undefined, + events: undefined, + }, + } + : metric + ), + }) + } + } + }} + typeKey="experiment-metric" + buttonCopy="Add graph series" + showSeriesIndicator={true} + entitiesLimit={1} + showNumericalPropsOnly={true} + {...commonActionFilterProps} + /> +
+ { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + const val = currentMetric.count_query?.filterTestAccounts + return hasFilters ? !!val : false + } + return hasFilters + ? !!experiment.secondary_metrics[metricIdx].filters.filter_test_accounts + : false + })()} + onChange={(checked: boolean) => { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + setTrendsMetric({ + metricIdx, + filterTestAccounts: checked, + isSecondary: true, + }) + } else { + setExperiment({ + secondary_metrics: experiment.secondary_metrics.map((metric, idx) => + idx === metricIdx + ? { + ...metric, + filters: { + ...metric.filters, + filter_test_accounts: checked, + }, + } + : metric + ), + }) + } + }} + fullWidth + /> +
+ {isExperimentRunning && ( + + Preview insights are generated based on {EXPERIMENT_DEFAULT_DURATION} days of data. This can cause a + mismatch between the preview and the actual results. + + )} +
+ {/* :FLAG: CLEAN UP AFTER MIGRATION */} + { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + return currentMetric.count_query + } + return filtersToQueryNode(experiment.secondary_metrics[metricIdx].filters) + })(), + showTable: false, + showLastComputation: true, + showLastComputationRefresh: false, + }} + readOnly + /> +
+ + ) +} diff --git a/frontend/src/scenes/experiments/Metrics/SecondaryMetricChartModal.tsx b/frontend/src/scenes/experiments/Metrics/SecondaryMetricChartModal.tsx new file mode 100644 index 00000000000..ec540aa43c0 --- /dev/null +++ b/frontend/src/scenes/experiments/Metrics/SecondaryMetricChartModal.tsx @@ -0,0 +1,38 @@ +import { LemonButton, LemonModal } from '@posthog/lemon-ui' +import { useValues } from 'kea' + +import { Experiment } from '~/types' + +import { experimentLogic } from '../experimentLogic' +import { ResultsQuery } from '../ExperimentView/components' + +export function SecondaryMetricChartModal({ + experimentId, + metricIdx, + isOpen, + onClose, +}: { + experimentId: Experiment['id'] + metricIdx: number + isOpen: boolean + onClose: () => void +}): JSX.Element { + const { secondaryMetricResults } = useValues(experimentLogic({ experimentId })) + const targetResults = secondaryMetricResults && secondaryMetricResults[metricIdx] + + return ( + + Close + + } + > + + + ) +} diff --git a/frontend/src/scenes/experiments/Metrics/SecondaryMetricModal.tsx b/frontend/src/scenes/experiments/Metrics/SecondaryMetricModal.tsx new file mode 100644 index 00000000000..14a8304b973 --- /dev/null +++ b/frontend/src/scenes/experiments/Metrics/SecondaryMetricModal.tsx @@ -0,0 +1,137 @@ +import { LemonButton, LemonModal, LemonSelect } from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' +import { FEATURE_FLAGS } from 'lib/constants' + +import { Experiment, InsightType } from '~/types' + +import { experimentLogic, getDefaultFilters, getDefaultFunnelsMetric, getDefaultTrendsMetric } from '../experimentLogic' +import { SecondaryGoalFunnels } from './SecondaryGoalFunnels' +import { SecondaryGoalTrends } from './SecondaryGoalTrends' + +export function SecondaryMetricModal({ + experimentId, + metricIdx, + isOpen, + onClose, +}: { + experimentId: Experiment['id'] + metricIdx: number + isOpen: boolean + onClose: () => void +}): JSX.Element { + const { experiment, experimentLoading, getSecondaryMetricType, featureFlags } = useValues( + experimentLogic({ experimentId }) + ) + const { setExperiment, updateExperiment } = useActions(experimentLogic({ experimentId })) + const metricType = getSecondaryMetricType(metricIdx) + + return ( + + { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + const newMetricsSecondary = experiment.metrics_secondary.filter( + (_, idx) => idx !== metricIdx + ) + setExperiment({ + metrics_secondary: newMetricsSecondary, + }) + updateExperiment({ + metrics_secondary: newMetricsSecondary, + }) + } else { + const newSecondaryMetrics = experiment.secondary_metrics.filter( + (_, idx) => idx !== metricIdx + ) + setExperiment({ + secondary_metrics: newSecondaryMetrics, + }) + updateExperiment({ + secondary_metrics: newSecondaryMetrics, + }) + } + }} + > + Delete + +
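// A minimal sketch (illustrative helper, not part of this diff) of the Delete
// handler above: the metric at metricIdx is dropped from whichever metrics
// array the migration flag selects, then the same value is set locally and
// persisted via updateExperiment.
function removeMetricAt<T>(metrics: T[], metricIdx: number): T[] {
    return metrics.filter((_, idx) => idx !== metricIdx)
}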
+ + Cancel + + { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + updateExperiment({ + metrics_secondary: experiment.metrics_secondary, + }) + } else { + updateExperiment({ + secondary_metrics: experiment.secondary_metrics, + }) + } + }} + type="primary" + loading={experimentLoading} + data-attr="create-annotation-submit" + > + Save + +
+
+ } + > +
+ Metric type + { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + setExperiment({ + ...experiment, + metrics_secondary: [ + ...experiment.metrics_secondary.slice(0, metricIdx), + newMetricType === InsightType.TRENDS + ? getDefaultTrendsMetric() + : getDefaultFunnelsMetric(), + ...experiment.metrics_secondary.slice(metricIdx + 1), + ], + }) + } else { + setExperiment({ + ...experiment, + secondary_metrics: [ + ...experiment.secondary_metrics.slice(0, metricIdx), + newMetricType === InsightType.TRENDS + ? { name: '', filters: getDefaultFilters(InsightType.TRENDS, undefined) } + : { name: '', filters: getDefaultFilters(InsightType.FUNNELS, undefined) }, + ...experiment.secondary_metrics.slice(metricIdx + 1), + ], + }) + } + }} + options={[ + { value: InsightType.TRENDS, label: Trends }, + { value: InsightType.FUNNELS, label: Funnels }, + ]} + /> +
+ {metricType === InsightType.TRENDS ? ( + + ) : ( + + )} + + ) +} diff --git a/frontend/src/scenes/experiments/Metrics/Selectors.tsx b/frontend/src/scenes/experiments/Metrics/Selectors.tsx new file mode 100644 index 00000000000..49e11eba7ad --- /dev/null +++ b/frontend/src/scenes/experiments/Metrics/Selectors.tsx @@ -0,0 +1,253 @@ +import { IconInfo } from '@posthog/icons' +import { LemonInput, LemonSelect, LemonSelectOption, LemonSelectSection, Link } from '@posthog/lemon-ui' +import { useValues } from 'kea' +import { HogQLEditor } from 'lib/components/HogQLEditor/HogQLEditor' +import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' +import { groupsAccessLogic } from 'lib/introductions/groupsAccessLogic' +import { Tooltip } from 'lib/lemon-ui/Tooltip' +import { capitalizeFirstLetter, pluralize } from 'lib/utils' +import { GroupIntroductionFooter } from 'scenes/groups/GroupsIntroduction' +import { FUNNEL_STEP_COUNT_LIMIT } from 'scenes/insights/EditorFilters/FunnelsQuerySteps' +import { TIME_INTERVAL_BOUNDS } from 'scenes/insights/views/Funnels/FunnelConversionWindowFilter' + +import { groupsModel } from '~/models/groupsModel' +import { BreakdownAttributionType, FunnelConversionWindowTimeUnit, StepOrderValue } from '~/types' + +export const commonActionFilterProps = { + actionsTaxonomicGroupTypes: [ + TaxonomicFilterGroupType.Events, + TaxonomicFilterGroupType.Actions, + TaxonomicFilterGroupType.DataWarehouse, + ], + propertiesTaxonomicGroupTypes: [ + TaxonomicFilterGroupType.EventProperties, + TaxonomicFilterGroupType.PersonProperties, + TaxonomicFilterGroupType.EventFeatureFlags, + TaxonomicFilterGroupType.Cohorts, + TaxonomicFilterGroupType.Elements, + TaxonomicFilterGroupType.SessionProperties, + TaxonomicFilterGroupType.HogQLExpression, + TaxonomicFilterGroupType.DataWarehouseProperties, + TaxonomicFilterGroupType.DataWarehousePersonProperties, + ], +} + +// Forked from https://github.com/PostHog/posthog/blob/master/frontend/src/scenes/insights/filters/AggregationSelect.tsx +export function FunnelAggregationSelect({ + value, + onChange, +}: { + value: string + onChange: (value: string) => void +}): JSX.Element { + const { groupTypes, aggregationLabel } = useValues(groupsModel) + const { needsUpgradeForGroups, canStartUsingGroups } = useValues(groupsAccessLogic) + + const UNIQUE_USERS = 'person_id' + const baseValues = [UNIQUE_USERS] + const optionSections: LemonSelectSection[] = [ + { + title: 'Event Aggregation', + options: [ + { + value: UNIQUE_USERS, + label: 'Unique users', + }, + ], + }, + ] + if (needsUpgradeForGroups || canStartUsingGroups) { + // if (false) { + optionSections[0].footer = + } else { + Array.from(groupTypes.values()).forEach((groupType) => { + baseValues.push(`$group_${groupType.group_type_index}`) + optionSections[0].options.push({ + value: `$group_${groupType.group_type_index}`, + label: `Unique ${aggregationLabel(groupType.group_type_index).plural}`, + }) + }) + } + + baseValues.push(`properties.$session_id`) + optionSections[0].options.push({ + value: 'properties.$session_id', + label: `Unique sessions`, + }) + optionSections[0].options.push({ + label: 'Custom HogQL expression', + options: [ + { + // This is a bit of a hack so that the HogQL option is only highlighted as active when the user has + // set a custom value (because actually _all_ the options are HogQL) + value: !value || baseValues.includes(value) ? 
'' : value, + label: {value}, + labelInMenu: function CustomHogQLOptionWrapped({ onSelect }) { + return ( + // eslint-disable-next-line react/forbid-dom-props +
+ +
+ ) + }, + }, + ], + }) + + return ( +
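// A minimal sketch (assumed semantics, mirroring how the option values above
// are built; getHogQLValue in scenes/insights/filters/AggregationSelect is the
// real source of truth): every aggregation option is ultimately one HogQL
// expression, so a group index and a custom expression collapse into a single
// select value.
function toHogQLValue(groupTypeIndex?: number, hogQLExpression?: string): string {
    if (hogQLExpression) {
        return hogQLExpression // custom expression wins
    }
    if (groupTypeIndex !== undefined) {
        return `$group_${groupTypeIndex}` // unique groups of the given type
    }
    return 'person_id' // default: unique users
}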
+ Aggregating by + +
+ ) +} + +// Forked from https://github.com/PostHog/posthog/blob/master/frontend/src/scenes/insights/views/Funnels/FunnelConversionWindowFilter.tsx +export function FunnelConversionWindowFilter({ + funnelWindowInterval, + funnelWindowIntervalUnit, + onFunnelWindowIntervalChange, + onFunnelWindowIntervalUnitChange, +}: { + funnelWindowInterval: number | undefined + funnelWindowIntervalUnit: FunnelConversionWindowTimeUnit | undefined + onFunnelWindowIntervalChange: (funnelWindowInterval: number | undefined) => void + onFunnelWindowIntervalUnitChange: (funnelWindowIntervalUnit: FunnelConversionWindowTimeUnit) => void +}): JSX.Element { + const options: LemonSelectOption[] = Object.keys(TIME_INTERVAL_BOUNDS).map( + (unit) => ({ + label: capitalizeFirstLetter(pluralize(funnelWindowInterval ?? 7, unit, `${unit}s`, false)), + value: unit as FunnelConversionWindowTimeUnit, + }) + ) + const intervalBounds = TIME_INTERVAL_BOUNDS[funnelWindowIntervalUnit ?? FunnelConversionWindowTimeUnit.Day] + + return ( +
+ + Conversion window limit + + Recommended! Limit to participants that converted within a specific time frame. + Participants that do not convert in this time frame will be considered as drop-offs. + + } + > + + + +
+ + +
+
+ ) +} + +// Forked from https://github.com/PostHog/posthog/blob/master/frontend/src/scenes/insights/EditorFilters/AttributionFilter.tsx +export function FunnelAttributionSelect({ + value, + onChange, + stepsLength, +}: { + value: BreakdownAttributionType | `${BreakdownAttributionType.Step}/${number}` + onChange: (value: BreakdownAttributionType | `${BreakdownAttributionType.Step}/${number}`) => void + stepsLength: number +}): JSX.Element { + const funnelOrderType = undefined + + return ( +
+
+ Attribution type + +
+ When breaking down funnels, it's possible that the same properties don't exist on every + event. For example, you might want to break down by browser on a funnel that contains both + frontend and backend events.
+
+ In this case, you can choose which step the properties should be selected from by + modifying the attribution type. There are four modes to choose from:
+
    +
  • First touchpoint: the first property value seen in any of the steps is chosen.
  • Last touchpoint: the last property value seen from all steps is chosen.
  • All steps: the property value must be seen in all steps to be considered in the funnel.
  • Specific step: only the property value seen at the selected step is chosen.
+
+ Read more in the{' '} + + documentation. + +
+
+ } + > + + +
+ ({ + value: `${BreakdownAttributionType.Step}/${stepIndex}` as const, + label: `Step ${stepIndex + 1}`, + hidden: stepIndex >= stepsLength, + })), + hidden: funnelOrderType === StepOrderValue.UNORDERED, + }, + ]} + onChange={onChange} + dropdownMaxContentWidth={true} + data-attr="breakdown-attributions" + /> +
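// A minimal sketch (illustrative helpers, not part of this diff) of the
// "Step/<index>" encoding this select round-trips: plain attribution types
// pass through, while step attribution is flattened into a composite string
// and split back apart in onChange.
type AttributionValue = BreakdownAttributionType | `${BreakdownAttributionType.Step}/${number}`

function encodeAttribution(type?: BreakdownAttributionType, step?: number): AttributionValue {
    if (!type) {
        return BreakdownAttributionType.FirstTouch // default when unset
    }
    return type === BreakdownAttributionType.Step ? `${type}/${step || 0}` : type
}

function decodeAttribution(value: string): { type: BreakdownAttributionType; step?: number } {
    const [type, step] = (value || '').split('/')
    return { type: type as BreakdownAttributionType, step: step ? parseInt(step, 10) : undefined }
}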
+ ) +} diff --git a/frontend/src/scenes/experiments/constants.ts b/frontend/src/scenes/experiments/constants.ts index 4c40c381ea4..7ce7c3a4995 100644 --- a/frontend/src/scenes/experiments/constants.ts +++ b/frontend/src/scenes/experiments/constants.ts @@ -2,6 +2,12 @@ import { InsightShortId } from '~/types' // :TRICKY: `new-` prefix indicates an unsaved insight and slightly alters // behaviour of insight related logics -export const EXPERIMENT_INSIGHT_ID = 'new-experiment-insight' as InsightShortId -export const EXPERIMENT_EXPOSURE_INSIGHT_ID = 'new-experiment-exposure-insight' as InsightShortId export const SECONDARY_METRIC_INSIGHT_ID = 'new-secondary-metric-insight' as InsightShortId + +export enum MetricInsightId { + Trends = 'new-experiment-trends-metric', + TrendsExposure = 'new-experiment-trends-exposure', + Funnels = 'new-experiment-funnels-metric', + SecondaryTrends = 'new-experiment-secondary-trends', + SecondaryFunnels = 'new-experiment-secondary-funnels', +} diff --git a/frontend/src/scenes/experiments/experimentLogic.tsx b/frontend/src/scenes/experiments/experimentLogic.tsx index 5ec2c1a5fe2..3746da15d5e 100644 --- a/frontend/src/scenes/experiments/experimentLogic.tsx +++ b/frontend/src/scenes/experiments/experimentLogic.tsx @@ -16,7 +16,6 @@ import { ReactElement } from 'react' import { validateFeatureFlagKey } from 'scenes/feature-flags/featureFlagLogic' import { funnelDataLogic } from 'scenes/funnels/funnelDataLogic' import { insightDataLogic } from 'scenes/insights/insightDataLogic' -import { insightVizDataLogic } from 'scenes/insights/insightVizDataLogic' import { cleanFilters, getDefaultEvent } from 'scenes/insights/utils/cleanFilters' import { sceneLogic } from 'scenes/sceneLogic' import { Scene } from 'scenes/sceneTypes' @@ -26,41 +25,41 @@ import { urls } from 'scenes/urls' import { cohortsModel } from '~/models/cohortsModel' import { groupsModel } from '~/models/groupsModel' -import { filtersToQueryNode } from '~/queries/nodes/InsightQuery/utils/filtersToQueryNode' -import { queryNodeToFilter } from '~/queries/nodes/InsightQuery/utils/queryNodeToFilter' +import { performQuery } from '~/queries/query' import { CachedExperimentFunnelsQueryResponse, CachedExperimentTrendsQueryResponse, + ExperimentFunnelsQuery, ExperimentTrendsQuery, - FunnelsQuery, - InsightVizNode, NodeKind, - TrendsQuery, } from '~/queries/schema' -import { isFunnelsQuery } from '~/queries/utils' import { ActionFilter as ActionFilterType, Breadcrumb, + BreakdownAttributionType, + ChartDisplayType, CohortType, CountPerActorMathType, + EntityTypes, Experiment, ExperimentResults, FeatureFlagType, FilterType, + FunnelConversionWindowTimeUnit, FunnelExperimentVariant, FunnelStep, FunnelVizType, InsightType, MultivariateFlagVariant, PropertyMathType, - SecondaryExperimentMetric, SecondaryMetricResults, SignificanceCode, TrendExperimentVariant, TrendResult, + TrendsFilterType, } from '~/types' -import { EXPERIMENT_EXPOSURE_INSIGHT_ID, EXPERIMENT_INSIGHT_ID } from './constants' +import { MetricInsightId } from './constants' import type { experimentLogicType } from './experimentLogicType' import { experimentsLogic } from './experimentsLogic' import { holdoutsLogic } from './holdoutsLogic' @@ -73,6 +72,7 @@ const NEW_EXPERIMENT: Experiment = { feature_flag_key: '', filters: {}, metrics: [], + metrics_secondary: [], parameters: { feature_flag_variants: [ { key: 'control', rollout_percentage: 50 }, @@ -105,12 +105,14 @@ export interface ExperimentResultCalculationError { statusCode: number } +// :FLAG: CLEAN UP 
AFTER MIGRATION export interface CachedSecondaryMetricExperimentFunnelsQueryResponse extends CachedExperimentFunnelsQueryResponse { filters?: { insight?: InsightType } } +// :FLAG: CLEAN UP AFTER MIGRATION export interface CachedSecondaryMetricExperimentTrendsQueryResponse extends CachedExperimentTrendsQueryResponse { filters?: { insight?: InsightType @@ -129,16 +131,20 @@ export const experimentLogic = kea([ ['aggregationLabel', 'groupTypes', 'showGroupsOptions'], sceneLogic, ['activeScene'], - funnelDataLogic({ dashboardItemId: EXPERIMENT_INSIGHT_ID }), - ['results as funnelResults', 'conversionMetrics'], - trendsDataLogic({ dashboardItemId: EXPERIMENT_INSIGHT_ID }), - ['results as trendResults'], - insightDataLogic({ dashboardItemId: EXPERIMENT_INSIGHT_ID }), - ['insightDataLoading as goalInsightDataLoading'], featureFlagLogic, ['featureFlags'], holdoutsLogic, ['holdouts'], + // Hook the insight state to get the results for the sample size estimation + funnelDataLogic({ dashboardItemId: MetricInsightId.Funnels }), + ['results as funnelResults', 'conversionMetrics'], + trendsDataLogic({ dashboardItemId: MetricInsightId.Trends }), + ['results as trendResults'], + // Hook into the loading state of the metric insight + insightDataLogic({ dashboardItemId: MetricInsightId.Trends }), + ['insightDataLoading as trendMetricInsightLoading'], + insightDataLogic({ dashboardItemId: MetricInsightId.Funnels }), + ['insightDataLoading as funnelMetricInsightLoading'], ], actions: [ experimentsLogic, @@ -157,24 +163,13 @@ export const experimentLogic = kea([ 'reportExperimentResultsLoadingTimeout', 'reportExperimentReleaseConditionsViewed', ], - insightDataLogic({ dashboardItemId: EXPERIMENT_INSIGHT_ID }), - ['setQuery'], - insightVizDataLogic({ dashboardItemId: EXPERIMENT_INSIGHT_ID }), - ['updateQuerySource'], - insightDataLogic({ dashboardItemId: EXPERIMENT_EXPOSURE_INSIGHT_ID }), - ['setQuery as setExposureQuery'], - insightVizDataLogic({ dashboardItemId: EXPERIMENT_EXPOSURE_INSIGHT_ID }), - ['updateQuerySource as updateExposureQuerySource'], ], })), actions({ setExperimentMissing: true, setExperiment: (experiment: Partial) => ({ experiment }), createExperiment: (draft?: boolean) => ({ draft }), - setExperimentFeatureFlagKeyFromName: true, - setNewExperimentInsight: (filters?: Partial) => ({ filters }), setExperimentType: (type?: string) => ({ type }), - setExperimentExposureInsight: (filters?: Partial) => ({ filters }), removeExperimentGroup: (idx: number) => ({ idx }), setEditExperiment: (editing: boolean) => ({ editing }), setExperimentResultCalculationError: (error: ExperimentResultCalculationError) => ({ error }), @@ -183,7 +178,6 @@ export const experimentLogic = kea([ updateExperimentGoal: (filters: Partial) => ({ filters }), updateExperimentCollectionGoal: true, updateExperimentExposure: (filters: Partial | null) => ({ filters }), - updateExperimentSecondaryMetrics: (metrics: SecondaryExperimentMetric[]) => ({ metrics }), changeExperimentStartDate: (startDate: string) => ({ startDate }), launchExperiment: true, endExperiment: true, @@ -191,10 +185,6 @@ export const experimentLogic = kea([ archiveExperiment: true, resetRunningExperiment: true, checkFlagImplementationWarning: true, - openExperimentGoalModal: true, - closeExperimentGoalModal: true, - openExperimentExposureModal: true, - closeExperimentExposureModal: true, openExperimentCollectionGoalModal: true, closeExperimentCollectionGoalModal: true, openShipVariantModal: true, @@ -203,7 +193,70 @@ export const experimentLogic = kea([ 
closeDistributionModal: true, openReleaseConditionsModal: true, closeReleaseConditionsModal: true, - updateExperimentVariantImages: (variantPreviewMediaIds: Record) => ({ variantPreviewMediaIds }), + updateExperimentVariantImages: (variantPreviewMediaIds: Record) => ({ + variantPreviewMediaIds, + }), + setTrendsMetric: ({ + metricIdx, + name, + series, + filterTestAccounts, + isSecondary = false, + }: { + metricIdx: number + name?: string + series?: any[] + filterTestAccounts?: boolean + isSecondary?: boolean + }) => ({ metricIdx, name, series, filterTestAccounts, isSecondary }), + setTrendsExposureMetric: ({ + metricIdx, + name, + series, + filterTestAccounts, + }: { + metricIdx: number + name?: string + series?: any[] + filterTestAccounts?: boolean + }) => ({ metricIdx, name, series, filterTestAccounts }), + setFunnelsMetric: ({ + metricIdx, + name, + series, + filterTestAccounts, + breakdownAttributionType, + breakdownAttributionValue, + funnelWindowInterval, + funnelWindowIntervalUnit, + aggregation_group_type_index, + funnelAggregateByHogQL, + isSecondary = false, + }: { + metricIdx: number + name?: string + series?: any[] + filterTestAccounts?: boolean + breakdownAttributionType?: BreakdownAttributionType + breakdownAttributionValue?: number + funnelWindowInterval?: number + funnelWindowIntervalUnit?: string + aggregation_group_type_index?: number + funnelAggregateByHogQL?: string + isSecondary?: boolean + }) => ({ + metricIdx, + name, + series, + filterTestAccounts, + breakdownAttributionType, + breakdownAttributionValue, + funnelWindowInterval, + funnelWindowIntervalUnit, + aggregation_group_type_index, + funnelAggregateByHogQL, + isSecondary, + }), setTabKey: (tabKey: string) => ({ tabKey }), }), reducers({ @@ -211,15 +264,6 @@ export const experimentLogic = kea([ { ...NEW_EXPERIMENT } as Experiment, { setExperiment: (state, { experiment }) => { - if (experiment.filters) { - return { ...state, ...experiment, filters: experiment.filters } - } - - // assuming setExperiment isn't called with new filters & parameters at the same time - if (experiment.parameters) { - const newParameters = { ...state?.parameters, ...experiment.parameters } - return { ...state, ...experiment, parameters: newParameters } - } return { ...state, ...experiment } }, addExperimentGroup: (state) => { @@ -271,6 +315,89 @@ export const experimentLogic = kea([ }, } }, + setTrendsMetric: (state, { metricIdx, name, series, filterTestAccounts, isSecondary }) => { + const metricsKey = isSecondary ? 
'metrics_secondary' : 'metrics' + const metrics = [...(state?.[metricsKey] || [])] + const metric = metrics[metricIdx] + + metrics[metricIdx] = { + ...metric, + ...(name !== undefined && { name }), + count_query: { + ...(metric as ExperimentTrendsQuery).count_query, + ...(series && { series }), + ...(filterTestAccounts !== undefined && { filterTestAccounts }), + }, + } as ExperimentTrendsQuery + + return { + ...state, + [metricsKey]: metrics, + } + }, + setTrendsExposureMetric: (state, { metricIdx, name, series, filterTestAccounts }) => { + const metrics = [...(state?.metrics || [])] + const metric = metrics[metricIdx] + + metrics[metricIdx] = { + ...metric, + ...(name !== undefined && { name }), + exposure_query: { + ...(metric as ExperimentTrendsQuery).exposure_query, + ...(series && { series }), + ...(filterTestAccounts !== undefined && { filterTestAccounts }), + }, + } as ExperimentTrendsQuery + + return { + ...state, + metrics, + } + }, + setFunnelsMetric: ( + state, + { + metricIdx, + name, + series, + filterTestAccounts, + breakdownAttributionType, + breakdownAttributionValue, + funnelWindowInterval, + funnelWindowIntervalUnit, + aggregation_group_type_index, + funnelAggregateByHogQL, + isSecondary, + } + ) => { + const metricsKey = isSecondary ? 'metrics_secondary' : 'metrics' + const metrics = [...(state?.[metricsKey] || [])] + const metric = metrics[metricIdx] + + metrics[metricIdx] = { + ...metric, + ...(name !== undefined && { name }), + funnels_query: { + ...(metric as ExperimentFunnelsQuery).funnels_query, + ...(series && { series }), + ...(filterTestAccounts !== undefined && { filterTestAccounts }), + ...(aggregation_group_type_index !== undefined && { aggregation_group_type_index }), + funnelsFilter: { + ...(metric as ExperimentFunnelsQuery).funnels_query.funnelsFilter, + ...(breakdownAttributionType && { breakdownAttributionType }), + ...(breakdownAttributionValue !== undefined && { breakdownAttributionValue }), + ...(funnelWindowInterval !== undefined && { funnelWindowInterval }), + ...(funnelWindowIntervalUnit && { funnelWindowIntervalUnit }), + ...(funnelAggregateByHogQL !== undefined && { funnelAggregateByHogQL }), + }, + }, + } as ExperimentFunnelsQuery + + return { + ...state, + [metricsKey]: metrics, + } + }, }, ], experimentMissing: [ @@ -285,22 +412,6 @@ export const experimentLogic = kea([ setEditExperiment: (_, { editing }) => editing, }, ], - changingGoalMetric: [ - false, - { - updateExperimentGoal: () => true, - updateExperimentExposure: () => true, - changeExperimentStartDate: () => true, - loadExperimentResults: () => false, - }, - ], - changingSecondaryMetrics: [ - false, - { - updateExperimentSecondaryMetrics: () => true, - loadSecondaryMetricResults: () => false, - }, - ], experimentResultCalculationError: [ null as ExperimentResultCalculationError | null, { @@ -313,27 +424,6 @@ export const experimentLogic = kea([ setFlagImplementationWarning: (_, { warning }) => warning, }, ], - // TODO: delete with the old UI - exposureAndSampleSize: [ - { exposure: 0, sampleSize: 0 } as { exposure: number; sampleSize: number }, - { - setExposureAndSampleSize: (_, { exposure, sampleSize }) => ({ exposure, sampleSize }), - }, - ], - isExperimentGoalModalOpen: [ - false, - { - openExperimentGoalModal: () => true, - closeExperimentGoalModal: () => false, - }, - ], - isExperimentExposureModalOpen: [ - false, - { - openExperimentExposureModal: () => true, - closeExperimentExposureModal: () => false, - }, - ], isExperimentCollectionGoalModalOpen: [ false, { @@ -467,79 
+557,6 @@ export const experimentLogic = kea([ setExperimentType: async ({ type }) => { actions.setExperiment({ type: type }) }, - setNewExperimentInsight: async ({ filters }) => { - let newInsightFilters - const aggregationGroupTypeIndex = values.experiment.parameters?.aggregation_group_type_index - if (filters?.insight === InsightType.TRENDS) { - const groupAggregation = - aggregationGroupTypeIndex !== undefined - ? { math: 'unique_group', math_group_type_index: aggregationGroupTypeIndex } - : {} - const eventAddition = - filters?.actions || filters?.events - ? {} - : { events: [{ ...getDefaultEvent(), ...groupAggregation }] } - newInsightFilters = cleanFilters({ - insight: InsightType.TRENDS, - date_from: dayjs().subtract(EXPERIMENT_DEFAULT_DURATION, 'day').format('YYYY-MM-DDTHH:mm'), - date_to: dayjs().endOf('d').format('YYYY-MM-DDTHH:mm'), - ...eventAddition, - ...filters, - }) - } else { - newInsightFilters = cleanFilters({ - insight: InsightType.FUNNELS, - funnel_viz_type: FunnelVizType.Steps, - date_from: dayjs().subtract(EXPERIMENT_DEFAULT_DURATION, 'day').format('YYYY-MM-DDTHH:mm'), - date_to: dayjs().endOf('d').format('YYYY-MM-DDTHH:mm'), - layout: FunnelLayout.horizontal, - aggregation_group_type_index: aggregationGroupTypeIndex, - ...filters, - }) - } - - // This allows switching between insight types. It's necessary as `updateQuerySource` merges - // the new query with any existing query and that causes validation problems when there are - // unsupported properties in the now merged query. - const newQuery = filtersToQueryNode(newInsightFilters) - if (newInsightFilters?.insight === InsightType.FUNNELS) { - ;(newQuery as TrendsQuery).trendsFilter = undefined - } else { - ;(newQuery as FunnelsQuery).funnelsFilter = undefined - } - - // TRICKY: We always know what the group type index should be for funnel queries, so we don't care - // what the previous value was. Hence, instead of a partial update with `updateQuerySource`, we always - // explicitly set it to what it should be - if (isFunnelsQuery(newQuery)) { - newQuery.aggregation_group_type_index = aggregationGroupTypeIndex - } - - actions.updateQuerySource(newQuery) - }, - // sync form value `filters` with query - setQuery: ({ query }) => { - actions.setExperiment({ filters: queryNodeToFilter((query as InsightVizNode).source) }) - }, - setExperimentExposureInsight: async ({ filters }) => { - const newInsightFilters = cleanFilters({ - insight: InsightType.TRENDS, - date_from: dayjs().subtract(EXPERIMENT_DEFAULT_DURATION, 'day').format('YYYY-MM-DDTHH:mm'), - date_to: dayjs().endOf('d').format('YYYY-MM-DDTHH:mm'), - ...filters, - }) - - actions.updateExposureQuerySource(filtersToQueryNode(newInsightFilters)) - }, - // sync form value `filters` with query - setExposureQuery: ({ query }) => { - actions.setExperiment({ - parameters: { - custom_exposure_filter: queryNodeToFilter((query as InsightVizNode).source), - feature_flag_variants: values.experiment?.parameters?.feature_flag_variants, - }, - }) - }, loadExperimentSuccess: async ({ experiment }) => { experiment && actions.reportExperimentViewed(experiment) @@ -578,18 +595,13 @@ export const experimentLogic = kea([ }) const { recommendedRunningTime, recommendedSampleSize, minimumDetectableEffect } = values - if (!minimumDetectableEffect) { - eventUsageLogic.actions.reportExperimentInsightLoadFailed() - return lemonToast.error( - 'Failed to load insight. Experiment cannot be saved without this value. Try changing the experiment goal.' 
- ) - } const filtersToUpdate = { ...filters } delete filtersToUpdate.properties actions.updateExperiment({ filters: filtersToUpdate, + metrics: values.experiment.metrics, parameters: { ...values.experiment?.parameters, recommended_running_time: recommendedRunningTime, @@ -597,7 +609,6 @@ export const experimentLogic = kea([ minimum_detectable_effect: minimumDetectableEffect, }, }) - actions.closeExperimentGoalModal() }, updateExperimentCollectionGoal: async () => { const { recommendedRunningTime, recommendedSampleSize, minimumDetectableEffect } = values @@ -614,25 +625,12 @@ export const experimentLogic = kea([ }, updateExperimentExposure: async ({ filters }) => { actions.updateExperiment({ + metrics: values.experiment.metrics, parameters: { custom_exposure_filter: filters ?? undefined, feature_flag_variants: values.experiment?.parameters?.feature_flag_variants, }, }) - actions.closeExperimentExposureModal() - }, - updateExperimentSecondaryMetrics: async ({ metrics }) => { - actions.updateExperiment({ secondary_metrics: metrics }) - }, - closeExperimentGoalModal: () => { - if (values.experimentValuesChangedLocally) { - actions.loadExperiment() - } - }, - closeExperimentExposureModal: () => { - if (values.experimentValuesChangedLocally) { - actions.loadExperiment() - } }, closeExperimentCollectionGoalModal: () => { if (values.experimentValuesChangedLocally) { @@ -648,15 +646,8 @@ export const experimentLogic = kea([ }, updateExperimentSuccess: async ({ experiment }) => { actions.updateExperiments(experiment) - if (values.changingGoalMetric) { - actions.loadExperimentResults() - } - if (values.changingSecondaryMetrics && values.experiment?.start_date) { - actions.loadSecondaryMetricResults() - } - if (values.experiment?.start_date) { - actions.loadExperimentResults() - } + actions.loadExperimentResults() + actions.loadSecondaryMetricResults() }, setExperiment: async ({ experiment }) => { const experimentEntitiesChanged = @@ -729,12 +720,6 @@ export const experimentLogic = kea([ } } }, - openExperimentGoalModal: async () => { - actions.setNewExperimentInsight(values.experiment?.filters) - }, - openExperimentExposureModal: async () => { - actions.setExperimentExposureInsight(values.experiment?.parameters?.custom_exposure_filter) - }, createExposureCohortSuccess: ({ exposureCohort }) => { if (exposureCohort && exposureCohort.id !== 'new') { cohortsModel.actions.cohortCreated(exposureCohort) @@ -821,18 +806,19 @@ export const experimentLogic = kea([ | null > => { try { + // :FLAG: CLEAN UP AFTER MIGRATION if (values.featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { - const query = values.experiment.metrics[0].query + // Queries are shareable, so we need to set the experiment_id for the backend to correctly associate the query with the experiment + const queryWithExperimentId = { + ...values.experiment.metrics[0], + experiment_id: values.experimentId, + } - const response: ExperimentResults = await api.create( - `api/projects/${values.currentTeamId}/query`, - { query } - ) + const response = await performQuery(queryWithExperimentId, undefined, refresh) return { ...response, fakeInsightId: Math.random().toString(36).substring(2, 15), - last_refresh: response.last_refresh || '', } as unknown as CachedExperimentTrendsQueryResponse | CachedExperimentFunnelsQueryResponse } @@ -846,7 +832,13 @@ export const experimentLogic = kea([ last_refresh: response.last_refresh, } } catch (error: any) { - actions.setExperimentResultCalculationError({ detail: error.detail, statusCode: error.status }) + let 
errorDetail = error.detail + // :HANDLE FLAG: CLEAN UP AFTER MIGRATION + if (values.featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + const errorDetailMatch = error.detail.match(/\{.*\}/) + errorDetail = errorDetailMatch[0] + } + actions.setExperimentResultCalculationError({ detail: errorDetail, statusCode: error.status }) if (error.status === 504) { actions.reportExperimentResultsLoadingTimeout(values.experimentId) } @@ -869,15 +861,17 @@ export const experimentLogic = kea([ | null > => { if (values.featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { - const secondaryMetrics = - values.experiment?.metrics?.filter((metric) => metric.type === 'secondary') || [] - return (await Promise.all( - secondaryMetrics.map(async (metric) => { + values.experiment?.metrics_secondary.map(async (metric) => { try { + // Queries are shareable, so we need to set the experiment_id for the backend to correctly associate the query with the experiment + const queryWithExperimentId = { + ...metric, + experiment_id: values.experimentId, + } const response: ExperimentResults = await api.create( `api/projects/${values.currentTeamId}/query`, - { query: metric.query } + { query: queryWithExperimentId, refresh: 'lazy_async' } ) return { @@ -970,16 +964,29 @@ export const experimentLogic = kea([ () => [(_, props) => props.experimentId ?? 'new'], (experimentId): Experiment['id'] => experimentId, ], - experimentInsightType: [ + getMetricType: [ (s) => [s.experiment, s.featureFlags], - (experiment, featureFlags): InsightType => { - if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { - const query = experiment?.metrics?.[0]?.query - return query?.kind === NodeKind.ExperimentTrendsQuery ? InsightType.TRENDS : InsightType.FUNNELS - } + (experiment, featureFlags) => + (metricIdx: number = 0) => { + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + const query = experiment?.metrics?.[metricIdx] + return query?.kind === NodeKind.ExperimentTrendsQuery ? InsightType.TRENDS : InsightType.FUNNELS + } - return experiment?.filters?.insight || InsightType.FUNNELS - }, + return experiment?.filters?.insight || InsightType.FUNNELS + }, + ], + getSecondaryMetricType: [ + (s) => [s.experiment, s.featureFlags], + (experiment, featureFlags) => + (metricIdx: number = 0) => { + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + const query = experiment?.metrics_secondary?.[metricIdx] + return query?.kind === NodeKind.ExperimentTrendsQuery ? 
InsightType.TRENDS : InsightType.FUNNELS + } + + return experiment?.secondary_metrics?.[metricIdx]?.filters?.insight || InsightType.FUNNELS + }, ], isExperimentRunning: [ (s) => [s.experiment], @@ -1028,7 +1035,7 @@ export const experimentLogic = kea([ let entities: { math?: string }[] = [] if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { - const query = experiment?.metrics?.[0]?.query as ExperimentTrendsQuery + const query = experiment?.metrics?.[0] as ExperimentTrendsQuery if (!query) { return undefined } @@ -1059,12 +1066,12 @@ export const experimentLogic = kea([ }, ], minimumDetectableEffect: [ - (s) => [s.experiment, s.experimentInsightType, s.conversionMetrics, s.trendResults], - (newExperiment, experimentInsightType, conversionMetrics, trendResults): number => { + (s) => [s.experiment, s.getMetricType, s.conversionMetrics, s.trendResults], + (newExperiment, getMetricType, conversionMetrics, trendResults): number => { return ( newExperiment?.parameters?.minimum_detectable_effect || // :KLUDGE: extracted the method due to difficulties with logic tests - getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults) || + getMinimumDetectableEffect(getMetricType(0), conversionMetrics, trendResults) || 0 ) }, @@ -1175,7 +1182,7 @@ export const experimentLogic = kea([ (s) => [ s.experiment, s.variants, - s.experimentInsightType, + s.getMetricType, s.funnelResults, s.conversionMetrics, s.expectedRunningTime, @@ -1186,7 +1193,7 @@ export const experimentLogic = kea([ ( experiment, variants, - experimentInsightType, + getMetricType, funnelResults, conversionMetrics, expectedRunningTime, @@ -1194,7 +1201,7 @@ export const experimentLogic = kea([ minimumSampleSizePerVariant, recommendedExposureForCountData ): number => { - if (experimentInsightType === InsightType.FUNNELS) { + if (getMetricType(0) === InsightType.FUNNELS) { const currentDuration = dayjs().diff(dayjs(experiment?.start_date), 'hour') const funnelEntrants = funnelResults?.[0]?.count @@ -1282,14 +1289,15 @@ export const experimentLogic = kea([ | CachedSecondaryMetricExperimentFunnelsQueryResponse | CachedSecondaryMetricExperimentTrendsQueryResponse | null, - variantKey: string + variantKey: string, + metricType: InsightType ): [number, number] | null => { const credibleInterval = experimentResults?.credible_intervals?.[variantKey] if (!credibleInterval) { return null } - if (experimentResults.filters?.insight === InsightType.FUNNELS) { + if (metricType === InsightType.FUNNELS) { const controlVariant = (experimentResults.variants as FunnelExperimentVariant[]).find( ({ key }) => key === 'control' ) as FunnelExperimentVariant @@ -1321,8 +1329,8 @@ export const experimentLogic = kea([ }, ], getIndexForVariant: [ - (s) => [s.experimentInsightType], - (experimentInsightType) => + (s) => [s.getMetricType], + (getMetricType) => ( experimentResults: | Partial @@ -1338,7 +1346,7 @@ export const experimentLogic = kea([ } let index = -1 - if (experimentInsightType === InsightType.FUNNELS) { + if (getMetricType(0) === InsightType.FUNNELS) { // Funnel Insight is displayed in order of decreasing count index = (Array.isArray(experimentResults.insight) ? [...experimentResults.insight] : []) .sort((a, b) => { @@ -1360,7 +1368,7 @@ export const experimentLogic = kea([ } const result = index === -1 ? 
null : index - if (result !== null && experimentInsightType === InsightType.FUNNELS) { + if (result !== null && getMetricType(0) === InsightType.FUNNELS) { return result + 1 } return result @@ -1479,16 +1487,17 @@ export const experimentLogic = kea([ }, ], tabularExperimentResults: [ - (s) => [s.experiment, s.experimentResults, s.experimentInsightType], - (experiment, experimentResults, experimentInsightType): any => { + (s) => [s.experiment, s.experimentResults, s.getMetricType], + (experiment, experimentResults, getMetricType): any => { const tabularResults = [] + const metricType = getMetricType(0) if (experimentResults) { for (const variantObj of experimentResults.variants) { - if (experimentInsightType === InsightType.FUNNELS) { + if (metricType === InsightType.FUNNELS) { const { key, success_count, failure_count } = variantObj as FunnelExperimentVariant tabularResults.push({ key, success_count, failure_count }) - } else if (experimentInsightType === InsightType.TRENDS) { + } else if (metricType === InsightType.TRENDS) { const { key, count, exposure, absolute_exposure } = variantObj as TrendExperimentVariant tabularResults.push({ key, count, exposure, absolute_exposure }) } @@ -1501,9 +1510,9 @@ export const experimentLogic = kea([ continue } - if (experimentInsightType === InsightType.FUNNELS) { + if (metricType === InsightType.FUNNELS) { tabularResults.push({ key, success_count: null, failure_count: null }) - } else if (experimentInsightType === InsightType.TRENDS) { + } else if (metricType === InsightType.TRENDS) { tabularResults.push({ key, count: null, exposure: null, absolute_exposure: null }) } } @@ -1569,9 +1578,9 @@ export const experimentLogic = kea([ }, ], funnelResultsPersonsTotal: [ - (s) => [s.experimentResults, s.experimentInsightType], - (experimentResults: ExperimentResults['result'], experimentInsightType: InsightType): number => { - if (experimentInsightType !== InsightType.FUNNELS || !experimentResults?.insight) { + (s) => [s.experimentResults, s.getMetricType], + (experimentResults, getMetricType): number => { + if (getMetricType(0) !== InsightType.FUNNELS || !experimentResults?.insight) { return 0 } @@ -1614,8 +1623,13 @@ export const experimentLogic = kea([ }, ], hasGoalSet: [ - (s) => [s.experiment], - (experiment): boolean => { + (s) => [s.experiment, s.featureFlags], + (experiment, featureFlags): boolean => { + // :FLAG: CLEAN UP AFTER MIGRATION + if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { + return !!experiment.metrics[0] + } + const filters = experiment?.filters return !!( (filters?.actions && filters.actions.length > 0) || @@ -1671,3 +1685,110 @@ function percentageDistribution(variantCount: number): number[] { percentages[variantCount - 1] = percentageRounded - delta return percentages } + +export function getDefaultFilters(insightType: InsightType, aggregationGroupTypeIndex: number | undefined): FilterType { + let newInsightFilters + if (insightType === InsightType.TRENDS) { + const groupAggregation = + aggregationGroupTypeIndex !== undefined + ? 
{ math: 'unique_group', math_group_type_index: aggregationGroupTypeIndex } + : {} + + newInsightFilters = cleanFilters({ + insight: InsightType.TRENDS, + events: [{ ...getDefaultEvent(), ...groupAggregation }], + date_from: dayjs().subtract(EXPERIMENT_DEFAULT_DURATION, 'day').format('YYYY-MM-DDTHH:mm'), + date_to: dayjs().endOf('d').format('YYYY-MM-DDTHH:mm'), + display: ChartDisplayType.ActionsLineGraph, + entity: EntityTypes.EVENTS, + filter_test_accounts: true, + } as TrendsFilterType) + } else { + newInsightFilters = cleanFilters({ + insight: InsightType.FUNNELS, + events: [ + { + id: '$pageview', + name: '$pageview', + type: 'events', + order: 0, + }, + { + id: '$pageview', + name: 'Pageview', + type: 'events', + order: 1, + }, + ], + funnel_viz_type: FunnelVizType.Steps, + date_from: dayjs().subtract(EXPERIMENT_DEFAULT_DURATION, 'day').format('YYYY-MM-DDTHH:mm'), + date_to: dayjs().endOf('d').format('YYYY-MM-DDTHH:mm'), + layout: FunnelLayout.horizontal, + aggregation_group_type_index: aggregationGroupTypeIndex, + funnel_window_interval: 14, + funnel_window_interval_unit: FunnelConversionWindowTimeUnit.Day, + filter_test_accounts: true, + }) + } + + return newInsightFilters +} + +export function getDefaultTrendsMetric(): ExperimentTrendsQuery { + return { + kind: NodeKind.ExperimentTrendsQuery, + count_query: { + kind: NodeKind.TrendsQuery, + series: [ + { + kind: NodeKind.EventsNode, + name: '$pageview', + event: '$pageview', + }, + ], + interval: 'day', + dateRange: { + date_from: dayjs().subtract(EXPERIMENT_DEFAULT_DURATION, 'day').format('YYYY-MM-DDTHH:mm'), + date_to: dayjs().endOf('d').format('YYYY-MM-DDTHH:mm'), + explicitDate: true, + }, + trendsFilter: { + display: ChartDisplayType.ActionsLineGraph, + }, + filterTestAccounts: true, + }, + } +} + +export function getDefaultFunnelsMetric(): ExperimentFunnelsQuery { + return { + kind: NodeKind.ExperimentFunnelsQuery, + funnels_query: { + kind: NodeKind.FunnelsQuery, + filterTestAccounts: true, + dateRange: { + date_from: dayjs().subtract(EXPERIMENT_DEFAULT_DURATION, 'day').format('YYYY-MM-DDTHH:mm'), + date_to: dayjs().endOf('d').format('YYYY-MM-DDTHH:mm'), + explicitDate: true, + }, + series: [ + { + kind: NodeKind.EventsNode, + event: '$pageview', + name: '$pageview', + }, + { + kind: NodeKind.EventsNode, + event: '$pageview', + name: '$pageview', + }, + ], + funnelsFilter: { + funnelVizType: FunnelVizType.Steps, + funnelWindowIntervalUnit: FunnelConversionWindowTimeUnit.Day, + funnelWindowInterval: 14, + layout: FunnelLayout.horizontal, + }, + }, + } +} diff --git a/frontend/src/scenes/experiments/secondaryMetricsLogic.ts b/frontend/src/scenes/experiments/secondaryMetricsLogic.ts deleted file mode 100644 index 852d7f17a47..00000000000 --- a/frontend/src/scenes/experiments/secondaryMetricsLogic.ts +++ /dev/null @@ -1,221 +0,0 @@ -import { actions, connect, kea, key, listeners, path, props, reducers } from 'kea' -import { forms } from 'kea-forms' -import { FunnelLayout } from 'lib/constants' -import { dayjs } from 'lib/dayjs' -import { insightDataLogic } from 'scenes/insights/insightDataLogic' -import { insightLogic } from 'scenes/insights/insightLogic' -import { insightVizDataLogic } from 'scenes/insights/insightVizDataLogic' -import { cleanFilters, getDefaultEvent } from 'scenes/insights/utils/cleanFilters' -import { teamLogic } from 'scenes/teamLogic' - -import { filtersToQueryNode } from '~/queries/nodes/InsightQuery/utils/filtersToQueryNode' -import { queryNodeToFilter } from 
'~/queries/nodes/InsightQuery/utils/queryNodeToFilter' -import { FunnelsQuery, InsightVizNode, TrendsQuery } from '~/queries/schema' -import { Experiment, FilterType, FunnelVizType, InsightType, SecondaryExperimentMetric } from '~/types' - -import { SECONDARY_METRIC_INSIGHT_ID } from './constants' -import { experimentLogic } from './experimentLogic' -import type { secondaryMetricsLogicType } from './secondaryMetricsLogicType' - -const DEFAULT_DURATION = 14 - -export const MAX_SECONDARY_METRICS = 10 - -export interface SecondaryMetricsProps { - onMetricsChange: (metrics: SecondaryExperimentMetric[]) => void - initialMetrics: SecondaryExperimentMetric[] - experimentId: Experiment['id'] - defaultAggregationType?: number -} - -export interface SecondaryMetricForm { - name: string - filters: Partial -} - -const defaultFormValuesGenerator: ( - aggregationType?: number, - disableAddEventToDefault?: boolean, - cohortIdToFilter?: number -) => SecondaryMetricForm = (aggregationType, disableAddEventToDefault, cohortIdToFilter) => { - const groupAggregation = - aggregationType !== undefined ? { math: 'unique_group', math_group_type_index: aggregationType } : {} - - const cohortFilter = cohortIdToFilter - ? { properties: [{ key: 'id', type: 'cohort', value: cohortIdToFilter }] } - : {} - const eventAddition = disableAddEventToDefault - ? {} - : { events: [{ ...getDefaultEvent(), ...groupAggregation, ...cohortFilter }] } - - return { - name: '', - filters: { - insight: InsightType.TRENDS, - ...eventAddition, - }, - } -} - -export const secondaryMetricsLogic = kea([ - props({} as SecondaryMetricsProps), - key((props) => `${props.experimentId || 'new'}-${props.defaultAggregationType}`), - path((key) => ['scenes', 'experiment', 'secondaryMetricsLogic', key]), - connect((props: SecondaryMetricsProps) => ({ - logic: [insightLogic({ dashboardItemId: SECONDARY_METRIC_INSIGHT_ID, syncWithUrl: false })], - values: [teamLogic, ['currentTeamId'], experimentLogic({ experimentId: props.experimentId }), ['experiment']], - actions: [ - insightDataLogic({ dashboardItemId: SECONDARY_METRIC_INSIGHT_ID }), - ['setQuery'], - insightVizDataLogic({ dashboardItemId: SECONDARY_METRIC_INSIGHT_ID }), - ['updateQuerySource'], - ], - })), - actions({ - // modal - openModalToCreateSecondaryMetric: true, - openModalToEditSecondaryMetric: ( - metric: SecondaryExperimentMetric, - metricIdx: number, - showResults: boolean = false - ) => ({ - metric, - metricIdx, - showResults, - }), - saveSecondaryMetric: true, - closeModal: true, - - // metrics - setMetricId: (metricIdx: number) => ({ metricIdx }), - addNewMetric: (metric: SecondaryExperimentMetric) => ({ metric }), - updateMetric: (metric: SecondaryExperimentMetric, metricIdx: number) => ({ metric, metricIdx }), - deleteMetric: (metricIdx: number) => ({ metricIdx }), - - // preview insight - setPreviewInsight: (filters?: Partial) => ({ filters }), - }), - reducers(({ props }) => ({ - isModalOpen: [ - false, - { - openModalToCreateSecondaryMetric: () => true, - openModalToEditSecondaryMetric: () => true, - closeModal: () => false, - }, - ], - showResults: [ - false, - { - openModalToEditSecondaryMetric: (_, { showResults }) => showResults, - closeModal: () => false, - }, - ], - existingModalSecondaryMetric: [ - null as SecondaryExperimentMetric | null, - { - openModalToCreateSecondaryMetric: () => null, - openModalToEditSecondaryMetric: (_, { metric }) => metric, - }, - ], - metrics: [ - props.initialMetrics, - { - addNewMetric: (metrics, { metric }) => { - return [...metrics, { 
...metric }] - }, - updateMetric: (metrics, { metric, metricIdx }) => { - const metricsCopy = [...metrics] - metricsCopy[metricIdx] = metric - return metricsCopy - }, - deleteMetric: (metrics, { metricIdx }) => metrics.filter((_, idx) => idx !== metricIdx), - }, - ], - metricIdx: [ - 0 as number, - { - setMetricId: (_, { metricIdx }) => metricIdx, - }, - ], - })), - forms(({ props, values }) => ({ - secondaryMetricModal: { - defaults: defaultFormValuesGenerator( - props.defaultAggregationType, - false, - values.experiment?.exposure_cohort - ), - errors: () => ({}), - submit: async () => { - // We don't use the form submit anymore - }, - }, - })), - listeners(({ props, actions, values }) => ({ - openModalToCreateSecondaryMetric: () => { - actions.resetSecondaryMetricModal() - actions.setPreviewInsight( - defaultFormValuesGenerator(props.defaultAggregationType, false, values.experiment?.exposure_cohort) - .filters - ) - }, - openModalToEditSecondaryMetric: ({ metric: { name, filters }, metricIdx }) => { - actions.setSecondaryMetricModalValue('name', name) - actions.setPreviewInsight(filters) - actions.setMetricId(metricIdx) - }, - setPreviewInsight: async ({ filters }) => { - let newInsightFilters - if (filters?.insight === InsightType.FUNNELS) { - newInsightFilters = cleanFilters({ - insight: InsightType.FUNNELS, - funnel_viz_type: FunnelVizType.Steps, - date_from: dayjs().subtract(DEFAULT_DURATION, 'day').format('YYYY-MM-DD'), - date_to: dayjs().endOf('d').format('YYYY-MM-DDTHH:mm'), - layout: FunnelLayout.horizontal, - aggregation_group_type_index: props.defaultAggregationType, - ...filters, - }) - } else { - newInsightFilters = cleanFilters({ - insight: InsightType.TRENDS, - date_from: dayjs().subtract(DEFAULT_DURATION, 'day').format('YYYY-MM-DD'), - date_to: dayjs().endOf('d').format('YYYY-MM-DDTHH:mm'), - ...defaultFormValuesGenerator( - props.defaultAggregationType, - (filters?.actions?.length || 0) + (filters?.events?.length || 0) > 0 - ).filters, - ...filters, - }) - } - - // This allows switching between insight types. It's necessary as `updateQuerySource` merges - // the new query with any existing query and that causes validation problems when there are - // unsupported properties in the now merged query. - const newQuery = filtersToQueryNode(newInsightFilters) - if (filters?.insight === InsightType.FUNNELS) { - ;(newQuery as TrendsQuery).trendsFilter = undefined - } else { - ;(newQuery as FunnelsQuery).funnelsFilter = undefined - } - actions.updateQuerySource(newQuery) - }, - // sync form value `filters` with query - setQuery: ({ query }) => { - actions.setSecondaryMetricModalValue('filters', queryNodeToFilter((query as InsightVizNode).source)) - }, - saveSecondaryMetric: () => { - if (values.existingModalSecondaryMetric) { - actions.updateMetric(values.secondaryMetricModal, values.metricIdx) - } else { - actions.addNewMetric(values.secondaryMetricModal) - } - props.onMetricsChange(values.metrics) - actions.closeModal() - }, - deleteMetric: () => { - props.onMetricsChange(values.metrics) - }, - })), -]) diff --git a/frontend/src/scenes/experiments/utils.test.ts b/frontend/src/scenes/experiments/utils.test.ts index 5748cfae0e7..e3a73c89cbb 100644 --- a/frontend/src/scenes/experiments/utils.test.ts +++ b/frontend/src/scenes/experiments/utils.test.ts @@ -4,7 +4,7 @@ import { getMinimumDetectableEffect, transformFiltersForWinningVariant } from '. 
describe('utils', () => { it('Funnel experiment returns correct MDE', async () => { - const experimentInsightType = InsightType.FUNNELS + const metricType = InsightType.FUNNELS const trendResults = [ { action: { @@ -26,36 +26,36 @@ describe('utils', () => { ] let conversionMetrics = { averageTime: 0, stepRate: 0, totalRate: 0 } - expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(1) + expect(getMinimumDetectableEffect(metricType, conversionMetrics, trendResults)).toEqual(1) conversionMetrics = { averageTime: 0, stepRate: 0, totalRate: 1 } - expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(1) + expect(getMinimumDetectableEffect(metricType, conversionMetrics, trendResults)).toEqual(1) conversionMetrics = { averageTime: 0, stepRate: 0, totalRate: 0.01 } - expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(1) + expect(getMinimumDetectableEffect(metricType, conversionMetrics, trendResults)).toEqual(1) conversionMetrics = { averageTime: 0, stepRate: 0, totalRate: 0.99 } - expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(1) + expect(getMinimumDetectableEffect(metricType, conversionMetrics, trendResults)).toEqual(1) conversionMetrics = { averageTime: 0, stepRate: 0, totalRate: 0.1 } - expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(5) + expect(getMinimumDetectableEffect(metricType, conversionMetrics, trendResults)).toEqual(5) conversionMetrics = { averageTime: 0, stepRate: 0, totalRate: 0.9 } - expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(5) + expect(getMinimumDetectableEffect(metricType, conversionMetrics, trendResults)).toEqual(5) conversionMetrics = { averageTime: 0, stepRate: 0, totalRate: 0.3 } - expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(3) + expect(getMinimumDetectableEffect(metricType, conversionMetrics, trendResults)).toEqual(3) conversionMetrics = { averageTime: 0, stepRate: 0, totalRate: 0.7 } - expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(3) + expect(getMinimumDetectableEffect(metricType, conversionMetrics, trendResults)).toEqual(3) conversionMetrics = { averageTime: 0, stepRate: 0, totalRate: 0.2 } - expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(4) + expect(getMinimumDetectableEffect(metricType, conversionMetrics, trendResults)).toEqual(4) conversionMetrics = { averageTime: 0, stepRate: 0, totalRate: 0.8 } - expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(4) + expect(getMinimumDetectableEffect(metricType, conversionMetrics, trendResults)).toEqual(4) conversionMetrics = { averageTime: 0, stepRate: 0, totalRate: 0.5 } - expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(5) + expect(getMinimumDetectableEffect(metricType, conversionMetrics, trendResults)).toEqual(5) }) it('Trend experiment returns correct MDE', async () => { - const experimentInsightType = InsightType.TRENDS + const metricType = InsightType.TRENDS const conversionMetrics = { averageTime: 0, stepRate: 0, totalRate: 0 } const trendResults = [ { @@ -78,19 +78,19 @@ describe('utils', () => { ] trendResults[0].count = 0 - 
expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(100) + expect(getMinimumDetectableEffect(metricType, conversionMetrics, trendResults)).toEqual(100) trendResults[0].count = 200 - expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(100) + expect(getMinimumDetectableEffect(metricType, conversionMetrics, trendResults)).toEqual(100) trendResults[0].count = 201 - expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(20) + expect(getMinimumDetectableEffect(metricType, conversionMetrics, trendResults)).toEqual(20) trendResults[0].count = 1001 - expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(5) + expect(getMinimumDetectableEffect(metricType, conversionMetrics, trendResults)).toEqual(5) trendResults[0].count = 20000 - expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(5) + expect(getMinimumDetectableEffect(metricType, conversionMetrics, trendResults)).toEqual(5) }) it('transforms filters for a winning variant', async () => { diff --git a/frontend/src/scenes/experiments/utils.ts b/frontend/src/scenes/experiments/utils.ts index bdc74270620..4126a904795 100644 --- a/frontend/src/scenes/experiments/utils.ts +++ b/frontend/src/scenes/experiments/utils.ts @@ -31,11 +31,11 @@ export function formatUnitByQuantity(value: number, unit: string): string { } export function getMinimumDetectableEffect( - experimentInsightType: InsightType, + metricType: InsightType, conversionMetrics: FunnelTimeConversionMetrics, trendResults: TrendResult[] ): number | null { - if (experimentInsightType === InsightType.FUNNELS) { + if (metricType === InsightType.FUNNELS) { // FUNNELS // Given current CR, find a realistic target CR increase and return MDE based on it if (!conversionMetrics) { diff --git a/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditionsLogic.ts b/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditionsLogic.ts index 88ae79a8fa1..9c247bf0d6c 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditionsLogic.ts +++ b/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditionsLogic.ts @@ -164,7 +164,7 @@ export const featureFlagReleaseConditionsLogic = kea { - actions.setAffectedUsers(values.filters.groups.length - 1, -1) + actions.setAffectedUsers(values.filters.groups.length - 1, values.totalUsers || -1) }, removeConditionSet: ({ index }) => { const previousLength = Object.keys(values.affectedUsers).length @@ -183,9 +183,20 @@ export const featureFlagReleaseConditionsLogic = kea, }, ]} + data-attr="feature-flags-tab-navigation" />
) diff --git a/frontend/src/scenes/feature-flags/featureFlagLogic.ts b/frontend/src/scenes/feature-flags/featureFlagLogic.ts index 8485d628ed0..875b6f56cf8 100644 --- a/frontend/src/scenes/feature-flags/featureFlagLogic.ts +++ b/frontend/src/scenes/feature-flags/featureFlagLogic.ts @@ -302,7 +302,7 @@ export const featureFlagLogic = kea([ }), forms(({ actions, values }) => ({ featureFlag: { - defaults: { ...NEW_FLAG } as FeatureFlagType, + defaults: { ...NEW_FLAG }, errors: ({ key, filters }) => { return { key: validateFeatureFlagKey(key), diff --git a/frontend/src/scenes/feature-flags/featureFlagReleaseConditionsLogic.test.ts b/frontend/src/scenes/feature-flags/featureFlagReleaseConditionsLogic.test.ts index 1ea91f0d9f0..9ee90f5307c 100644 --- a/frontend/src/scenes/feature-flags/featureFlagReleaseConditionsLogic.test.ts +++ b/frontend/src/scenes/feature-flags/featureFlagReleaseConditionsLogic.test.ts @@ -128,6 +128,7 @@ describe('the feature flag release conditions logic', () => { .mockReturnValueOnce(Promise.resolve({ users_affected: 140, total_users: 2000 })) .mockReturnValueOnce(Promise.resolve({ users_affected: 240, total_users: 2002 })) .mockReturnValueOnce(Promise.resolve({ users_affected: 500, total_users: 2000 })) + .mockReturnValueOnce(Promise.resolve({ users_affected: 750, total_users: 2001 })) logic.mount() }) @@ -138,30 +139,44 @@ describe('the feature flag release conditions logic', () => { }) .toDispatchActions(['setAffectedUsers']) .toMatchValues({ - affectedUsers: { 0: -1, 1: undefined, 2: undefined, 3: undefined }, + affectedUsers: { 0: 140, 1: undefined, 2: undefined, 3: undefined }, totalUsers: null, }) .toDispatchActions(['setAffectedUsers', 'setTotalUsers']) .toMatchValues({ - affectedUsers: { 0: -1, 1: 140 }, - totalUsers: 2000, - }) - .toDispatchActions(['setAffectedUsers', 'setTotalUsers']) - .toMatchValues({ - affectedUsers: { 0: -1, 1: 140, 2: 240 }, + affectedUsers: { 0: 140, 1: 240 }, totalUsers: 2002, }) .toDispatchActions(['setAffectedUsers', 'setTotalUsers']) .toMatchValues({ - affectedUsers: { 0: -1, 1: 140, 2: 240, 3: 500 }, + affectedUsers: { 0: 140, 1: 240, 2: 500 }, totalUsers: 2000, }) + .toDispatchActions(['setAffectedUsers', 'setTotalUsers']) + .toMatchValues({ + affectedUsers: { 0: 140, 1: 240, 2: 500, 3: 750 }, + totalUsers: 2001, + }) }) it('updates when adding conditions to a flag', async () => { jest.spyOn(api, 'create') - .mockReturnValueOnce(Promise.resolve({ users_affected: 140, total_users: 2000 })) - .mockReturnValueOnce(Promise.resolve({ users_affected: 240, total_users: 2000 })) + .mockReturnValueOnce(Promise.resolve({ users_affected: 124, total_users: 2000 })) + .mockReturnValueOnce(Promise.resolve({ users_affected: 248, total_users: 2000 })) + .mockReturnValueOnce(Promise.resolve({ users_affected: 496, total_users: 2000 })) + + logic?.unmount() + logic = featureFlagReleaseConditionsLogic({ + id: '5678', + filters: generateFeatureFlagFilters([ + { + properties: [], + rollout_percentage: 50, + variant: null, + }, + ]), + }) + logic.mount() await expectLogic(logic, () => { logic.actions.updateConditionSet(0, 20, [ @@ -176,12 +191,11 @@ describe('the feature flag release conditions logic', () => { // first call is to clear the affected users on mount // second call is to set the affected users for mount logic conditions // third call is to set the affected users for the updateConditionSet action - .toDispatchActions(['setAffectedUsers', 'setAffectedUsers', 'setAffectedUsers']) + .toDispatchActions(['setAffectedUsers', 'setAffectedUsers', 
'setAffectedUsers', 'setTotalUsers']) .toMatchValues({ - affectedUsers: { 0: undefined }, - totalUsers: null, + affectedUsers: { 0: 124 }, + totalUsers: 2000, }) - .toNotHaveDispatchedActions(['setTotalUsers']) await expectLogic(logic, () => { logic.actions.updateConditionSet(0, 20, [ @@ -196,11 +210,11 @@ describe('the feature flag release conditions logic', () => { .toDispatchActions(['setAffectedUsers']) .toMatchValues({ affectedUsers: { 0: undefined }, - totalUsers: null, + totalUsers: 2000, }) .toDispatchActions(['setAffectedUsers', 'setTotalUsers']) .toMatchValues({ - affectedUsers: { 0: 140 }, + affectedUsers: { 0: 248 }, totalUsers: 2000, }) @@ -210,7 +224,8 @@ describe('the feature flag release conditions logic', () => { }) .toDispatchActions(['setAffectedUsers']) .toMatchValues({ - affectedUsers: { 0: 140, 1: -1 }, + // expect the new empty condition set to initialize affected users to be same as total users + affectedUsers: { 0: 248, 1: 2000 }, totalUsers: 2000, }) .toNotHaveDispatchedActions(['setTotalUsers']) @@ -228,7 +243,7 @@ describe('the feature flag release conditions logic', () => { }) .toDispatchActions(['setAffectedUsers']) .toMatchValues({ - affectedUsers: { 0: 140, 1: undefined }, + affectedUsers: { 0: 248, 1: undefined }, totalUsers: 2000, }) .toNotHaveDispatchedActions(['setTotalUsers']) @@ -246,12 +261,12 @@ describe('the feature flag release conditions logic', () => { }) .toDispatchActions(['setAffectedUsers']) .toMatchValues({ - affectedUsers: { 0: 140, 1: undefined }, + affectedUsers: { 0: 248, 1: undefined }, totalUsers: 2000, }) .toDispatchActions(['setAffectedUsers', 'setTotalUsers']) .toMatchValues({ - affectedUsers: { 0: 140, 1: 240 }, + affectedUsers: { 0: 248, 1: 496 }, totalUsers: 2000, }) @@ -261,11 +276,11 @@ describe('the feature flag release conditions logic', () => { }) .toDispatchActions(['setAffectedUsers']) .toMatchValues({ - affectedUsers: { 0: 240, 1: 240 }, + affectedUsers: { 0: 496, 1: 496 }, }) .toDispatchActions(['setAffectedUsers']) .toMatchValues({ - affectedUsers: { 0: 240, 1: undefined }, + affectedUsers: { 0: 496, 1: undefined }, }) }) @@ -313,7 +328,6 @@ describe('the feature flag release conditions logic', () => { jest.spyOn(api, 'create') logic?.unmount() - logic = featureFlagReleaseConditionsLogic({ id: '12345', filters: generateFeatureFlagFilters([ @@ -359,11 +373,11 @@ describe('the feature flag release conditions logic', () => { 'setTotalUsers', ]) .toMatchValues({ - affectedUsers: { 0: -1, 1: 120, 2: 120 }, + affectedUsers: { 0: 120, 1: 120, 2: 120 }, totalUsers: 2000, }) - expect(api.create).toHaveBeenCalledTimes(2) + expect(api.create).toHaveBeenCalledTimes(4) await expectLogic(logic, () => { logic.actions.updateConditionSet(0, 20, undefined, undefined) @@ -378,7 +392,7 @@ describe('the feature flag release conditions logic', () => { }).toNotHaveDispatchedActions(['setAffectedUsers', 'setTotalUsers']) // no extra calls when changing rollout percentage - expect(api.create).toHaveBeenCalledTimes(2) + expect(api.create).toHaveBeenCalledTimes(4) }) }) }) diff --git a/frontend/src/scenes/feature-flags/featureFlagsLogic.ts b/frontend/src/scenes/feature-flags/featureFlagsLogic.ts index b39b7d052d2..8c11216de81 100644 --- a/frontend/src/scenes/feature-flags/featureFlagsLogic.ts +++ b/frontend/src/scenes/feature-flags/featureFlagsLogic.ts @@ -239,7 +239,17 @@ export const featureFlagsLogic = kea([ pageFiltersFromUrl.page = parseInt(page) } - actions.setFeatureFlagsFilters({ ...DEFAULT_FILTERS, ...pageFiltersFromUrl }) + // 
Initialize filters with the URL params if none are set + const isInitializingFilters = + objectsEqual(DEFAULT_FILTERS, values.filters) && !objectsEqual(DEFAULT_FILTERS, pageFiltersFromUrl) + /** + * Pagination search param in the URL is modified directly by the LemonTable component, + * so let's update filter state if it changes + */ + const isChangingPage = page !== undefined && page !== values.filters.page + if (isInitializingFilters || isChangingPage) { + actions.setFeatureFlagsFilters({ ...DEFAULT_FILTERS, ...pageFiltersFromUrl }) + } }, })), events(({ actions }) => ({ diff --git a/frontend/src/scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow.tsx b/frontend/src/scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow.tsx index b4d3fc16421..15586e766a3 100644 --- a/frontend/src/scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow.tsx +++ b/frontend/src/scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow.tsx @@ -40,6 +40,7 @@ import { } from 'scenes/trends/mathsLogic' import { actionsModel } from '~/models/actionsModel' +import { NodeKind } from '~/queries/schema' import { isInsightVizNode, isStickinessQuery } from '~/queries/utils' import { ActionFilter, @@ -596,9 +597,20 @@ export function ActionFilterRow({ onChange={(properties) => updateFilterProperty({ properties, index })} showNestedArrow={showNestedArrow} disablePopover={!propertyFiltersPopover} + metadataSource={ + filter.type == TaxonomicFilterGroupType.DataWarehouse + ? { + kind: NodeKind.HogQLQuery, + query: `select ${filter.distinct_id_field} from ${filter.table_name}`, + } + : undefined + } taxonomicGroupTypes={ filter.type == TaxonomicFilterGroupType.DataWarehouse - ? [TaxonomicFilterGroupType.DataWarehouseProperties] + ? 
[ + TaxonomicFilterGroupType.DataWarehouseProperties, + TaxonomicFilterGroupType.HogQLExpression, + ] : propertiesTaxonomicGroupTypes } eventNames={ diff --git a/frontend/src/scenes/insights/filters/AggregationSelect.tsx b/frontend/src/scenes/insights/filters/AggregationSelect.tsx index 1aae9ea390e..4086bc02831 100644 --- a/frontend/src/scenes/insights/filters/AggregationSelect.tsx +++ b/frontend/src/scenes/insights/filters/AggregationSelect.tsx @@ -10,7 +10,7 @@ import { FunnelsQuery } from '~/queries/schema' import { isFunnelsQuery, isInsightQueryNode, isStickinessQuery } from '~/queries/utils' import { InsightLogicProps } from '~/types' -function getHogQLValue(groupIndex?: number, aggregationQuery?: string): string { +export function getHogQLValue(groupIndex?: number, aggregationQuery?: string): string { if (groupIndex !== undefined) { return `$group_${groupIndex}` } else if (aggregationQuery) { @@ -19,7 +19,7 @@ function getHogQLValue(groupIndex?: number, aggregationQuery?: string): string { return UNIQUE_USERS } -function hogQLToFilterValue(value?: string): { groupIndex?: number; aggregationQuery?: string } { +export function hogQLToFilterValue(value?: string): { groupIndex?: number; aggregationQuery?: string } { if (value?.match(/^\$group_[0-9]+$/)) { return { groupIndex: parseInt(value.replace('$group_', '')) } } else if (value === 'person_id') { diff --git a/frontend/src/scenes/insights/views/Funnels/FunnelConversionWindowFilter.tsx b/frontend/src/scenes/insights/views/Funnels/FunnelConversionWindowFilter.tsx index 7233638856c..54f28a4fd28 100644 --- a/frontend/src/scenes/insights/views/Funnels/FunnelConversionWindowFilter.tsx +++ b/frontend/src/scenes/insights/views/Funnels/FunnelConversionWindowFilter.tsx @@ -10,7 +10,7 @@ import { useDebouncedCallback } from 'use-debounce' import { FunnelsFilter } from '~/queries/schema' import { EditorFilterProps, FunnelConversionWindow, FunnelConversionWindowTimeUnit } from '~/types' -const TIME_INTERVAL_BOUNDS: Record = { +export const TIME_INTERVAL_BOUNDS: Record = { [FunnelConversionWindowTimeUnit.Second]: [1, 3600], [FunnelConversionWindowTimeUnit.Minute]: [1, 1440], [FunnelConversionWindowTimeUnit.Hour]: [1, 24], diff --git a/frontend/src/scenes/max/QuestionInput.tsx b/frontend/src/scenes/max/QuestionInput.tsx index f39eb4d1132..55ea5371230 100644 --- a/frontend/src/scenes/max/QuestionInput.tsx +++ b/frontend/src/scenes/max/QuestionInput.tsx @@ -25,7 +25,7 @@ export function QuestionInput(): JSX.Element { className={clsx( !isFloating ? 
'w-[min(44rem,100%)] relative' - : 'w-full max-w-200 sticky z-10 self-center p-1 mx-3 mb-3 bottom-3 border border-[var(--glass-border-3000)] rounded-[0.625rem] backdrop-blur bg-[var(--glass-bg-3000)]' + : 'w-full max-w-192 sticky z-10 self-center p-1 mx-4 mb-3 bottom-3 border border-[var(--glass-border-3000)] rounded-[0.625rem] backdrop-blur bg-[var(--glass-bg-3000)]' )} > } + center + className="shrink" > {suggestion} diff --git a/frontend/src/scenes/max/Thread.tsx b/frontend/src/scenes/max/Thread.tsx index 8a2f299a312..a4fa9132987 100644 --- a/frontend/src/scenes/max/Thread.tsx +++ b/frontend/src/scenes/max/Thread.tsx @@ -1,4 +1,5 @@ import { + IconRefresh, IconThumbsDown, IconThumbsDownFilled, IconThumbsUp, @@ -11,91 +12,71 @@ import clsx from 'clsx' import { useActions, useValues } from 'kea' import { BreakdownSummary, PropertiesSummary, SeriesSummary } from 'lib/components/Cards/InsightCard/InsightDetails' import { TopHeading } from 'lib/components/Cards/InsightCard/TopHeading' -import { IconRefresh } from 'lib/lemon-ui/icons' import { IconOpenInNew } from 'lib/lemon-ui/icons' +import { LemonMarkdown } from 'lib/lemon-ui/LemonMarkdown' import posthog from 'posthog-js' import React, { useMemo, useRef, useState } from 'react' import { urls } from 'scenes/urls' import { Query } from '~/queries/Query/Query' -import { AssistantMessageType, HumanMessage, InsightVizNode, NodeKind, VisualizationMessage } from '~/queries/schema' +import { + AssistantMessage, + AssistantMessageType, + FailureMessage, + HumanMessage, + InsightVizNode, + NodeKind, + VisualizationMessage, +} from '~/queries/schema' import { maxLogic, MessageStatus, ThreadMessage } from './maxLogic' -import { castAssistantQuery, isFailureMessage, isHumanMessage, isVisualizationMessage } from './utils' +import { + castAssistantQuery, + isAssistantMessage, + isFailureMessage, + isHumanMessage, + isVisualizationMessage, +} from './utils' export function Thread(): JSX.Element | null { const { thread, threadLoading } = useValues(maxLogic) - const { retryLastMessage } = useActions(maxLogic) return (
{thread.map((message, index) => { if (isHumanMessage(message)) { return ( - - {message.content || No text} - + {message.content || '*No text.*'} + ) + } else if (isAssistantMessage(message) || isFailureMessage(message)) { + return + } else if (isVisualizationMessage(message)) { + return } - - if (isVisualizationMessage(message)) { - return ( - - ) - } - - if (isFailureMessage(message)) { - return ( - } - size="small" - className="mt-2" - type="secondary" - onClick={() => retryLastMessage()} - > - Try again - - ) - } - > - {message.content || Max has failed to generate an answer. Please try again.} - - ) - } - - return null + return null // We currently skip other types of messages })} {threadLoading && ( - +
Let me think…
-
+ )}
) } -const Message = React.forwardRef< +const MessageTemplate = React.forwardRef< HTMLDivElement, - React.PropsWithChildren<{ type: 'human' | 'ai'; className?: string; action?: React.ReactNode }> ->(function Message({ type, children, className, action }, ref): JSX.Element { + { type: 'human' | 'ai'; className?: string; action?: React.ReactNode; children: React.ReactNode } +>(function MessageTemplate({ type, children, className, action }, ref) { if (type === AssistantMessageType.Human) { return (
@@ -105,7 +86,7 @@ const Message = React.forwardRef< } return ( -
+
{children}
@@ -114,14 +95,41 @@ const Message = React.forwardRef< ) }) -function Answer({ +const TextAnswer = React.forwardRef< + HTMLDivElement, + { message: (AssistantMessage | FailureMessage) & ThreadMessage; index: number } +>(function TextAnswer({ message, index }, ref) { + const { thread } = useValues(maxLogic) + + return ( + + ) : message.type === 'ai' && + message.status === 'completed' && + (thread[index + 1] === undefined || thread[index + 1].type === 'human') ? ( + // Show answer actions if the assistant's response is complete at this point + + ) : null + } + > + + {message.content || '*Max has failed to generate an answer. Please try again.*'} + + + ) +}) + +function VisualizationAnswer({ message, status, - previousMessage, }: { message: VisualizationMessage status?: MessageStatus - previousMessage: ThreadMessage }): JSX.Element { const query = useMemo(() => { if (message.answer) { @@ -138,7 +146,7 @@ function Answer({ return ( <> {message.reasoning_steps && ( - {step} ))} - + )} {status === 'completed' && query && ( <> - +
@@ -178,36 +186,55 @@ function Answer({
- - {isHumanMessage(previousMessage) && ( - - )} + )} ) } -function AnswerActions({ - message, - previousMessage, -}: { - message: VisualizationMessage - previousMessage: HumanMessage -}): JSX.Element { +function RetriableAnswerActions(): JSX.Element { + const { retryLastMessage } = useActions(maxLogic) + + return ( + } + type="secondary" + size="small" + tooltip="Try again" + onClick={() => retryLastMessage()} + > + Try again + + ) +} + +function SuccessfulAnswerActions({ messageIndex }: { messageIndex: number }): JSX.Element { + const { thread } = useValues(maxLogic) + const { retryLastMessage } = useActions(maxLogic) + const [rating, setRating] = useState<'good' | 'bad' | null>(null) const [feedback, setFeedback] = useState('') const [feedbackInputStatus, setFeedbackInputStatus] = useState<'hidden' | 'pending' | 'submitted'>('hidden') const hasScrolledFeedbackInputIntoView = useRef(false) + const [relevantHumanMessage, relevantVisualizationMessage] = useMemo(() => { + // We need to find the relevant visualization message (which might be a message earlier if the most recent one + // is a results summary message), and the human message that triggered it. + const relevantMessages = thread.slice(0, messageIndex + 1).reverse() + const visualizationMessage = relevantMessages.find(isVisualizationMessage) as VisualizationMessage + const humanMessage = relevantMessages.find(isHumanMessage) as HumanMessage + return [humanMessage, visualizationMessage] + }, [thread, messageIndex]) + function submitRating(newRating: 'good' | 'bad'): void { if (rating) { return // Already rated } setRating(newRating) posthog.capture('chat rating', { - question: previousMessage.content, - answer: JSON.stringify(message.answer), + question: relevantHumanMessage.content, + answer: JSON.stringify(relevantVisualizationMessage.answer), answer_rating: rating, }) if (newRating === 'bad') { @@ -220,8 +247,8 @@ function AnswerActions({ return // Input is empty } posthog.capture('chat feedback', { - question: previousMessage.content, - answer: JSON.stringify(message.answer), + question: relevantHumanMessage.content, + answer: JSON.stringify(relevantVisualizationMessage.answer), feedback, }) setFeedbackInputStatus('submitted') @@ -248,9 +275,18 @@ function AnswerActions({ onClick={() => submitRating('bad')} /> )} + {messageIndex === thread.length - 1 && ( + } + type="tertiary" + size="small" + tooltip="Try again" + onClick={() => retryLastMessage()} + /> + )}
{feedbackInputStatus !== 'hidden' && ( - { if (el && !hasScrolledFeedbackInputIntoView.current) { @@ -292,7 +328,7 @@ function AnswerActions({
)}
-
+
)}
)
diff --git a/frontend/src/scenes/max/__mocks__/chatResponse.mocks.ts b/frontend/src/scenes/max/__mocks__/chatResponse.mocks.ts
index 3bd38eb1e62..dffb3cfa056 100644
--- a/frontend/src/scenes/max/__mocks__/chatResponse.mocks.ts
+++ b/frontend/src/scenes/max/__mocks__/chatResponse.mocks.ts
@@ -1,17 +1,23 @@
 import { AssistantGenerationStatusEvent, AssistantGenerationStatusType } from '~/queries/schema'
 
-import chatResponse from './chatResponse.json'
-import failureResponse from './failureResponse.json'
+import failureMessage from './failureMessage.json'
+import summaryMessage from './summaryMessage.json'
+import visualizationMessage from './visualizationMessage.json'
 
 function generateChunk(events: string[]): string {
     return events.map((event) => (event.startsWith('event:') ? `${event}\n` : `${event}\n\n`)).join('')
 }
 
-export const chatResponseChunk = generateChunk(['event: message', `data: ${JSON.stringify(chatResponse)}`])
+export const chatResponseChunk = generateChunk([
+    'event: message',
+    `data: ${JSON.stringify(visualizationMessage)}`,
+    'event: message',
+    `data: ${JSON.stringify(summaryMessage)}`,
+])
 
 const generationFailure: AssistantGenerationStatusEvent = { type: AssistantGenerationStatusType.GenerationError }
 const responseWithReasoningStepsOnly = {
-    ...chatResponse,
+    ...visualizationMessage,
     answer: null,
 }
 
@@ -22,4 +28,4 @@ export const generationFailureChunk = generateChunk([
     `data: ${JSON.stringify(generationFailure)}`,
 ])
 
-export const failureChunk = generateChunk(['event: message', `data: ${JSON.stringify(failureResponse)}`])
+export const failureChunk = generateChunk(['event: message', `data: ${JSON.stringify(failureMessage)}`])
diff --git a/frontend/src/scenes/max/__mocks__/failureResponse.json b/frontend/src/scenes/max/__mocks__/failureMessage.json
similarity index 100%
rename from frontend/src/scenes/max/__mocks__/failureResponse.json
rename to frontend/src/scenes/max/__mocks__/failureMessage.json
diff --git a/frontend/src/scenes/max/__mocks__/summaryMessage.json b/frontend/src/scenes/max/__mocks__/summaryMessage.json
new file mode 100644
index 00000000000..011565dc126
--- /dev/null
+++ b/frontend/src/scenes/max/__mocks__/summaryMessage.json
@@ -0,0 +1,5 @@
+{
+    "type": "ai",
+    "content": "Looks like no pageviews have occurred.
Get some damn users.", + "done": true +} diff --git a/frontend/src/scenes/max/__mocks__/chatResponse.json b/frontend/src/scenes/max/__mocks__/visualizationMessage.json similarity index 99% rename from frontend/src/scenes/max/__mocks__/chatResponse.json rename to frontend/src/scenes/max/__mocks__/visualizationMessage.json index 5fed25c08bf..cabfe93ca1c 100644 --- a/frontend/src/scenes/max/__mocks__/chatResponse.json +++ b/frontend/src/scenes/max/__mocks__/visualizationMessage.json @@ -64,5 +64,6 @@ "smoothingIntervals": 1, "yAxisScaleType": null } - } + }, + "done": true } diff --git a/frontend/src/scenes/max/maxLogic.ts b/frontend/src/scenes/max/maxLogic.ts index 0ff61094dee..04f36c6cd1c 100644 --- a/frontend/src/scenes/max/maxLogic.ts +++ b/frontend/src/scenes/max/maxLogic.ts @@ -4,7 +4,7 @@ import { createParser } from 'eventsource-parser' import { actions, afterMount, connect, kea, key, listeners, path, props, reducers, selectors } from 'kea' import { loaders } from 'kea-loaders' import api from 'lib/api' -import { isHumanMessage, isRouterMessage, isVisualizationMessage } from 'scenes/max/utils' +import { isHumanMessage } from 'scenes/max/utils' import { projectLogic } from 'scenes/projectLogic' import { @@ -13,6 +13,7 @@ import { AssistantGenerationStatusType, AssistantMessageType, FailureMessage, + HumanMessage, NodeKind, RefreshType, RootAssistantMessage, @@ -28,12 +29,14 @@ export interface MaxLogicProps { export type MessageStatus = 'loading' | 'completed' | 'error' export type ThreadMessage = RootAssistantMessage & { - status?: MessageStatus + status: MessageStatus } -const FAILURE_MESSAGE: FailureMessage = { +const FAILURE_MESSAGE: FailureMessage & ThreadMessage = { type: AssistantMessageType.Failure, content: 'Oops! It looks like I’m having trouble generating this trends insight. Could you please try again?', + status: 'error', + done: true, } export const maxLogic = kea([ @@ -48,7 +51,7 @@ export const maxLogic = kea([ setThreadLoaded: (testOnlyOverride = false) => ({ testOnlyOverride }), addMessage: (message: ThreadMessage) => ({ message }), replaceMessage: (index: number, message: ThreadMessage) => ({ index, message }), - setMessageStatus: (index: number, status: ThreadMessage['status']) => ({ index, status }), + setMessageStatus: (index: number, status: MessageStatus) => ({ index, status }), setQuestion: (question: string) => ({ question }), setVisibleSuggestions: (suggestions: string[]) => ({ suggestions }), shuffleVisibleSuggestions: true, @@ -149,9 +152,7 @@ export const maxLogic = kea([ ) }, askMax: async ({ prompt }) => { - actions.addMessage({ type: AssistantMessageType.Human, content: prompt }) - let generatingMessageIndex: number = -1 - + actions.addMessage({ type: AssistantMessageType.Human, content: prompt, done: true, status: 'completed' }) try { const response = await api.chat({ session_id: props.sessionId, @@ -173,21 +174,15 @@ export const maxLogic = kea([ return } - if (isRouterMessage(parsedResponse)) { + if (values.thread[values.thread.length - 1].status === 'completed') { actions.addMessage({ ...parsedResponse, - status: 'completed', + status: !parsedResponse.done ? 
'loading' : 'completed', }) - } else if (generatingMessageIndex === -1) { - generatingMessageIndex = values.thread.length - - if (parsedResponse) { - actions.addMessage({ ...parsedResponse, status: 'loading' }) - } } else if (parsedResponse) { - actions.replaceMessage(generatingMessageIndex, { + actions.replaceMessage(values.thread.length - 1, { ...parsedResponse, - status: values.thread[generatingMessageIndex].status, + status: !parsedResponse.done ? 'loading' : 'completed', }) } } else if (event === AssistantEventType.Status) { @@ -197,7 +192,7 @@ export const maxLogic = kea([ } if (parsedResponse.type === AssistantGenerationStatusType.GenerationError) { - actions.setMessageStatus(generatingMessageIndex, 'error') + actions.setMessageStatus(values.thread.length - 1, 'error') } } }, @@ -205,47 +200,28 @@ export const maxLogic = kea([ while (true) { const { done, value } = await reader.read() - parser.feed(decoder.decode(value)) - if (done) { - if (generatingMessageIndex === -1) { - break - } - - const generatedMessage = values.thread[generatingMessageIndex] - if (generatedMessage && isVisualizationMessage(generatedMessage) && generatedMessage.plan) { - actions.setMessageStatus(generatingMessageIndex, 'completed') - } else if (generatedMessage) { - actions.replaceMessage(generatingMessageIndex, FAILURE_MESSAGE) - } else { - actions.addMessage({ - ...FAILURE_MESSAGE, - status: 'completed', - }) - } break } } } catch (e) { captureException(e) - if (generatingMessageIndex !== -1) { - if (values.thread[generatingMessageIndex]) { - actions.replaceMessage(generatingMessageIndex, FAILURE_MESSAGE) - } else { - actions.addMessage({ - ...FAILURE_MESSAGE, - status: 'completed', - }) - } + if (values.thread[values.thread.length - 1]?.status === 'loading') { + actions.replaceMessage(values.thread.length - 1, FAILURE_MESSAGE) + } else if (values.thread[values.thread.length - 1]?.status !== 'error') { + actions.addMessage({ + ...FAILURE_MESSAGE, + status: 'completed', + }) } } actions.setThreadLoaded() }, retryLastMessage: () => { - const lastMessage = values.thread.filter(isHumanMessage).pop() + const lastMessage = values.thread.filter(isHumanMessage).pop() as HumanMessage | undefined if (lastMessage) { actions.askMax(lastMessage.content) } diff --git a/frontend/src/scenes/max/utils.ts b/frontend/src/scenes/max/utils.ts index 6c6d77204f5..0bfa5757863 100644 --- a/frontend/src/scenes/max/utils.ts +++ b/frontend/src/scenes/max/utils.ts @@ -1,5 +1,6 @@ import { AssistantFunnelsQuery, + AssistantMessage, AssistantMessageType, AssistantTrendsQuery, FailureMessage, @@ -22,6 +23,10 @@ export function isHumanMessage(message: RootAssistantMessage | undefined | null) return message?.type === AssistantMessageType.Human } +export function isAssistantMessage(message: RootAssistantMessage | undefined | null): message is AssistantMessage { + return message?.type === AssistantMessageType.Assistant +} + export function isFailureMessage(message: RootAssistantMessage | undefined | null): message is FailureMessage { return message?.type === AssistantMessageType.Failure } diff --git a/frontend/src/scenes/onboarding/Onboarding.tsx b/frontend/src/scenes/onboarding/Onboarding.tsx index 522e28c569a..a4106a69173 100644 --- a/frontend/src/scenes/onboarding/Onboarding.tsx +++ b/frontend/src/scenes/onboarding/Onboarding.tsx @@ -26,6 +26,7 @@ import { ExperimentsSDKInstructions } from './sdks/experiments/ExperimentsSDKIns import { FeatureFlagsSDKInstructions } from './sdks/feature-flags/FeatureFlagsSDKInstructions' import { 
ProductAnalyticsSDKInstructions } from './sdks/product-analytics/ProductAnalyticsSDKInstructions' import { SDKs } from './sdks/SDKs' +import { sdksLogic } from './sdks/sdksLogic' import { SessionReplaySDKInstructions } from './sdks/session-replay/SessionReplaySDKInstructions' import { SurveysSDKInstructions } from './sdks/surveys/SurveysSDKInstructions' @@ -105,12 +106,16 @@ const OnboardingWrapper = ({ children }: { children: React.ReactNode }): JSX.Ele const ProductAnalyticsOnboarding = (): JSX.Element => { const { currentTeam } = useValues(teamLogic) const { featureFlags } = useValues(featureFlagLogic) + const { combinedSnippetAndLiveEventsHosts } = useValues(sdksLogic) + // mount the logic here so that it stays mounted for the entire onboarding flow // not sure if there is a better way to do this useValues(newDashboardLogic) const showTemplateSteps = - featureFlags[FEATURE_FLAGS.ONBOARDING_DASHBOARD_TEMPLATES] == 'test' && window.innerWidth > 1000 + featureFlags[FEATURE_FLAGS.ONBOARDING_DASHBOARD_TEMPLATES] == 'test' && + window.innerWidth > 1000 && + combinedSnippetAndLiveEventsHosts.length > 0 const options: ProductConfigOption[] = [ { diff --git a/frontend/src/scenes/onboarding/productAnalyticsSteps/DashboardTemplateConfigureStep.tsx b/frontend/src/scenes/onboarding/productAnalyticsSteps/DashboardTemplateConfigureStep.tsx index c2740d598ca..9d30f984a4c 100644 --- a/frontend/src/scenes/onboarding/productAnalyticsSteps/DashboardTemplateConfigureStep.tsx +++ b/frontend/src/scenes/onboarding/productAnalyticsSteps/DashboardTemplateConfigureStep.tsx @@ -52,8 +52,8 @@ const UrlInput = ({ iframeRef }: { iframeRef: React.RefObject return (
setInputValue(v)} diff --git a/frontend/src/scenes/session-recordings/player/inspector/PlayerInspector.stories.tsx b/frontend/src/scenes/session-recordings/player/inspector/PlayerInspector.stories.tsx new file mode 100644 index 00000000000..1c3376a829f --- /dev/null +++ b/frontend/src/scenes/session-recordings/player/inspector/PlayerInspector.stories.tsx @@ -0,0 +1,88 @@ +import { Meta, StoryFn, StoryObj } from '@storybook/react' +import { BindLogic, useActions, useValues } from 'kea' +import { useEffect } from 'react' +import recordingEventsJson from 'scenes/session-recordings/__mocks__/recording_events_query' +import recordingMetaJson from 'scenes/session-recordings/__mocks__/recording_meta.json' +import { snapshotsAsJSONLines } from 'scenes/session-recordings/__mocks__/recording_snapshots' +import { PlayerInspector } from 'scenes/session-recordings/player/inspector/PlayerInspector' +import { sessionRecordingDataLogic } from 'scenes/session-recordings/player/sessionRecordingDataLogic' +import { sessionRecordingPlayerLogic } from 'scenes/session-recordings/player/sessionRecordingPlayerLogic' + +import { mswDecorator } from '~/mocks/browser' + +type Story = StoryObj +const meta: Meta = { + title: 'Components/PlayerInspector', + component: PlayerInspector, + decorators: [ + mswDecorator({ + get: { + '/api/environments/:team_id/session_recordings/:id': recordingMetaJson, + '/api/environments/:team_id/session_recordings/:id/snapshots': (req, res, ctx) => { + // with no sources, returns sources... + if (req.url.searchParams.get('source') === 'blob') { + return res(ctx.text(snapshotsAsJSONLines())) + } + // with no source requested should return sources + return [ + 200, + { + sources: [ + { + source: 'blob', + start_timestamp: '2023-08-11T12:03:36.097000Z', + end_timestamp: '2023-08-11T12:04:52.268000Z', + blob_key: '1691755416097-1691755492268', + }, + ], + }, + ] + }, + }, + post: { + '/api/environments/:team_id/query': (req, res, ctx) => { + const body = req.body as Record + if (body.query.kind === 'EventsQuery' && body.query.properties.length === 1) { + return res(ctx.json(recordingEventsJson)) + } + + // default to an empty response or we duplicate information + return res(ctx.json({ results: [] })) + }, + }, + }), + ], +} +export default meta + +const BasicTemplate: StoryFn = () => { + const dataLogic = sessionRecordingDataLogic({ sessionRecordingId: '12345', playerKey: 'story-template' }) + const { sessionPlayerMetaData } = useValues(dataLogic) + + const { loadSnapshots, loadEvents } = useActions(dataLogic) + loadSnapshots() + + // TODO you have to call actions in a particular order + // and only when some other data has already been loaded + // 🫠 + useEffect(() => { + loadEvents() + }, [sessionPlayerMetaData]) + + return ( +
+            <BindLogic logic={sessionRecordingPlayerLogic} props={{ sessionRecordingId: '12345', playerKey: 'story-template' }}>
+                <PlayerInspector />
+            </BindLogic>
+        </div>
+ ) +} + +export const Default: Story = BasicTemplate.bind({}) +Default.args = {} diff --git a/frontend/src/scenes/session-recordings/player/inspector/PlayerInspector.tsx b/frontend/src/scenes/session-recordings/player/inspector/PlayerInspector.tsx new file mode 100644 index 00000000000..dc8c712cef4 --- /dev/null +++ b/frontend/src/scenes/session-recordings/player/inspector/PlayerInspector.tsx @@ -0,0 +1,11 @@ +import { PlayerInspectorControls } from 'scenes/session-recordings/player/inspector/PlayerInspectorControls' +import { PlayerInspectorList } from 'scenes/session-recordings/player/inspector/PlayerInspectorList' + +export function PlayerInspector(): JSX.Element { + return ( + <> + + + + ) +} diff --git a/frontend/src/scenes/session-recordings/player/sidebar/PlayerSidebarTab.tsx b/frontend/src/scenes/session-recordings/player/sidebar/PlayerSidebarTab.tsx index 8f2c12055f2..9c69a46274d 100644 --- a/frontend/src/scenes/session-recordings/player/sidebar/PlayerSidebarTab.tsx +++ b/frontend/src/scenes/session-recordings/player/sidebar/PlayerSidebarTab.tsx @@ -1,9 +1,8 @@ import { useValues } from 'kea' +import { PlayerInspector } from 'scenes/session-recordings/player/inspector/PlayerInspector' import { SessionRecordingSidebarTab } from '~/types' -import { PlayerInspectorControls } from '../inspector/PlayerInspectorControls' -import { PlayerInspectorList } from '../inspector/PlayerInspectorList' import { PlayerSidebarDebuggerTab } from './PlayerSidebarDebuggerTab' import { playerSidebarLogic } from './playerSidebarLogic' import { PlayerSidebarOverviewTab } from './PlayerSidebarOverviewTab' @@ -15,12 +14,7 @@ export function PlayerSidebarTab(): JSX.Element | null { case SessionRecordingSidebarTab.OVERVIEW: return case SessionRecordingSidebarTab.INSPECTOR: - return ( - <> - - - - ) + return case SessionRecordingSidebarTab.DEBUGGER: return default: diff --git a/frontend/src/scenes/surveys/SurveyEdit.tsx b/frontend/src/scenes/surveys/SurveyEdit.tsx index 89f6eef2c22..28a2b8d9205 100644 --- a/frontend/src/scenes/surveys/SurveyEdit.tsx +++ b/frontend/src/scenes/surveys/SurveyEdit.tsx @@ -6,6 +6,7 @@ import { IconInfo } from '@posthog/icons' import { IconLock, IconPlus, IconTrash } from '@posthog/icons' import { LemonButton, + LemonCalendarSelect, LemonCheckbox, LemonCollapse, LemonDialog, @@ -15,17 +16,21 @@ import { LemonTag, LemonTextArea, Link, + Popover, } from '@posthog/lemon-ui' import { BindLogic, useActions, useValues } from 'kea' import { EventSelect } from 'lib/components/EventSelect/EventSelect' import { FlagSelector } from 'lib/components/FlagSelector' import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' import { FEATURE_FLAGS } from 'lib/constants' +import { dayjs } from 'lib/dayjs' import { IconCancel } from 'lib/lemon-ui/icons' import { LemonField } from 'lib/lemon-ui/LemonField' import { LemonRadio } from 'lib/lemon-ui/LemonRadio' import { Tooltip } from 'lib/lemon-ui/Tooltip' import { featureFlagLogic as enabledFeaturesLogic } from 'lib/logic/featureFlagLogic' +import { formatDate } from 'lib/utils' +import { useMemo, useState } from 'react' import { featureFlagLogic } from 'scenes/feature-flags/featureFlagLogic' import { FeatureFlagReleaseConditions } from 'scenes/feature-flags/FeatureFlagReleaseConditions' @@ -62,15 +67,21 @@ export default function SurveyEdit(): JSX.Element { schedule, hasBranchingLogic, surveyRepeatedActivationAvailable, + dataCollectionType, + surveyUsesLimit, + surveyUsesAdaptiveLimit, } = useValues(surveyLogic) const { 
setSurveyValue, resetTargeting, + resetSurveyResponseLimits, + resetSurveyAdaptiveSampling, setSelectedPageIndex, setSelectedSection, setFlagPropertyErrors, setSchedule, deleteBranchingLogic, + setDataCollectionType, } = useActions(surveyLogic) const { surveysMultipleQuestionsAvailable, @@ -79,11 +90,25 @@ export default function SurveyEdit(): JSX.Element { surveysActionsAvailable, } = useValues(surveysLogic) const { featureFlags } = useValues(enabledFeaturesLogic) + const [visible, setVisible] = useState(false) const sortedItemIds = survey.questions.map((_, idx) => idx.toString()) const { thankYouMessageDescriptionContentType = null } = survey.appearance ?? {} const surveysRecurringScheduleDisabledReason = surveysRecurringScheduleAvailable ? undefined : 'Upgrade your plan to use repeating surveys' + const surveysAdaptiveLimitsDisabledReason = surveysRecurringScheduleAvailable + ? undefined + : 'Upgrade your plan to use an adaptive limit on survey responses' + + useMemo(() => { + if (surveyUsesLimit) { + setDataCollectionType('until_limit') + } else if (surveyUsesAdaptiveLimit) { + setDataCollectionType('until_adaptive_limit') + } else { + setDataCollectionType('until_stopped') + } + }, [surveyUsesLimit, surveyUsesAdaptiveLimit, setDataCollectionType]) if (survey.iteration_count && survey.iteration_count > 0) { setSchedule('recurring') @@ -852,44 +877,157 @@ export default function SurveyEdit(): JSX.Element { header: 'Completion conditions', content: ( <> - - {({ onChange, value }) => { - return ( -
- { - const newResponsesLimit = checked ? 100 : null - onChange(newResponsesLimit) - }} - /> - Stop the survey once - { - if (newValue && newValue > 0) { - onChange(newValue) - } else { - onChange(null) - } - }} - className="w-16" - />{' '} - responses are received. - - - -
- ) - }} -
+
+

How long would you like to collect survey responses?

+ + { + if (newValue === 'until_limit') { + resetSurveyAdaptiveSampling() + setSurveyValue('responses_limit', survey.responses_limit || 100) + } else if (newValue === 'until_adaptive_limit') { + resetSurveyResponseLimits() + setSurveyValue( + 'response_sampling_interval', + survey.response_sampling_interval || 1 + ) + setSurveyValue( + 'response_sampling_interval_type', + survey.response_sampling_interval_type || 'month' + ) + setSurveyValue( + 'response_sampling_limit', + survey.response_sampling_limit || 100 + ) + setSurveyValue( + 'response_sampling_start_date', + survey.response_sampling_start_date || dayjs() + ) + } else { + resetSurveyResponseLimits() + resetSurveyAdaptiveSampling() + } + setDataCollectionType(newValue) + }} + options={[ + { + value: 'until_stopped', + label: 'Keep collecting responses until the survey is stopped', + 'data-attr': 'survey-collection-until-stopped', + }, + { + value: 'until_limit', + label: 'Stop displaying the survey after reaching a certain number of completed surveys', + 'data-attr': 'survey-collection-until-limit', + }, + { + value: 'until_adaptive_limit', + label: 'Collect a certain number of surveys per day, week or month', + 'data-attr': 'survey-collection-until-adaptive-limit', + disabledReason: surveysAdaptiveLimitsDisabledReason, + }, + ]} + /> + +
+ {dataCollectionType == 'until_adaptive_limit' && ( + +
+ Starting on{' '} + { + setSurveyValue('response_sampling_start_date', value) + setVisible(false) + }} + showTimeToggle={false} + onClose={() => setVisible(false)} + /> + } + visible={visible} + onClickOutside={() => setVisible(false)} + > + setVisible(!visible)}> + {formatDate(dayjs(survey.response_sampling_start_date || ''))} + + + , capture up to + { + setSurveyValue('response_sampling_limit', newValue) + }} + value={survey.response_sampling_limit || 0} + /> + responses, every + { + setSurveyValue('response_sampling_interval', newValue) + }} + value={survey.response_sampling_interval || 0} + /> + { + setSurveyValue('response_sampling_interval_type', newValue) + }} + options={[ + { value: 'day', label: 'Day(s)' }, + { value: 'week', label: 'Week(s)' }, + { value: 'month', label: 'Month(s)' }, + ]} + /> + + + +
+
+ )} + {dataCollectionType == 'until_limit' && ( + + {({ onChange, value }) => { + return ( +
+ Stop the survey once + { + if (newValue && newValue > 0) { + onChange(newValue) + } else { + onChange(null) + } + }} + className="w-16" + />{' '} + responses are received. + + + +
+ ) + }} +
+ )} {featureFlags[FEATURE_FLAGS.SURVEYS_RECURRING] && ( -
-

How often should we show this survey?

+
+

How often should we show this survey?

) : null}
- {survey.responses_limit && ( + {surveyUsesLimit && ( <> Completion conditions @@ -351,6 +352,17 @@ export function SurveyView({ id }: { id: string }): JSX.Element { )} + {surveyUsesAdaptiveLimit && ( + <> + Completion conditions + + Survey response collection is limited to receive{' '} + {survey.response_sampling_limit} responses every{' '} + {survey.response_sampling_interval}{' '} + {survey.response_sampling_interval_type}(s). + + + )} { id: 'new' linked_flag_id: number | null diff --git a/frontend/src/scenes/surveys/surveyLogic.tsx b/frontend/src/scenes/surveys/surveyLogic.tsx index 528aac6db6e..65345656590 100644 --- a/frontend/src/scenes/surveys/surveyLogic.tsx +++ b/frontend/src/scenes/surveys/surveyLogic.tsx @@ -105,6 +105,7 @@ export interface QuestionResultsReady { [key: string]: boolean } +export type DataCollectionType = 'until_stopped' | 'until_limit' | 'until_adaptive_limit' export type ScheduleType = 'once' | 'recurring' const getResponseField = (i: number): string => (i === 0 ? '$survey_response' : `$survey_response_${i}`) @@ -168,6 +169,9 @@ export const surveyLogic = kea([ nextStep, specificQuestionIndex, }), + setDataCollectionType: (dataCollectionType: DataCollectionType) => ({ + dataCollectionType, + }), resetBranchingForQuestion: (questionIndex) => ({ questionIndex }), deleteBranchingLogic: true, archiveSurvey: true, @@ -178,6 +182,8 @@ export const surveyLogic = kea([ setSchedule: (schedule: ScheduleType) => ({ schedule }), resetTargeting: true, + resetSurveyAdaptiveSampling: true, + resetSurveyResponseLimits: true, setFlagPropertyErrors: (errors: any) => ({ errors }), }), loaders(({ props, actions, values }) => ({ @@ -608,6 +614,19 @@ export const surveyLogic = kea([ loadSurveySuccess: () => { actions.loadSurveyUserStats() }, + resetSurveyResponseLimits: () => { + actions.setSurveyValue('responses_limit', null) + }, + + resetSurveyAdaptiveSampling: () => { + actions.setSurveyValues({ + response_sampling_interval: null, + response_sampling_interval_type: null, + response_sampling_limit: null, + response_sampling_start_date: null, + response_sampling_daily_limits: null, + }) + }, resetTargeting: () => { actions.setSurveyValue('linked_flag_id', NEW_SURVEY.linked_flag_id) actions.setSurveyValue('targeting_flag_filters', NEW_SURVEY.targeting_flag_filters) @@ -647,6 +666,12 @@ export const surveyLogic = kea([ setSurveyMissing: () => true, }, ], + dataCollectionType: [ + 'until_stopped' as DataCollectionType, + { + setDataCollectionType: (_, { dataCollectionType }) => dataCollectionType, + }, + ], survey: [ { ...NEW_SURVEY } as NewSurvey | Survey, @@ -877,6 +902,24 @@ export const surveyLogic = kea([ return !!(survey.start_date && !survey.end_date) }, ], + surveyUsesLimit: [ + (s) => [s.survey], + (survey: Survey): boolean => { + return !!(survey.responses_limit && survey.responses_limit > 0) + }, + ], + surveyUsesAdaptiveLimit: [ + (s) => [s.survey], + (survey: Survey): boolean => { + return !!( + survey.response_sampling_interval && + survey.response_sampling_interval > 0 && + survey.response_sampling_interval_type !== '' && + survey.response_sampling_limit && + survey.response_sampling_limit > 0 + ) + }, + ], surveyShufflingQuestionsAvailable: [ (s) => [s.survey], (survey: Survey): boolean => { @@ -1022,6 +1065,7 @@ export const surveyLogic = kea([ } }, ], + getBranchingDropdownValue: [ (s) => [s.survey], (survey) => (questionIndex: number, question: RatingSurveyQuestion | MultipleSurveyQuestion) => { diff --git a/frontend/src/types.ts b/frontend/src/types.ts index 
21718ba6c79..24c25f4df48 100644 --- a/frontend/src/types.ts +++ b/frontend/src/types.ts @@ -2767,6 +2767,11 @@ export interface Survey { iteration_start_dates?: string[] current_iteration?: number | null current_iteration_start_date?: string + response_sampling_start_date?: string | null + response_sampling_interval_type?: string | null + response_sampling_interval?: number | null + response_sampling_limit?: number | null + response_sampling_daily_limits?: string[] | null } export enum SurveyUrlMatchType { @@ -3276,8 +3281,9 @@ export interface Experiment { feature_flag_key: string feature_flag?: FeatureFlagBasicType exposure_cohort?: number - filters: FilterType - metrics: ExperimentMetric[] + filters: TrendsFilterType | FunnelsFilterType + metrics: (ExperimentTrendsQuery | ExperimentFunnelsQuery)[] + metrics_secondary: (ExperimentTrendsQuery | ExperimentFunnelsQuery)[] parameters: { minimum_detectable_effect?: number recommended_running_time?: number @@ -3285,7 +3291,7 @@ export interface Experiment { feature_flag_variants: MultivariateFlagVariant[] custom_exposure_filter?: FilterType aggregation_group_type_index?: integer - variant_screenshot_media_ids?: Record + variant_screenshot_media_ids?: Record } start_date?: string | null end_date?: string | null diff --git a/frontend/utils.mjs b/frontend/utils.mjs index 3ebfbc196c9..86f9c3a02ea 100644 --- a/frontend/utils.mjs +++ b/frontend/utils.mjs @@ -87,6 +87,7 @@ export function copyIndexHtml( const cssLoader = ` const link = document.createElement("link"); link.rel = "stylesheet"; + link.crossOrigin = "anonymous"; link.href = (window.JS_URL || '') + "/static/" + ${JSON.stringify(cssFile)}; document.head.appendChild(link) ` diff --git a/hogvm/__tests__/__snapshots__/arrays.hoge b/hogvm/__tests__/__snapshots__/arrays.hoge index c62202ce612..2e86bd37870 100644 --- a/hogvm/__tests__/__snapshots__/arrays.hoge +++ b/hogvm/__tests__/__snapshots__/arrays.hoge @@ -29,4 +29,6 @@ 33, 2, 33, 3, 43, 3, 33, 4, 2, "indexOf", 2, 2, "print", 1, 35, 52, "lambda", 1, 0, 6, 33, 2, 36, 0, 13, 38, 53, 0, 33, 1, 33, 2, 33, 3, 33, 4, 33, 5, 43, 5, 2, "arrayCount", 2, 2, "print", 1, 35, 32, "------", 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 43, 3, 36, 3, 33, 1, 45, 36, 3, 33, 2, 45, 36, 3, 33, 3, 45, 36, 3, 33, 4, 45, 2, "print", 4, 35, 36, 3, 33, --1, 45, 36, 3, 33, -2, 45, 36, 3, 33, -3, 45, 36, 3, 33, -4, 45, 2, "print", 4, 35, 35, 35, 35, 35] +-1, 45, 36, 3, 33, -2, 45, 36, 3, 33, -3, 45, 36, 3, 33, -4, 45, 2, "print", 4, 35, 32, "------", 2, "print", 1, 35, 32, +"a", 32, "b", 32, "c", 43, 3, 32, "a", 21, 2, "print", 1, 35, 32, "a", 32, "b", 32, "c", 43, 3, 32, "d", 21, 2, "print", +1, 35, 43, 0, 32, "a", 21, 2, "print", 1, 35, 35, 35, 35, 35] diff --git a/hogvm/__tests__/__snapshots__/arrays.js b/hogvm/__tests__/__snapshots__/arrays.js new file mode 100644 index 00000000000..a6954571659 --- /dev/null +++ b/hogvm/__tests__/__snapshots__/arrays.js @@ -0,0 +1,144 @@ +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function indexOf (arrOrString, elem) { if (Array.isArray(arrOrString)) { return arrOrString.indexOf(elem) + 1 } else { return 0 } } +function has (arr, elem) { if (!Array.isArray(arr) || arr.length === 0) { return false } return arr.includes(elem) } +function arrayStringConcat (arr, separator = '') { if (!Array.isArray(arr)) { return '' } return arr.join(separator) } +function arraySort (arr) { if (!Array.isArray(arr)) { return [] } return [...arr].sort() } +function arrayReverseSort (arr) { if (!Array.isArray(arr)) { 
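// like the other Hog STL array helpers, non-array inputs degrade to an empty result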
return [] } return [...arr].sort().reverse() } +function arrayReverse (arr) { if (!Array.isArray(arr)) { return [] } return [...arr].reverse() } +function arrayPushFront (arr, item) { if (!Array.isArray(arr)) { return [item] } return [item, ...arr] } +function arrayPushBack (arr, item) { if (!Array.isArray(arr)) { return [item] } return [...arr, item] } +function arrayPopFront (arr) { if (!Array.isArray(arr)) { return [] } return arr.slice(1) } +function arrayPopBack (arr) { if (!Array.isArray(arr)) { return [] } return arr.slice(0, arr.length - 1) } +function arrayCount (func, arr) { let count = 0; for (let i = 0; i < arr.length; i++) { if (func(arr[i])) { count = count + 1 } } return count } +function __setProperty(objectOrArray, key, value) { + if (Array.isArray(objectOrArray)) { + if (key > 0) { + objectOrArray[key - 1] = value + } else { + objectOrArray[objectOrArray.length + key] = value + } + } else { + objectOrArray[key] = value + } + return objectOrArray +} +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? `, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __lambda (fn) { return fn } +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __getProperty(objectOrArray, key, nullish) { + if ((nullish && !objectOrArray) || key === 0) { return null } + if (Array.isArray(objectOrArray)) { + return key > 0 ? 
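// Hog arrays are 1-based: positive keys shift down by one, negative keys count
// back from the end, and key 0 (handled above) returns null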
objectOrArray[key - 1] : objectOrArray[objectOrArray.length + key] + } else { + return objectOrArray[key] + } +} +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} + +print([]); +print([1, 2, 3]); +print([1, "2", 3]); +print([1, [2, 3], 4]); +print([1, [2, [3, 4]], 5]); +let a = [1, 2, 3]; +print(__getProperty(a, 2, false)); +print(__getProperty(a, 2, true)); +print(__getProperty(a, 2, true)); +print(__getProperty(a, 7, true)); +print(__getProperty(a, 7, true)); +print(__getProperty([1, 2, 3], 2, false)); +print(__getProperty(__getProperty(__getProperty([1, [2, [3, 4]], 5], 2, false), 2, false), 2, false)); +print(__getProperty(__getProperty(__getProperty([1, [2, [3, 4]], 5], 2, true), 2, true), 2, true)); +print(__getProperty(__getProperty(__getProperty([1, [2, [3, 4]], 5], 2, true), 2, true), 2, true)); +print(__getProperty(__getProperty(__getProperty([1, [2, [3, 4]], 5], 7, true), 4, true), 2, true)); +print(__getProperty(__getProperty(__getProperty([1, [2, [3, 4]], 5], 7, true), 4, true), 2, true)); +print((__getProperty(__getProperty(__getProperty([1, [2, [3, 4]], 5], 2, false), 2, false), 2, false) + 1)); +print(__getProperty(__getProperty(__getProperty([1, [2, [3, 4]], 5], 2, false), 2, false), 2, false)); +print("------"); +let b = [1, 2, [1, 2, 3]]; +__setProperty(b, 2, 4); +print(__getProperty(b, 1, false)); +print(__getProperty(b, 2, false)); +print(__getProperty(b, 3, false)); +__setProperty(__getProperty(b, 3, false), 3, 8); +print(b); +print("------"); +print(arrayPushBack([1, 2, 3], 4)); +print(arrayPushFront([1, 2, 3], 0)); +print(arrayPopBack([1, 2, 3])); +print(arrayPopFront([1, 2, 3])); +print(arraySort([3, 2, 1])); +print(arrayReverse([1, 2, 3])); +print(arrayReverseSort([3, 2, 1])); +print(arrayStringConcat([1, 2, 3], ",")); +print("-----"); +let arr = [1, 2, 3, 4]; +print(arr); +arrayPushBack(arr, 5); +print(arr); +arrayPushFront(arr, 0); +print(arr); +arrayPopBack(arr); +print(arr); +arrayPopFront(arr); +print(arr); +arraySort(arr); +print(arr); +arrayReverse(arr); +print(arr); +arrayReverseSort(arr); +print(arr); +print("------"); +print(has(arr, 0)); +print(has(arr, 2)); +print(has(arr, "banana")); +print(has("banananas", "banana")); +print(has("banananas", "foo")); +print(has(["1", "2"], "1")); +print(indexOf([1, 2, 3], 1)); +print(indexOf([1, 2, 3], 2)); +print(indexOf([1, 2, 3], 3)); +print(indexOf([1, 2, 3], 4)); +print(arrayCount(__lambda((x) => (x > 2)), [1, 2, 3, 4, 5])); +print("------"); +let c = [1, 2, 3]; +print(__getProperty(c, 1, false), __getProperty(c, 2, false), __getProperty(c, 3, false), __getProperty(c, 4, false)); +print(__getProperty(c, -1, false), __getProperty(c, -2, false), __getProperty(c, -3, false), __getProperty(c, -4, false)); +print("------"); +print((["a", "b", "c"].includes("a"))); +print((["a", "b", "c"].includes("d"))); +print(([].includes("a"))); diff --git 
a/hogvm/__tests__/__snapshots__/arrays.stdout b/hogvm/__tests__/__snapshots__/arrays.stdout index 2790d891956..a06cfa41046 100644 --- a/hogvm/__tests__/__snapshots__/arrays.stdout +++ b/hogvm/__tests__/__snapshots__/arrays.stdout @@ -54,3 +54,7 @@ true ------ 1 2 3 null 3 2 1 null +------ +true +false +false diff --git a/hogvm/__tests__/__snapshots__/bytecodeStl.js b/hogvm/__tests__/__snapshots__/bytecodeStl.js new file mode 100644 index 00000000000..2e997fe17d9 --- /dev/null +++ b/hogvm/__tests__/__snapshots__/bytecodeStl.js @@ -0,0 +1,66 @@ +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function like (str, pattern) { return __like(str, pattern, false) } +function arrayMap (func, arr) { let result = []; for (let i = 0; i < arr.length; i++) { result = arrayPushBack(result, func(arr[i])) } return result } +function arrayFilter (func, arr) { let result = []; for (let i = 0; i < arr.length; i++) { if (func(arr[i])) { result = arrayPushBack(result, arr[i]) } } return result} +function arrayPushBack (arr, item) { if (!Array.isArray(arr)) { return [item] } return [...arr, item] } +function arrayExists (func, arr) { for (let i = 0; i < arr.length; i++) { if (func(arr[i])) { return true } } return false } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? `, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 
'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __like(str, pattern, caseInsensitive = false) { + if (caseInsensitive) { + str = str.toLowerCase() + pattern = pattern.toLowerCase() + } + pattern = String(pattern) + .replaceAll(/[-/\\^$*+?.()|[\]{}]/g, '\\$&') + .replaceAll('%', '.*') + .replaceAll('_', '.') + return new RegExp(pattern).test(str) +} +function __lambda (fn) { return fn } +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} + +print("--- arrayMap ----"); +print(arrayMap(__lambda((x) => (x * 2)), [1, 2, 3])); +print("--- arrayExists ----"); +print(arrayExists(__lambda((x) => like(x, "%nana%")), ["apple", "banana", "cherry"])); +print(arrayExists(__lambda((x) => like(x, "%boom%")), ["apple", "banana", "cherry"])); +print(arrayExists(__lambda((x) => like(x, "%boom%")), [])); +print("--- arrayFilter ----"); +print(arrayFilter(__lambda((x) => like(x, "%nana%")), ["apple", "banana", "cherry"])); +print(arrayFilter(__lambda((x) => like(x, "%e%")), ["apple", "banana", "cherry"])); +print(arrayFilter(__lambda((x) => like(x, "%boom%")), [])); diff --git a/hogvm/__tests__/__snapshots__/catch.js b/hogvm/__tests__/__snapshots__/catch.js new file mode 100644 index 00000000000..87734f68fd9 --- /dev/null +++ b/hogvm/__tests__/__snapshots__/catch.js @@ -0,0 +1,134 @@ +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function concat (...args) { return args.map((arg) => (arg === null ? '' : __STLToString(arg))).join('') } +function __getProperty(objectOrArray, key, nullish) { + if ((nullish && !objectOrArray) || key === 0) { return null } + if (Array.isArray(objectOrArray)) { + return key > 0 ? 
objectOrArray[key - 1] : objectOrArray[objectOrArray.length + key] + } else { + return objectOrArray[key] + } +} +function __STLToString(arg) { + if (arg && __isHogDate(arg)) { return `${arg.year}-${arg.month.toString().padStart(2, '0')}-${arg.day.toString().padStart(2, '0')}`; } + else if (arg && __isHogDateTime(arg)) { return __DateTimeToString(arg); } + return __printHogStringOutput(arg); } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? `, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 
'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __DateTimeToString(dt) { + if (__isHogDateTime(dt)) { + const date = new Date(dt.dt * 1000); + const timeZone = dt.zone || 'UTC'; + const milliseconds = Math.floor(dt.dt * 1000 % 1000); + const options = { timeZone, year: 'numeric', month: '2-digit', day: '2-digit', hour: '2-digit', minute: '2-digit', second: '2-digit', hour12: false }; + const formatter = new Intl.DateTimeFormat('en-US', options); + const parts = formatter.formatToParts(date); + let year, month, day, hour, minute, second; + for (const part of parts) { + switch (part.type) { + case 'year': year = part.value; break; + case 'month': month = part.value; break; + case 'day': day = part.value; break; + case 'hour': hour = part.value; break; + case 'minute': minute = part.value; break; + case 'second': second = part.value; break; + default: break; + } + } + const getOffset = (date, timeZone) => { + const tzDate = new Date(date.toLocaleString('en-US', { timeZone })); + const utcDate = new Date(date.toLocaleString('en-US', { timeZone: 'UTC' })); + const offset = (tzDate - utcDate) / 60000; // in minutes + const sign = offset >= 0 ? 
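// offset is the zone's UTC offset in whole minutes; render it as ±HH:MM for the ISO string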
'+' : '-'; + const absOffset = Math.abs(offset); + const hours = Math.floor(absOffset / 60); + const minutes = absOffset % 60; + return `${sign}${String(hours).padStart(2, '0')}:${String(minutes).padStart(2, '0')}`; + }; + let offset = 'Z'; + if (timeZone !== 'UTC') { + offset = getOffset(date, timeZone); + } + let isoString = `${year}-${month}-${day}T${hour}:${minute}:${second}`; + isoString += `.${milliseconds.toString().padStart(3, '0')}`; + isoString += offset; + return isoString; + } +} +function NotImplementedError (message, payload) { return __newHogError('NotImplementedError', message, payload) } +function HogError (type, message, payload) { return __newHogError(type, message, payload) } +function __newHogError(type, message, payload) { + let error = new Error(message || 'An error occurred'); + error.__hogError__ = true + error.type = type + error.payload = payload + return error +} + +function FishError(message) { + return HogError("FishError", message); +} +function FoodError(message) { + return HogError("FoodError", message); +} +try { + throw FishError("You forgot to feed your fish"); +} catch (__error) { if (__error.type === "FoodError") { let e = __error; +print(concat("Problem with your food: ", __getProperty(e, "message", true))); +} + else if (__error.type === "FishError") { let e = __error; +print(concat("Problem with your fish: ", __getProperty(e, "message", true))); +} + else { throw __error; }} +try { + throw FoodError("Your fish are hungry"); +} catch (__error) { if (__error.type === "FoodError") { let e = __error; +print(concat("Problem with your food: ", __getProperty(e, "message", true))); +} + else if (__error.type === "FishError") { let e = __error; +print(concat("Problem with your fish: ", __getProperty(e, "message", true))); +} + else { throw __error; }} +try { + throw NotImplementedError("Your fish are hungry"); +} catch (__error) { if (__error.type === "FoodError") { let e = __error; +print(concat("Problem with your food: ", __getProperty(e, "message", true))); +} + else if (true) { let e = __error; +print(concat("Unknown problem: ", e)); +} +} diff --git a/hogvm/__tests__/__snapshots__/catch2.hoge b/hogvm/__tests__/__snapshots__/catch2.hoge index ce7f940a43a..614e1dfb0b7 100644 --- a/hogvm/__tests__/__snapshots__/catch2.hoge +++ b/hogvm/__tests__/__snapshots__/catch2.hoge @@ -6,10 +6,10 @@ "You forgot to feed your fish", 2, "HogError", 2, 49, 51, 39, 32, 36, 0, 32, "type", 45, 32, "FoodError", 36, 1, 11, 40, 16, 32, "Problem with your food: ", 36, 0, 32, "message", 45, 2, "concat", 2, 2, "print", 1, 35, 39, 2, 35, 49, 35, 35, 51, 39, 55, 36, 0, 32, "type", 45, 32, "FishError", 36, 1, 11, 40, 16, 32, "FishError: ", 36, 0, 32, "message", 45, 2, -"concat", 2, 2, "print", 1, 35, 39, 25, 32, "Error of type ", 36, 0, 32, "name", 45, 32, ": ", 36, 0, 32, "message", 45, +"concat", 2, 2, "print", 1, 35, 39, 25, 32, "Error of type ", 36, 0, 32, "type", 45, 32, ": ", 36, 0, 32, "message", 45, 2, "concat", 4, 2, "print", 1, 35, 39, 2, 35, 49, 35, 35, 50, 49, 50, 12, 32, "FishError", 32, "You forgot to feed your fish", 2, "HogError", 2, 49, 51, 39, 32, 36, 0, 32, "type", 45, 32, "FoodError", 36, 1, 11, 40, 16, 32, "Problem with your food: ", 36, 0, 32, "message", 45, 2, "concat", 2, 2, "print", 1, 35, 39, 2, 35, 49, 35, 35, -51, 39, 55, 36, 0, 32, "type", 45, 32, "Error of type ", 36, 0, 32, "name", 45, 32, ": ", 36, 0, 32, "message", 45, 2, +51, 39, 55, 36, 0, 32, "type", 45, 32, "Error of type ", 36, 0, 32, "type", 45, 32, ": ", 36, 0, 32, "message", 45, 2, "concat", 
4, 2, "print", 1, 35, 39, 25, 32, "FishError", 36, 1, 11, 40, 16, 32, "FishError: ", 36, 0, 32, "message", 45, 2, "concat", 2, 2, "print", 1, 35, 39, 2, 35, 49, 35, 35] diff --git a/hogvm/__tests__/__snapshots__/catch2.js b/hogvm/__tests__/__snapshots__/catch2.js new file mode 100644 index 00000000000..ffc20ac2811 --- /dev/null +++ b/hogvm/__tests__/__snapshots__/catch2.js @@ -0,0 +1,142 @@ +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function concat (...args) { return args.map((arg) => (arg === null ? '' : __STLToString(arg))).join('') } +function __getProperty(objectOrArray, key, nullish) { + if ((nullish && !objectOrArray) || key === 0) { return null } + if (Array.isArray(objectOrArray)) { + return key > 0 ? objectOrArray[key - 1] : objectOrArray[objectOrArray.length + key] + } else { + return objectOrArray[key] + } +} +function __STLToString(arg) { + if (arg && __isHogDate(arg)) { return `${arg.year}-${arg.month.toString().padStart(2, '0')}-${arg.day.toString().padStart(2, '0')}`; } + else if (arg && __isHogDateTime(arg)) { return __DateTimeToString(arg); } + return __printHogStringOutput(arg); } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? `, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 
'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __DateTimeToString(dt) { + if (__isHogDateTime(dt)) { + const date = new Date(dt.dt * 1000); + const timeZone = dt.zone || 'UTC'; + const milliseconds = Math.floor(dt.dt * 1000 % 1000); + const options = { timeZone, year: 'numeric', month: '2-digit', day: '2-digit', hour: '2-digit', minute: '2-digit', second: '2-digit', hour12: false }; + const formatter = new Intl.DateTimeFormat('en-US', options); + const parts = formatter.formatToParts(date); + let year, month, day, hour, minute, second; + for (const part of parts) { + switch (part.type) { + case 'year': year = part.value; break; + case 'month': month = part.value; break; + case 'day': day = part.value; break; + case 'hour': hour = part.value; break; + case 'minute': minute = part.value; break; + case 'second': second = part.value; break; + default: break; + } + } + const getOffset = (date, timeZone) => { + const tzDate = new Date(date.toLocaleString('en-US', { timeZone })); + const utcDate = new Date(date.toLocaleString('en-US', { timeZone: 'UTC' })); + const offset = (tzDate - utcDate) / 60000; // in minutes + const sign = offset >= 0 ? 
'+' : '-'; + const absOffset = Math.abs(offset); + const hours = Math.floor(absOffset / 60); + const minutes = absOffset % 60; + return `${sign}${String(hours).padStart(2, '0')}:${String(minutes).padStart(2, '0')}`; + }; + let offset = 'Z'; + if (timeZone !== 'UTC') { + offset = getOffset(date, timeZone); + } + let isoString = `${year}-${month}-${day}T${hour}:${minute}:${second}`; + isoString += `.${milliseconds.toString().padStart(3, '0')}`; + isoString += offset; + return isoString; + } +} +function HogError (type, message, payload) { return __newHogError(type, message, payload) } +function __newHogError(type, message, payload) { + let error = new Error(message || 'An error occurred'); + error.__hogError__ = true + error.type = type + error.payload = payload + return error +} + +try { + try { + throw HogError("FishError", "You forgot to feed your fish"); + } catch (__error) { if (__error.type === "FoodError") { let e = __error; + print(concat("Problem with your food: ", __getProperty(e, "message", true))); + } + else { throw __error; }} +} catch (__error) { if (__error.type === "FishError") { let e = __error; +print(concat("FishError: ", __getProperty(e, "message", true))); +} + else if (true) { let e = __error; +print(concat("Error: ", __getProperty(e, "message", true))); +} +} +try { + try { + throw HogError("FunkyError", "You forgot to feed your fish"); + } catch (__error) { if (__error.type === "FoodError") { let e = __error; + print(concat("Problem with your food: ", __getProperty(e, "message", true))); + } + else { throw __error; }} +} catch (__error) { if (__error.type === "FishError") { let e = __error; +print(concat("FishError: ", __getProperty(e, "message", true))); +} + else if (true) { let e = __error; +print(concat("Error of type ", __getProperty(e, "type", true), ": ", __getProperty(e, "message", true))); +} +} +try { + try { + throw HogError("FishError", "You forgot to feed your fish"); + } catch (__error) { if (__error.type === "FoodError") { let e = __error; + print(concat("Problem with your food: ", __getProperty(e, "message", true))); + } + else { throw __error; }} +} catch (__error) { if (true) { let e = __error; +print(concat("Error of type ", __getProperty(e, "type", true), ": ", __getProperty(e, "message", true))); +} + else if (__error.type === "FishError") { let e = __error; +print(concat("FishError: ", __getProperty(e, "message", true))); +} +} diff --git a/hogvm/__tests__/__snapshots__/catch2.stdout b/hogvm/__tests__/__snapshots__/catch2.stdout index f30ba83b8cf..7ee82d979af 100644 --- a/hogvm/__tests__/__snapshots__/catch2.stdout +++ b/hogvm/__tests__/__snapshots__/catch2.stdout @@ -1,3 +1,3 @@ FishError: You forgot to feed your fish -Error of type : You forgot to feed your fish -Error of type : You forgot to feed your fish +Error of type FunkyError: You forgot to feed your fish +Error of type FishError: You forgot to feed your fish diff --git a/hogvm/__tests__/__snapshots__/crypto.js b/hogvm/__tests__/__snapshots__/crypto.js new file mode 100644 index 00000000000..b842d251c8b --- /dev/null +++ b/hogvm/__tests__/__snapshots__/crypto.js @@ -0,0 +1,49 @@ +function sha256HmacChainHex (data, options) { return 'sha256HmacChainHex not implemented' } +function sha256Hex (str, options) { return 'SHA256 is not implemented' } +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function md5Hex(string) { return 'MD5 is not implemented' } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return 
__printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? `, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} + +let string = "this is a secure string"; +print("string:", string); +print("md5Hex(string):", md5Hex(string)); +print("sha256Hex(string):", sha256Hex(string)); +let data = ["1", "string", "more", "keys"]; +print("data:", data); +print("sha256HmacChainHex(data):", sha256HmacChainHex(data)); diff --git a/hogvm/__tests__/__snapshots__/date.js b/hogvm/__tests__/__snapshots__/date.js new file mode 100644 index 00000000000..71af223af0e --- /dev/null +++ b/hogvm/__tests__/__snapshots__/date.js @@ -0,0 +1,171 @@ +function toUnixTimestampMilli (input, zone) { return __toUnixTimestampMilli(input, zone) } +function toUnixTimestamp (input, zone) { return __toUnixTimestamp(input, zone) } +function toTimeZone (input, zone) { return __toTimeZone(input, zone) } +function toString (value) { return __STLToString(value) } +function toInt(value) { + if (__isHogDateTime(value)) { return Math.floor(value.dt); } + else if (__isHogDate(value)) { const date = new Date(Date.UTC(value.year, 
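// JS Date months are 0-based; a HogDate converts to whole days since the Unix epoch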
value.month - 1, value.day)); const epoch = new Date(Date.UTC(1970, 0, 1)); const diffInDays = Math.floor((date - epoch) / (1000 * 60 * 60 * 24)); return diffInDays; } + return !isNaN(parseInt(value)) ? parseInt(value) : null; } +function toFloat(value) { + if (__isHogDateTime(value)) { return value.dt; } + else if (__isHogDate(value)) { const date = new Date(Date.UTC(value.year, value.month - 1, value.day)); const epoch = new Date(Date.UTC(1970, 0, 1)); const diffInDays = (date - epoch) / (1000 * 60 * 60 * 24); return diffInDays; } + return !isNaN(parseFloat(value)) ? parseFloat(value) : null; } +function toDateTime (input, zone) { return __toDateTime(input, zone) } +function toDate (input) { return __toDate(input) } +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function fromUnixTimestampMilli (input) { return __fromUnixTimestampMilli(input) } +function fromUnixTimestamp (input) { return __fromUnixTimestamp(input) } +function __toUnixTimestampMilli(input, zone) { return __toUnixTimestamp(input, zone) * 1000 } +function __toUnixTimestamp(input, zone) { + if (__isHogDateTime(input)) { return input.dt; } + if (__isHogDate(input)) { return __toHogDateTime(input).dt; } + const date = new Date(input); + if (isNaN(date.getTime())) { throw new Error('Invalid date input'); } + return Math.floor(date.getTime() / 1000);} +function __toTimeZone(input, zone) { if (!__isHogDateTime(input)) { throw new Error('Expected a DateTime') }; return { ...input, zone }} +function __toDateTime(input, zone) { let dt; + if (typeof input === 'number') { dt = input; } + else { const date = new Date(input); if (isNaN(date.getTime())) { throw new Error('Invalid date input'); } dt = date.getTime() / 1000; } + return { __hogDateTime__: true, dt: dt, zone: zone || 'UTC' }; } +function __toDate(input) { let date; + if (typeof input === 'number') { date = new Date(input * 1000); } else { date = new Date(input); } + if (isNaN(date.getTime())) { throw new Error('Invalid date input'); } + return { __hogDate__: true, year: date.getUTCFullYear(), month: date.getUTCMonth() + 1, day: date.getUTCDate() }; } +function __fromUnixTimestampMilli(input) { return __toHogDateTime(input / 1000) } +function __fromUnixTimestamp(input) { return __toHogDateTime(input) } +function __toHogDateTime(timestamp, zone) { + if (__isHogDate(timestamp)) { + const date = new Date(Date.UTC(timestamp.year, timestamp.month - 1, timestamp.day)); + const dt = date.getTime() / 1000; + return { __hogDateTime__: true, dt: dt, zone: zone || 'UTC' }; + } + return { __hogDateTime__: true, dt: timestamp, zone: zone || 'UTC' }; } +function __STLToString(arg) { + if (arg && __isHogDate(arg)) { return `${arg.year}-${arg.month.toString().padStart(2, '0')}-${arg.day.toString().padStart(2, '0')}`; } + else if (arg && __isHogDateTime(arg)) { return __DateTimeToString(arg); } + return __printHogStringOutput(arg); } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? 
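// 0- and 1-element tuples print with an explicit tuple() wrapper so they are not
// mistaken for plain parenthesised values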
`tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? `, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __DateTimeToString(dt) { + if (__isHogDateTime(dt)) { + const date = new Date(dt.dt * 1000); + const timeZone = dt.zone || 'UTC'; + const milliseconds = Math.floor(dt.dt * 1000 % 1000); + const options = { timeZone, year: 'numeric', month: '2-digit', day: '2-digit', hour: '2-digit', minute: '2-digit', second: '2-digit', hour12: false }; + const formatter = new Intl.DateTimeFormat('en-US', options); + const parts = formatter.formatToParts(date); + let year, month, day, hour, minute, second; + for (const part of parts) { + switch (part.type) { + case 'year': year = part.value; break; + case 'month': month = part.value; break; + case 'day': day = part.value; break; + case 'hour': hour = part.value; break; + case 'minute': minute = part.value; break; + case 'second': second = part.value; break; + default: break; + } + } + const getOffset = (date, timeZone) => { + const tzDate = new Date(date.toLocaleString('en-US', { timeZone })); + const utcDate = new Date(date.toLocaleString('en-US', { timeZone: 'UTC' })); + const offset = (tzDate - utcDate) / 60000; // in minutes + const sign = offset >= 0 ? 
'+' : '-'; + const absOffset = Math.abs(offset); + const hours = Math.floor(absOffset / 60); + const minutes = absOffset % 60; + return `${sign}${String(hours).padStart(2, '0')}:${String(minutes).padStart(2, '0')}`; + }; + let offset = 'Z'; + if (timeZone !== 'UTC') { + offset = getOffset(date, timeZone); + } + let isoString = `${year}-${month}-${day}T${hour}:${minute}:${second}`; + isoString += `.${milliseconds.toString().padStart(3, '0')}`; + isoString += offset; + return isoString; + } +} + +let dt = fromUnixTimestamp(1234334543); +print(dt); +print(toString(dt)); +print(toInt(toUnixTimestamp(dt))); +print("-"); +let dt2 = toDate("2024-05-03"); +print(dt2); +print(toString(dt2)); +print(toInt(toUnixTimestamp(dt2))); +print("-"); +let dt3 = toDateTime("2024-05-03T12:34:56Z"); +print(dt3); +print(toString(dt3)); +print(toInt(toUnixTimestamp(dt3))); +print("------"); +print(toTimeZone(dt3, "Europe/Brussels")); +print(toString(toTimeZone(dt3, "Europe/Brussels"))); +print("-"); +print(toTimeZone(dt3, "Europe/Tallinn")); +print(toString(toTimeZone(dt3, "Europe/Tallinn"))); +print("-"); +print(toTimeZone(dt3, "America/New_York")); +print(toString(toTimeZone(dt3, "America/New_York"))); +print("------"); +let timestamp = fromUnixTimestamp(1234334543.123); +print("timestamp: ", timestamp); +print("toString(timestamp): ", toString(timestamp)); +print("toInt(timestamp): ", toInt(timestamp)); +print("toDateTime(toInt(timestamp)): ", toDateTime(toInt(timestamp))); +print("toInt(toDateTime(toInt(timestamp))): ", toInt(toDateTime(toInt(timestamp)))); +print("toString(toDateTime(toInt(timestamp))): ", toString(toDateTime(toInt(timestamp)))); +print("toFloat(timestamp): ", toFloat(timestamp)); +print("toDateTime(toFloat(timestamp)): ", toDateTime(toFloat(timestamp))); +print("toFloat(toDateTime(toFloat(timestamp))): ", toFloat(toDateTime(toFloat(timestamp)))); +print("toString(toDateTime(toFloat(timestamp))): ", toString(toDateTime(toFloat(timestamp)))); +print("------"); +let millisTs = fromUnixTimestampMilli(1234334543123); +print("millisTs: ", millisTs); +print("toString(millisTs): ", toString(millisTs)); +print("toInt(millisTs): ", toInt(millisTs)); +print("toFloat(millisTs): ", toFloat(millisTs)); +print("toUnixTimestampMilli(millisTs): ", toUnixTimestampMilli(millisTs)); +print("------"); +let date = toDate("2024-05-03"); +print(date); +print(toString(date)); +print(toInt(date)); diff --git a/hogvm/__tests__/__snapshots__/dateFormat.js b/hogvm/__tests__/__snapshots__/dateFormat.js new file mode 100644 index 00000000000..a8b96d923e0 --- /dev/null +++ b/hogvm/__tests__/__snapshots__/dateFormat.js @@ -0,0 +1,286 @@ +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function fromUnixTimestamp (input) { return __fromUnixTimestamp(input) } +function formatDateTime (input, format, zone) { return __formatDateTime(input, format, zone) } +function concat (...args) { return args.map((arg) => (arg === null ?
'' : __STLToString(arg))).join('') } +function __fromUnixTimestamp(input) { return __toHogDateTime(input) } +function __toHogDateTime(timestamp, zone) { + if (__isHogDate(timestamp)) { + const date = new Date(Date.UTC(timestamp.year, timestamp.month - 1, timestamp.day)); + const dt = date.getTime() / 1000; + return { __hogDateTime__: true, dt: dt, zone: zone || 'UTC' }; + } + return { __hogDateTime__: true, dt: timestamp, zone: zone || 'UTC' }; } +function __formatDateTime(input, format, zone) { + if (!__isHogDateTime(input)) { throw new Error('Expected a DateTime'); } + if (!format) { throw new Error('formatDateTime requires at least 2 arguments'); } + const timestamp = input.dt * 1000; + let date = new Date(timestamp); + if (!zone) { zone = 'UTC'; } + const padZero = (num, len = 2) => String(num).padStart(len, '0'); + const padSpace = (num, len = 2) => String(num).padStart(len, ' '); + const getDateComponent = (type, options = {}) => { + const formatter = new Intl.DateTimeFormat('en-US', { ...options, timeZone: zone }); + const parts = formatter.formatToParts(date); + const part = parts.find(p => p.type === type); + return part ? part.value : ''; + }; + const getNumericComponent = (type, options = {}) => { + const value = getDateComponent(type, options); + return parseInt(value, 10); + }; + const getWeekNumber = (d) => { + const dateInZone = new Date(d.toLocaleString('en-US', { timeZone: zone })); + const target = new Date(Date.UTC(dateInZone.getFullYear(), dateInZone.getMonth(), dateInZone.getDate())); + const dayNr = (target.getUTCDay() + 6) % 7; + target.setUTCDate(target.getUTCDate() - dayNr + 3); + const firstThursday = new Date(Date.UTC(target.getUTCFullYear(), 0, 4)); + const weekNumber = 1 + Math.round(((target - firstThursday) / 86400000 - 3 + ((firstThursday.getUTCDay() + 6) % 7)) / 7); + return weekNumber; + }; + const getDayOfYear = (d) => { + const startOfYear = new Date(Date.UTC(d.getUTCFullYear(), 0, 1)); + const dateInZone = new Date(d.toLocaleString('en-US', { timeZone: zone })); + const diff = dateInZone - startOfYear; + return Math.floor(diff / 86400000) + 1; + }; + // Token mapping with corrections + const tokens = { + '%a': () => getDateComponent('weekday', { weekday: 'short' }), + '%b': () => getDateComponent('month', { month: 'short' }), + '%c': () => padZero(getNumericComponent('month', { month: '2-digit' })), + '%C': () => getDateComponent('year', { year: '2-digit' }), + '%d': () => padZero(getNumericComponent('day', { day: '2-digit' })), + '%D': () => { + const month = padZero(getNumericComponent('month', { month: '2-digit' })); + const day = padZero(getNumericComponent('day', { day: '2-digit' })); + const year = getDateComponent('year', { year: '2-digit' }); + return `${month}/${day}/${year}`; + }, + '%e': () => padSpace(getNumericComponent('day', { day: 'numeric' })), + '%F': () => { + const year = getNumericComponent('year', { year: 'numeric' }); + const month = padZero(getNumericComponent('month', { month: '2-digit' })); + const day = padZero(getNumericComponent('day', { day: '2-digit' })); + return `${year}-${month}-${day}`; + }, + '%g': () => getDateComponent('year', { year: '2-digit' }), + '%G': () => getNumericComponent('year', { year: 'numeric' }), + '%h': () => padZero(getNumericComponent('hour', { hour: '2-digit', hour12: true })), + '%H': () => padZero(getNumericComponent('hour', { hour: '2-digit', hour12: false })), + '%i': () => padZero(getNumericComponent('minute', { minute: '2-digit' })), + '%I': () => padZero(getNumericComponent('hour', { hour: 
'2-digit', hour12: true })), + '%j': () => padZero(getDayOfYear(date), 3), + '%k': () => padSpace(getNumericComponent('hour', { hour: 'numeric', hour12: false })), + '%l': () => padZero(getNumericComponent('hour', { hour: '2-digit', hour12: true })), + '%m': () => padZero(getNumericComponent('month', { month: '2-digit' })), + '%M': () => getDateComponent('month', { month: 'long' }), + '%n': () => '\n', + '%p': () => getDateComponent('dayPeriod', { hour: 'numeric', hour12: true }), + '%r': () => { + const hour = padZero(getNumericComponent('hour', { hour: '2-digit', hour12: true })); + const minute = padZero(getNumericComponent('minute', { minute: '2-digit' })); + const period = getDateComponent('dayPeriod', { hour: 'numeric', hour12: true }); + return `${hour}:${minute} ${period}`; + }, + '%R': () => { + const hour = padZero(getNumericComponent('hour', { hour: '2-digit', hour12: false })); + const minute = padZero(getNumericComponent('minute', { minute: '2-digit' })); + return `${hour}:${minute}`; + }, + '%s': () => padZero(getNumericComponent('second', { second: '2-digit' })), + '%S': () => padZero(getNumericComponent('second', { second: '2-digit' })), + '%t': () => '\t', + '%T': () => { + const hour = padZero(getNumericComponent('hour', { hour: '2-digit', hour12: false })); + const minute = padZero(getNumericComponent('minute', { minute: '2-digit' })); + const second = padZero(getNumericComponent('second', { second: '2-digit' })); + return `${hour}:${minute}:${second}`; + }, + '%u': () => { + let day = getDateComponent('weekday', { weekday: 'short' }); + const dayMap = { 'Mon': '1', 'Tue': '2', 'Wed': '3', 'Thu': '4', 'Fri': '5', 'Sat': '6', 'Sun': '7' }; + return dayMap[day]; + }, + '%V': () => padZero(getWeekNumber(date)), + '%w': () => { + let day = getDateComponent('weekday', { weekday: 'short' }); + const dayMap = { 'Sun': '0', 'Mon': '1', 'Tue': '2', 'Wed': '3', 'Thu': '4', 'Fri': '5', 'Sat': '6' }; + return dayMap[day]; + }, + '%W': () => getDateComponent('weekday', { weekday: 'long' }), + '%y': () => getDateComponent('year', { year: '2-digit' }), + '%Y': () => getNumericComponent('year', { year: 'numeric' }), + '%z': () => { + if (zone === 'UTC') { + return '+0000'; + } else { + const formatter = new Intl.DateTimeFormat('en-US', { + timeZone: zone, + timeZoneName: 'shortOffset', + }); + const parts = formatter.formatToParts(date); + const offsetPart = parts.find(part => part.type === 'timeZoneName'); + if (offsetPart && offsetPart.value) { + const offsetValue = offsetPart.value; + const match = offsetValue.match(/GMT([+-]\d{1,2})(?::(\d{2}))?/); + if (match) { + const sign = match[1][0]; + const hours = padZero(Math.abs(parseInt(match[1], 10))); + const minutes = padZero(match[2] ?
parseInt(match[2], 10) : 0); + return `${sign}${hours}${minutes}`; + } + } + return ''; + } + }, + '%%': () => '%', + }; + + // Replace tokens in the format string + let result = ''; + let i = 0; + while (i < format.length) { + if (format[i] === '%') { + const token = format.substring(i, i + 2); + if (tokens[token]) { + result += tokens[token](); + i += 2; + } else { + // If token not found, include '%' and move to next character + result += format[i]; + i += 1; + } + } else { + result += format[i]; + i += 1; + } + } + + return result; +} +function __STLToString(arg) { + if (arg && __isHogDate(arg)) { return `${arg.year}-${arg.month.toString().padStart(2, '0')}-${arg.day.toString().padStart(2, '0')}`; } + else if (arg && __isHogDateTime(arg)) { return __DateTimeToString(arg); } + return __printHogStringOutput(arg); } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? `, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 
'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __DateTimeToString(dt) { + if (__isHogDateTime(dt)) { + const date = new Date(dt.dt * 1000); + const timeZone = dt.zone || 'UTC'; + const milliseconds = Math.floor(dt.dt * 1000 % 1000); + const options = { timeZone, year: 'numeric', month: '2-digit', day: '2-digit', hour: '2-digit', minute: '2-digit', second: '2-digit', hour12: false }; + const formatter = new Intl.DateTimeFormat('en-US', options); + const parts = formatter.formatToParts(date); + let year, month, day, hour, minute, second; + for (const part of parts) { + switch (part.type) { + case 'year': year = part.value; break; + case 'month': month = part.value; break; + case 'day': day = part.value; break; + case 'hour': hour = part.value; break; + case 'minute': minute = part.value; break; + case 'second': second = part.value; break; + default: break; + } + } + const getOffset = (date, timeZone) => { + const tzDate = new Date(date.toLocaleString('en-US', { timeZone })); + const utcDate = new Date(date.toLocaleString('en-US', { timeZone: 'UTC' })); + const offset = (tzDate - utcDate) / 60000; // in minutes + const sign = offset >= 0 ? 
'+' : '-'; + const absOffset = Math.abs(offset); + const hours = Math.floor(absOffset / 60); + const minutes = absOffset % 60; + return `${sign}${String(hours).padStart(2, '0')}:${String(minutes).padStart(2, '0')}`; + }; + let offset = 'Z'; + if (timeZone !== 'UTC') { + offset = getOffset(date, timeZone); + } + let isoString = `${year}-${month}-${day}T${hour}:${minute}:${second}`; + isoString += `.${milliseconds.toString().padStart(3, '0')}`; + isoString += offset; + return isoString; + } +} + +let dt = fromUnixTimestamp(1234377543.123456); +print(formatDateTime(dt, "%Y-%m-%d %H:%i:%S")); +print(formatDateTime(dt, "%Y-%m-%d %H:%i:%S", "Europe/Brussels")); +print(formatDateTime(dt, "%Y-%m-%d %H:%i:%S", "America/New_York")); +print(formatDateTime(dt, "%Y%m%dT%H%i%sZ")); +print("-----"); +print(concat("%a: ", formatDateTime(dt, "%a"))); +print(concat("%b: ", formatDateTime(dt, "%b"))); +print(concat("%c: ", formatDateTime(dt, "%c"))); +print(concat("%C: ", formatDateTime(dt, "%C"))); +print(concat("%d: ", formatDateTime(dt, "%d"))); +print(concat("%D: ", formatDateTime(dt, "%D"))); +print(concat("%e: ", formatDateTime(dt, "%e"))); +print(concat("%F: ", formatDateTime(dt, "%F"))); +print(concat("%g: ", formatDateTime(dt, "%g"))); +print(concat("%G: ", formatDateTime(dt, "%G"))); +print(concat("%h: ", formatDateTime(dt, "%h"))); +print(concat("%H: ", formatDateTime(dt, "%H"))); +print(concat("%i: ", formatDateTime(dt, "%i"))); +print(concat("%I: ", formatDateTime(dt, "%I"))); +print(concat("%j: ", formatDateTime(dt, "%j"))); +print(concat("%k: ", formatDateTime(dt, "%k"))); +print(concat("%l: ", formatDateTime(dt, "%l"))); +print(concat("%m: ", formatDateTime(dt, "%m"))); +print(concat("%M: ", formatDateTime(dt, "%M"))); +print(concat("%n: ", formatDateTime(dt, "%n"))); +print(concat("%p: ", formatDateTime(dt, "%p"))); +print(concat("%r: ", formatDateTime(dt, "%r"))); +print(concat("%R: ", formatDateTime(dt, "%R"))); +print(concat("%s: ", formatDateTime(dt, "%s"))); +print(concat("%S: ", formatDateTime(dt, "%S"))); +print(concat("%t: ", formatDateTime(dt, "%t"))); +print(concat("%T: ", formatDateTime(dt, "%T"))); +print(concat("%u: ", formatDateTime(dt, "%u"))); +print(concat("%V: ", formatDateTime(dt, "%V"))); +print(concat("%w: ", formatDateTime(dt, "%w"))); +print(concat("%W: ", formatDateTime(dt, "%W"))); +print(concat("%y: ", formatDateTime(dt, "%y"))); +print(concat("%Y: ", formatDateTime(dt, "%Y"))); +print(concat("%z: ", formatDateTime(dt, "%z"))); +print(concat("%%: ", formatDateTime(dt, "%%"))); +print("-----"); +print(formatDateTime(dt, "one banana")); +print(formatDateTime(dt, "%Y no way %m is this %d a %H real %i time %S")); diff --git a/hogvm/__tests__/__snapshots__/dicts.hoge b/hogvm/__tests__/__snapshots__/dicts.hoge index f0c4895e60b..20c457f34e6 100644 --- a/hogvm/__tests__/__snapshots__/dicts.hoge +++ b/hogvm/__tests__/__snapshots__/dicts.hoge @@ -1,6 +1,6 @@ ["_H", 1, 42, 0, 2, "print", 1, 35, 32, "key", 32, "value", 42, 1, 2, "print", 1, 35, 32, "key", 32, "value", 32, "other", 32, "thing", 42, 2, 2, "print", 1, 35, 32, "key", 32, "otherKey", 32, "value", 42, 1, 42, 1, 2, "print", 1, 35, -33, 3, 36, 0, 32, "value", 42, 1, 2, "print", 1, 35, 32, "key", 32, "value", 42, 1, 32, "key", 45, 2, "print", 1, 35, +32, "kk", 36, 0, 32, "value", 42, 1, 2, "print", 1, 35, 32, "key", 32, "value", 42, 1, 32, "key", 45, 2, "print", 1, 35, 32, "key", 32, "value", 42, 1, 32, "key", 45, 2, "print", 1, 35, 32, "key", 32, "otherKey", 32, "value", 42, 1, 42, 1, 32, "key", 45, 32, "otherKey", 
45, 2, "print", 1, 35, 32, "key", 32, "otherKey", 32, "value", 42, 1, 42, 1, 32, "key", 45, 32, "otherKey", 45, 2, "print", 1, 35, 35] diff --git a/hogvm/__tests__/__snapshots__/dicts.js b/hogvm/__tests__/__snapshots__/dicts.js new file mode 100644 index 00000000000..a85b45440d5 --- /dev/null +++ b/hogvm/__tests__/__snapshots__/dicts.js @@ -0,0 +1,57 @@ +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? `, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __getProperty(objectOrArray, key, nullish) { + if ((nullish && !objectOrArray) || key === 0) { return null } + if (Array.isArray(objectOrArray)) { + return key > 0 ? 
objectOrArray[key - 1] : objectOrArray[objectOrArray.length + key] + } else { + return objectOrArray[key] + } +} +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} + +print({}); +print({"key": "value"}); +print({"key": "value", "other": "thing"}); +print({"key": {"otherKey": "value"}}); +let key = "kk"; +print({[key]: "value"}); +print(__getProperty({"key": "value"}, "key", false)); +print(__getProperty({"key": "value"}, "key", false)); +print(__getProperty(__getProperty({"key": {"otherKey": "value"}}, "key", false), "otherKey", false)); +print(__getProperty(__getProperty({"key": {"otherKey": "value"}}, "key", false), "otherKey", false)); diff --git a/hogvm/__tests__/__snapshots__/dicts.stdout b/hogvm/__tests__/__snapshots__/dicts.stdout index 33e60af57d4..337454355fa 100644 --- a/hogvm/__tests__/__snapshots__/dicts.stdout +++ b/hogvm/__tests__/__snapshots__/dicts.stdout @@ -2,7 +2,7 @@ {'key': 'value'} {'key': 'value', 'other': 'thing'} {'key': {'otherKey': 'value'}} -{3: 'value'} +{'kk': 'value'} value value value diff --git a/hogvm/__tests__/__snapshots__/exceptions.js b/hogvm/__tests__/__snapshots__/exceptions.js new file mode 100644 index 00000000000..46cb9b053a3 --- /dev/null +++ b/hogvm/__tests__/__snapshots__/exceptions.js @@ -0,0 +1,152 @@ +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function concat (...args) { return args.map((arg) => (arg === null ? '' : __STLToString(arg))).join('') } +function __STLToString(arg) { + if (arg && __isHogDate(arg)) { return `${arg.year}-${arg.month.toString().padStart(2, '0')}-${arg.day.toString().padStart(2, '0')}`; } + else if (arg && __isHogDateTime(arg)) { return __DateTimeToString(arg); } + return __printHogStringOutput(arg); } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? 
`, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __DateTimeToString(dt) { + if (__isHogDateTime(dt)) { + const date = new Date(dt.dt * 1000); + const timeZone = dt.zone || 'UTC'; + const milliseconds = Math.floor(dt.dt * 1000 % 1000); + const options = { timeZone, year: 'numeric', month: '2-digit', day: '2-digit', hour: '2-digit', minute: '2-digit', second: '2-digit', hour12: false }; + const formatter = new Intl.DateTimeFormat('en-US', options); + const parts = formatter.formatToParts(date); + let year, month, day, hour, minute, second; + for (const part of parts) { + switch (part.type) { + case 'year': year = part.value; break; + case 'month': month = part.value; break; + case 'day': day = part.value; break; + case 'hour': hour = part.value; break; + case 'minute': minute = part.value; break; + case 'second': second = part.value; break; + default: break; + } + } + const getOffset = (date, timeZone) => { + const tzDate = new Date(date.toLocaleString('en-US', { timeZone })); + const utcDate = new Date(date.toLocaleString('en-US', { timeZone: 'UTC' })); + const offset = (tzDate - utcDate) / 60000; // in minutes + const sign = offset >= 0 ? 
'+' : '-'; + const absOffset = Math.abs(offset); + const hours = Math.floor(absOffset / 60); + const minutes = absOffset % 60; + return `${sign}${String(hours).padStart(2, '0')}:${String(minutes).padStart(2, '0')}`; + }; + let offset = 'Z'; + if (timeZone !== 'UTC') { + offset = getOffset(date, timeZone); + } + let isoString = `${year}-${month}-${day}T${hour}:${minute}:${second}`; + isoString += `.${milliseconds.toString().padStart(3, '0')}`; + isoString += offset; + return isoString; + } +} +function __x_Error (message, payload) { return __newHogError('Error', message, payload) } +function __newHogError(type, message, payload) { + let error = new Error(message || 'An error occurred'); + error.__hogError__ = true + error.type = type + error.payload = payload + return error +} + +print("start"); +try { + print("try"); +} catch (__error) { if (true) { let e = __error; +print(concat(e, " was the exception")); +} +} +print("------------------"); +print("start"); +try { + print("try"); +} catch (__error) { if (true) { let e = __error; +print("No var for error, but no error"); +} +} +print("------------------"); +try { + print("try again"); + throw __x_Error(); +} catch (__error) { if (true) { let e = __error; +print(concat(e, " was the exception")); +} +} +print("------------------"); +try { + print("try again"); + throw __x_Error(); +} catch (__error) { if (true) { let e = __error; +print("No var for error"); +} +} +print("------------------"); +function third() { + print("Throwing in third"); + throw __x_Error("Threw in third"); +} +function second() { + print("second"); + third(); +} +function first() { + print("first"); + second(); +} +function base() { + print("base"); + try { + first(); + } catch (__error) { if (true) { let e = __error; + print(concat("Caught in base: ", e)); throw e; + } + } +} +try { + base(); +} catch (__error) { if (true) { let e = __error; +print(concat("Caught in root: ", e)); +} +} +print("The end"); diff --git a/hogvm/__tests__/__snapshots__/functionVars.js b/hogvm/__tests__/__snapshots__/functionVars.js new file mode 100644 index 00000000000..9f7ca6b8b4e --- /dev/null +++ b/hogvm/__tests__/__snapshots__/functionVars.js @@ -0,0 +1,72 @@ +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function base64Encode (str) { return Buffer.from(str).toString('base64') } +function base64Decode (str) { return Buffer.from(str, 'base64').toString() } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? 
`, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __lambda (fn) { return fn } +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} + +function execFunction() { + print("execFunction"); +} +function execFunctionNested() { + function execFunction() { + print("execFunctionNew"); + } + print("execFunctionNested"); + execFunction(); +} +execFunction(); +execFunctionNested(); +execFunction(); +print("--------"); +function secondExecFunction() { + print("secondExecFunction"); +} +function secondExecFunctionNested() { + print("secondExecFunctionNested"); + secondExecFunction(); +} +secondExecFunction(); +secondExecFunctionNested(); +secondExecFunction(); +print("--------"); +let decode = __lambda(() => base64Decode); +let sixtyFour = base64Encode; +print(sixtyFour("http://www.google.com")); +print(decode()(sixtyFour("http://www.google.com"))); +print(decode()(sixtyFour("http://www.google.com"))); diff --git a/hogvm/__tests__/__snapshots__/functions.js b/hogvm/__tests__/__snapshots__/functions.js new file mode 100644 index 00000000000..2241faa08aa --- /dev/null +++ b/hogvm/__tests__/__snapshots__/functions.js @@ -0,0 +1,124 @@ +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function empty (value) { + if (typeof value === 'object') { + if (Array.isArray(value)) { return value.length === 0 } else if (value === null) { return true } else if (value instanceof Map) { return value.size === 0 } + return Object.keys(value).length === 0 + } else if (typeof value === 'number' || typeof value === 'boolean') { return false } + return !value } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if 
(Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? `, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __lambda (fn) { return fn } +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} + +print("-- test functions --"); +function add(a, b) { + return (a + b); +} +print(add); +function add2(a, b) { + let c = (a + b); + return c; +} +print(add2); +function mult(a, b) { + return (a * b); +} +print(mult); +function noArgs() { + let url = "basdfasdf"; + let second = (2 + 3); + return second; +} +print(noArgs); +function empty() { + +} +function empty2() { + +} +function empty3() { + +} +function noReturn() { + let a = 1; + let b = 2; + let c = (a + b); +} +function emptyReturn() { + return null; +} +function emptyReturnBeforeOtherStuff() { + return null; + (2 + 2); +} +function emptyReturnBeforeOtherStuffNoSemicolon() { + return (2 + 2); +} +function ifThenReturn() { + if (false) { + return null; + } + return 4; +} +print(add(3, 4)); +print(((add(3, 4) + 100) + add(1, 1))); +print((noArgs() ?? -1)); +print((empty() ?? -1)); +print((empty2() ?? -1)); +print((empty3() ?? -1)); +print((noReturn() ?? -1)); +print((emptyReturn() ?? -1)); +print((emptyReturnBeforeOtherStuff() ?? -1)); +print((emptyReturnBeforeOtherStuffNoSemicolon() ?? -1)); +print((ifThenReturn() ?? 
-1)); +print(mult(((add(3, 4) + 100) + add(2, 1)), 2)); +print(mult(((add2(3, 4) + 100) + add2(2, 1)), 10)); +function printArgs(arg1, arg2, arg3, arg4, arg5, arg6, arg7) { + print(arg1, arg2, arg3, arg4, arg5, arg6, arg7); +} +let printArgs2 = __lambda((arg1, arg2, arg3, arg4, arg5, arg6, arg7) => { + print(arg1, arg2, arg3, arg4, arg5, arg6, arg7); + return null; +}); +printArgs(1, 2, 3, 4, 5, 6, 7); +printArgs2(1, 2, 3, 4, 5, 6, 7); +printArgs(1, 2, 3, 4, 5, 6); +printArgs2(1, 2, 3, 4, 5, 6); +printArgs(1, 2, 3, 4, 5); +printArgs2(1, 2, 3, 4, 5); +printArgs(); +printArgs2(); diff --git a/hogvm/__tests__/__snapshots__/ifElse.js b/hogvm/__tests__/__snapshots__/ifElse.js new file mode 100644 index 00000000000..995143425a2 --- /dev/null +++ b/hogvm/__tests__/__snapshots__/ifElse.js @@ -0,0 +1,69 @@ +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? `, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 
'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} + +print("-- test if else --"); +{ + if (true) { + print(1); + } else { + print(2); + } + if (true) { + print(1); + } else { + print(2); + } + if (false) { + print(1); + } else { + print(2); + } + if (true) { + print(1); + } else { + print(2); + } + let a = true; + if (a) { + let a = 3; + print((a + 2)); + } else { + print(2); + } +} diff --git a/hogvm/__tests__/__snapshots__/ifJump.js b/hogvm/__tests__/__snapshots__/ifJump.js new file mode 100644 index 00000000000..1a6e010aa53 --- /dev/null +++ b/hogvm/__tests__/__snapshots__/ifJump.js @@ -0,0 +1,61 @@ +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? `, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 
'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __getProperty(objectOrArray, key, nullish) { + if ((nullish && !objectOrArray) || key === 0) { return null } + if (Array.isArray(objectOrArray)) { + return key > 0 ? objectOrArray[key - 1] : objectOrArray[objectOrArray.length + key] + } else { + return objectOrArray[key] + } +} +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} + +let props = {}; +let email = __getProperty(props, "email", true); +if ((email == "")) { + print("ERROR - Email not found!"); + print("3"); +} +print("1"); +if ((email == "")) { + print("ERROR - Email not found!"); + print("3"); +} else { + print("else"); +} +print("1"); diff --git a/hogvm/__tests__/__snapshots__/json.js b/hogvm/__tests__/__snapshots__/json.js new file mode 100644 index 00000000000..d8bfeae1e32 --- /dev/null +++ b/hogvm/__tests__/__snapshots__/json.js @@ -0,0 +1,197 @@ +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function jsonStringify (value, spacing) { + function convert(x, marked) { + if (!marked) { marked = new Set() } + if (typeof x === 'object' && x !== null) { + if (marked.has(x)) { return null } + marked.add(x) + try { + if (x instanceof Map) { + const obj = {} + x.forEach((value, key) => { obj[convert(key, marked)] = convert(value, marked) }) + return obj + } + if (Array.isArray(x)) { return x.map((v) => convert(v, marked)) } + if (__isHogDateTime(x) || __isHogDate(x) || __isHogError(x)) { return x } + if (typeof x === 'function') { return `fn<${x.name || 'lambda'}(${x.length})>` } + const obj = {}; for (const key in x) { obj[key] = convert(x[key], marked) } + return obj + } finally { + marked.delete(x) + } + } + return x + } + if (spacing && typeof spacing === 'number' && spacing > 0) { + return JSON.stringify(convert(value), null, spacing) + } + return JSON.stringify(convert(value), (key, val) => typeof val === 'function' ? 
`fn<${val.name || 'lambda'}(${val.length})>` : val) +} +function jsonParse (str) { + function convert(x) { + if (Array.isArray(x)) { return x.map(convert) } + else if (typeof x === 'object' && x !== null) { + if (x.__hogDateTime__) { return __toHogDateTime(x.dt, x.zone) + } else if (x.__hogDate__) { return __toHogDate(x.year, x.month, x.day) + } else if (x.__hogError__) { return __newHogError(x.type, x.message, x.payload) } + const obj = {}; for (const key in x) { obj[key] = convert(x[key]) }; return obj } + return x } + return convert(JSON.parse(str)) } +function isValidJSON (str) { try { JSON.parse(str); return true } catch (e) { return false } } +function __toHogDateTime(timestamp, zone) { + if (__isHogDate(timestamp)) { + const date = new Date(Date.UTC(timestamp.year, timestamp.month - 1, timestamp.day)); + const dt = date.getTime() / 1000; + return { __hogDateTime__: true, dt: dt, zone: zone || 'UTC' }; + } + return { __hogDateTime__: true, dt: timestamp, zone: zone || 'UTC' }; } +function __toHogDate(year, month, day) { return { __hogDate__: true, year: year, month: month, day: day, } } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? `, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 
'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __newHogError(type, message, payload) { + let error = new Error(message || 'An error occurred'); + error.__hogError__ = true + error.type = type + error.payload = payload + return error +} +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} +function JSONLength (obj, ...path) { + try { if (typeof obj === 'string') { obj = JSON.parse(obj) } } catch (e) { return 0 } + if (typeof obj === 'object' && obj !== null) { + const value = __getNestedValue(obj, path, true) + if (Array.isArray(value)) { + return value.length + } else if (value instanceof Map) { + return value.size + } else if (typeof value === 'object' && value !== null) { + return Object.keys(value).length + } + } + return 0 } +function JSONHas (obj, ...path) { + let current = obj + for (const key of path) { + let currentParsed = current + if (typeof current === 'string') { try { currentParsed = JSON.parse(current) } catch (e) { return false } } + if (currentParsed instanceof Map) { if (!currentParsed.has(key)) { return false }; current = currentParsed.get(key) } + else if (typeof currentParsed === 'object' && currentParsed !== null) { + if (typeof key === 'number') { + if (Array.isArray(currentParsed)) { + if (key < 0) { if (key < -currentParsed.length) { return false }; current = currentParsed[currentParsed.length + key] } + else if (key === 0) { return false } + else { if (key > currentParsed.length) { return false }; current = currentParsed[key - 1] } + } else { return false } + } else { + if (!(key in currentParsed)) { return false } + current = currentParsed[key] + } + } else { return false } + } + return true } +function JSONExtractBool (obj, ...path) { + try { + if (typeof obj === 'string') { + obj = JSON.parse(obj) + } + } catch (e) { + return false + } + if (path.length > 0) { + obj = __getNestedValue(obj, path, true) + } + if (typeof obj === 'boolean') { + return obj + } + return false +} +function __getNestedValue(obj, path, allowNull = false) { + let current = obj + for (const key of path) { + if (current == null) { + return null + } + if (current instanceof Map) { + current = current.get(key) + } else if (typeof current === 'object' && current !== null) { + current = current[key] + } else { + return null + } + } + if (current === null && !allowNull) { + return null + } + return current +} + +print(jsonParse("[1,2,3]")); +let event = {"event": "$pageview", "properties": 
{"$browser": "Chrome", "$os": "Windows"}}; +let json = jsonStringify(event); +print(jsonParse(json)); +print("-- JSONHas --"); +print(JSONHas("{\"a\": \"hello\", \"b\": [-100, 200.0, 300]}", "b")); +print(JSONHas("{\"a\": \"hello\", \"b\": [-100, 200.0, 300]}", "b", 4)); +print(JSONHas({"a": "hello", "b": [-100, 200.0, 300]}, "b")); +print(JSONHas({"a": "hello", "b": [-100, 200.0, 300]}, "b", 4)); +print(JSONHas({"a": "hello", "b": [-100, 200.0, 300]}, "b", -2)); +print(JSONHas({"a": "hello", "b": [-100, 200.0, 300]}, "b", -4)); +print(JSONHas("[1,2,3]", 0)); +print(JSONHas("[1,2,[1,2]]", -1, 1)); +print(JSONHas("[1,2,[1,2]]", -1, -3)); +print(JSONHas("[1,2,[1,2]]", 1, 1)); +print("-- isValidJSON --"); +print(isValidJSON("{\"a\": \"hello\", \"b\": [-100, 200.0, 300]}")); +print(isValidJSON("not a json")); +print("-- JSONLength --"); +print(JSONLength("{\"a\": \"hello\", \"b\": [-100, 200.0, 300]}", "b")); +print(JSONLength("{\"a\": \"hello\", \"b\": [-100, 200.0, 300]}")); +print(JSONLength({"a": "hello", "b": [-100, 200.0, 300]}, "b")); +print(JSONLength({"a": "hello", "b": [-100, 200.0, 300]})); +print("-- JSONExtractBool --"); +print(JSONExtractBool("{\"a\": \"hello\", \"b\": true}", "b")); +print(JSONExtractBool("{\"a\": \"hello\", \"b\": false}", "b")); +print(JSONExtractBool("{\"a\": \"hello\", \"b\": 1}", "b")); +print(JSONExtractBool("{\"a\": \"hello\", \"b\": 0}", "b")); +print(JSONExtractBool("{\"a\": \"hello\", \"b\": \"true\"}", "b")); +print(JSONExtractBool("{\"a\": \"hello\", \"b\": \"false\"}", "b")); +print(JSONExtractBool(true)); +print(JSONExtractBool(false)); +print(JSONExtractBool(1)); +print(JSONExtractBool(0)); +print(JSONExtractBool("true")); +print(JSONExtractBool("false")); diff --git a/hogvm/__tests__/__snapshots__/keysValues.js b/hogvm/__tests__/__snapshots__/keysValues.js new file mode 100644 index 00000000000..0adcf886f2e --- /dev/null +++ b/hogvm/__tests__/__snapshots__/keysValues.js @@ -0,0 +1,54 @@ +function values (obj) { if (typeof obj === 'object' && obj !== null) { if (Array.isArray(obj)) { return [...obj] } else if (obj instanceof Map) { return Array.from(obj.values()) } return Object.values(obj) } return [] } +function tuple (...args) { const tuple = args.slice(); tuple.__isHogTuple = true; return tuple; } +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function keys (obj) { if (typeof obj === 'object' && obj !== null) { if (Array.isArray(obj)) { return Array.from(obj.keys()) } else if (obj instanceof Map) { return Array.from(obj.keys()) } return Object.keys(obj) } return [] } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? 
'' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? `, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} + +let a = [3, 4, 5]; +let b = tuple(3, 4, 5); +let c = {"key": "value", "other": "val"}; +print(">> A"); +print(keys(a)); +print(values(a)); +print(">> B"); +print(keys(b)); +print(values(b)); +print(">> C"); +print(keys(c)); +print(values(c)); diff --git a/hogvm/__tests__/__snapshots__/lambdas.js b/hogvm/__tests__/__snapshots__/lambdas.js new file mode 100644 index 00000000000..1e2719b7cbb --- /dev/null +++ b/hogvm/__tests__/__snapshots__/lambdas.js @@ -0,0 +1,129 @@ +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function jsonStringify (value, spacing) { + function convert(x, marked) { + if (!marked) { marked = new Set() } + if (typeof x === 'object' && x !== null) { + if (marked.has(x)) { return null } + marked.add(x) + try { + if (x instanceof Map) { + const obj = {} + x.forEach((value, key) => { obj[convert(key, marked)] = convert(value, marked) }) + return obj + } + if (Array.isArray(x)) { return x.map((v) => convert(v, marked)) } + if (__isHogDateTime(x) || __isHogDate(x) || __isHogError(x)) { return x } + if (typeof x === 'function') { return `fn<${x.name || 'lambda'}(${x.length})>` } + const obj = {}; for (const key in x) { obj[key] = convert(x[key], marked) } + return obj + } finally { + marked.delete(x) + } + } + return x + } + if (spacing && typeof spacing === 'number' && spacing > 0) { + return JSON.stringify(convert(value), null, spacing) + } + return JSON.stringify(convert(value), (key, val) => typeof val === 'function' ? 
`fn<${val.name || 'lambda'}(${val.length})>` : val) +} +function jsonParse (str) { + function convert(x) { + if (Array.isArray(x)) { return x.map(convert) } + else if (typeof x === 'object' && x !== null) { + if (x.__hogDateTime__) { return __toHogDateTime(x.dt, x.zone) + } else if (x.__hogDate__) { return __toHogDate(x.year, x.month, x.day) + } else if (x.__hogError__) { return __newHogError(x.type, x.message, x.payload) } + const obj = {}; for (const key in x) { obj[key] = convert(x[key]) }; return obj } + return x } + return convert(JSON.parse(str)) } +function __toHogDateTime(timestamp, zone) { + if (__isHogDate(timestamp)) { + const date = new Date(Date.UTC(timestamp.year, timestamp.month - 1, timestamp.day)); + const dt = date.getTime() / 1000; + return { __hogDateTime__: true, dt: dt, zone: zone || 'UTC' }; + } + return { __hogDateTime__: true, dt: timestamp, zone: zone || 'UTC' }; } +function __toHogDate(year, month, day) { return { __hogDate__: true, year: year, month: month, day: day, } } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? `, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __newHogError(type, message, payload) { + let error = new Error(message || 'An error occurred'); + error.__hogError__ = true + error.type = type + error.payload = payload + return error +} +function __lambda (fn) { return fn } +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __getProperty(objectOrArray, key, nullish) { + if ((nullish && !objectOrArray) || key === 0) { return null } + if (Array.isArray(objectOrArray)) { + return key > 0 ? 
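+/* Hog indexing is 1-based: a positive key reads element key - 1, a negative
+   key counts back from the end (so a[-1] is the last element), and key 0 was
+   already mapped to null above. */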
objectOrArray[key - 1] : objectOrArray[objectOrArray.length + key] + } else { + return objectOrArray[key] + } +} +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} + +let b = __lambda((x) => (x * 2)); +print(b); +print(b(2)); +print(b(8)); +print("--------"); +let func = __lambda((x) => (x * 2)); +let arr = [func]; +print(func(2)); +print(__getProperty(arr, 1, false)(2)); +print(__lambda((x) => (x * 2))(2)); +print("--------"); +let withArg = __lambda((x) => { + print(x); + print("moo"); + print("cow"); +}); +withArg(2); +print("--------"); +let noArg = __lambda(() => { + print("moo"); + print("cow"); +}); +noArg(); +print("-------- lambdas do not survive json --------"); +print(b); +print(jsonStringify(b)); +let c = jsonParse(jsonStringify(b)); +print(c); diff --git a/hogvm/__tests__/__snapshots__/loops.js b/hogvm/__tests__/__snapshots__/loops.js new file mode 100644 index 00000000000..00fd42ebbf2 --- /dev/null +++ b/hogvm/__tests__/__snapshots__/loops.js @@ -0,0 +1,108 @@ +function values (obj) { if (typeof obj === 'object' && obj !== null) { if (Array.isArray(obj)) { return [...obj] } else if (obj instanceof Map) { return Array.from(obj.values()) } return Object.values(obj) } return [] } +function tuple (...args) { const tuple = args.slice(); tuple.__isHogTuple = true; return tuple; } +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function keys (obj) { if (typeof obj === 'object' && obj !== null) { if (Array.isArray(obj)) { return Array.from(obj.keys()) } else if (obj instanceof Map) { return Array.from(obj.keys()) } return Object.keys(obj) } return [] } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? 
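+/* Hog errors print as Type('message'), with the payload appended only when
+   one is attached. */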
`, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} + +print("-- test while loop --"); +{ + let i = 0; + while ((i < 3)) { + i = (i + 1) + print(i); + } + print(i); +} +print("-- test for loop --"); +{ + for (let i = 0; (i < 3); i = (i + 1)) { + print(i); + } +} +print("-- test emptier for loop --"); +{ + let i = 0; + for (; (i < 3); ) { + print("woo"); + i = (i + 1) + } + print("hoo"); +} +print("-- for in loop with arrays --"); +{ + let arr = [1, 2, 3]; + for (let i of values(arr)) { + print(i); + } +} +print("-- for in loop with arrays and keys --"); +{ + let arr = [1, 2, 3]; + for (let k of keys(arr)) { let v = arr[k]; { + print(k, v); + } } +} +print("-- for in loop with tuples --"); +{ + let tup = tuple(1, 2, 3); + for (let i of values(tup)) { + print(i); + } +} +print("-- for in loop with tuples and keys --"); +{ + let tup = tuple(1, 2, 3); + for (let k of keys(tup)) { let v = tup[k]; { + print(k, v); + } } +} +print("-- for in loop with dicts --"); +{ + let obj = {"first": "v1", "second": "v2", "third": "v3"}; + for (let i of values(obj)) { + print(i); + } +} +print("-- for in loop with dicts and keys --"); +{ + let obj = {"first": "v1", "second": "v2", "third": "v3"}; + for (let k of keys(obj)) { let v = obj[k]; { + print(k, v); + } } +} diff --git a/hogvm/__tests__/__snapshots__/mandelbrot.js b/hogvm/__tests__/__snapshots__/mandelbrot.js new file mode 100644 index 00000000000..041a6f3bd82 --- /dev/null +++ b/hogvm/__tests__/__snapshots__/mandelbrot.js @@ -0,0 +1,125 @@ +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function concat (...args) { return args.map((arg) => (arg === null ? 
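+/* concat() renders null as an empty string rather than the text "null". */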
'' : __STLToString(arg))).join('') } +function __STLToString(arg) { + if (arg && __isHogDate(arg)) { return `${arg.year}-${arg.month.toString().padStart(2, '0')}-${arg.day.toString().padStart(2, '0')}`; } + else if (arg && __isHogDateTime(arg)) { return __DateTimeToString(arg); } + return __printHogStringOutput(arg); } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? `, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __DateTimeToString(dt) { + if (__isHogDateTime(dt)) { + const date = new Date(dt.dt * 1000); + const timeZone = dt.zone || 'UTC'; + const milliseconds = Math.floor(dt.dt * 1000 % 1000); + const options = { timeZone, year: 'numeric', month: '2-digit', day: '2-digit', hour: '2-digit', minute: '2-digit', second: '2-digit', hour12: false }; + const formatter = new Intl.DateTimeFormat('en-US', options); + const parts = formatter.formatToParts(date); + let year, month, day, hour, minute, second; + for (const part of parts) { + switch (part.type) { + case 
'year': year = part.value; break; + case 'month': month = part.value; break; + case 'day': day = part.value; break; + case 'hour': hour = part.value; break; + case 'minute': minute = part.value; break; + case 'second': second = part.value; break; + default: break; + } + } + const getOffset = (date, timeZone) => { + const tzDate = new Date(date.toLocaleString('en-US', { timeZone })); + const utcDate = new Date(date.toLocaleString('en-US', { timeZone: 'UTC' })); + const offset = (tzDate - utcDate) / 60000; // in minutes + const sign = offset >= 0 ? '+' : '-'; + const absOffset = Math.abs(offset); + const hours = Math.floor(absOffset / 60); + const minutes = absOffset % 60; + return `${sign}${String(hours).padStart(2, '0')}:${String(minutes).padStart(2, '0')}`; + }; + let offset = 'Z'; + if (timeZone !== 'UTC') { + offset = getOffset(date, timeZone); + } + let isoString = `${year}-${month}-${day}T${hour}:${minute}:${second}`; + isoString += `.${milliseconds.toString().padStart(3, '0')}`; + isoString += offset; + return isoString; + } +} + +function mandelbrot(re, im, max_iter) { + let z_re = 0.0; + let z_im = 0.0; + let n = 0; + while (!!((((z_re * z_re) + (z_im * z_im)) <= 4) && (n < max_iter))) { + let temp_re = (((z_re * z_re) - (z_im * z_im)) + re); + let temp_im = (((2 * z_re) * z_im) + im); + z_re = temp_re + z_im = temp_im + n = (n + 1) + } + if ((n == max_iter)) { + return " "; + } else { + return "#"; + } +} +function main() { + let width = 80; + let height = 24; + let xmin = -2.0; + let xmax = 1.0; + let ymin = -1.0; + let ymax = 1.0; + let max_iter = 30; + let y = 0; + while ((y < height)) { + let row = ""; + let x = 0; + while ((x < width)) { + let re = (((x / width) * (xmax - xmin)) + xmin); + let im = (((y / height) * (ymax - ymin)) + ymin); + let letter = mandelbrot(re, im, max_iter); + row = concat(row, letter) + x = (x + 1) + } + print(row); + y = (y + 1) + } +} +main(); diff --git a/hogvm/__tests__/__snapshots__/operations.hoge b/hogvm/__tests__/__snapshots__/operations.hoge index 3a08b489025..4d8a16ec1d6 100644 --- a/hogvm/__tests__/__snapshots__/operations.hoge +++ b/hogvm/__tests__/__snapshots__/operations.hoge @@ -10,20 +10,22 @@ 54, 1, 35, 32, "%x%", 32, "baa", 17, 36, 0, 54, 1, 35, 32, "%A%", 32, "baa", 18, 36, 0, 54, 1, 35, 32, "%C%", 32, "baa", 18, 36, 0, 54, 1, 35, 32, "b", 32, "a", 18, 36, 0, 54, 1, 35, 32, "b", 32, "a", 19, 36, 0, 54, 1, 35, 32, "b", 32, "a", 20, 36, 0, 54, 1, 35, 32, "car", 32, "a", 21, 36, 0, 54, 1, 35, 32, "foo", 32, "a", 21, 36, 0, 54, 1, 35, 32, "car", 32, -"a", 22, 36, 0, 54, 1, 35, 32, "arg", 32, "another", 2, "concat", 2, 36, 0, 54, 1, 35, 33, 1, 31, 2, "concat", 2, 36, 0, -54, 1, 35, 29, 30, 2, "concat", 2, 36, 0, 54, 1, 35, 32, "test", 32, "e.*", 2, "match", 2, 36, 0, 54, 1, 35, 32, "test", -32, "^e.*", 2, "match", 2, 36, 0, 54, 1, 35, 32, "test", 32, "x.*", 2, "match", 2, 36, 0, 54, 1, 35, 32, "e.*", 32, -"test", 23, 36, 0, 54, 1, 35, 32, "e.*", 32, "test", 24, 36, 0, 54, 1, 35, 32, "^e.*", 32, "test", 23, 36, 0, 54, 1, 35, -32, "^e.*", 32, "test", 24, 36, 0, 54, 1, 35, 32, "x.*", 32, "test", 23, 36, 0, 54, 1, 35, 32, "x.*", 32, "test", 24, -36, 0, 54, 1, 35, 32, "EST", 32, "test", 25, 36, 0, 54, 1, 35, 32, "EST", 32, "test", 25, 36, 0, 54, 1, 35, 32, "EST", -32, "test", 26, 36, 0, 54, 1, 35, 33, 1, 2, "toString", 1, 36, 0, 54, 1, 35, 34, 1.5, 2, "toString", 1, 36, 0, 54, 1, -35, 29, 2, "toString", 1, 36, 0, 54, 1, 35, 31, 2, "toString", 1, 36, 0, 54, 1, 35, 32, "string", 2, "toString", 1, 36, -0, 54, 1, 35, 32, "1", 2, "toInt", 1, 36, 
0, 54, 1, 35, 32, "bla", 2, "toInt", 1, 36, 0, 54, 1, 35, 32, "1.2", 2, -"toFloat", 1, 36, 0, 54, 1, 35, 32, "bla", 2, "toFloat", 1, 36, 0, 54, 1, 35, 32, "asd", 2, "toUUID", 1, 36, 0, 54, 1, -35, 31, 33, 1, 11, 36, 0, 54, 1, 35, 31, 33, 1, 12, 36, 0, 54, 1, 35, 33, 1, 32, "1", 11, 36, 0, 54, 1, 35, 32, "1", 33, -1, 11, 36, 0, 54, 1, 35, 29, 33, 1, 11, 36, 0, 54, 1, 35, 29, 33, 0, 11, 36, 0, 54, 1, 35, 29, 33, 2, 11, 36, 0, 54, 1, -35, 30, 33, 1, 12, 36, 0, 54, 1, 35, 32, "2", 33, 1, 11, 36, 0, 54, 1, 35, 32, "2", 33, 1, 11, 36, 0, 54, 1, 35, 32, -"2", 33, 1, 12, 36, 0, 54, 1, 35, 32, "2", 33, 1, 15, 36, 0, 54, 1, 35, 32, "2", 33, 1, 16, 36, 0, 54, 1, 35, 32, "2", -33, 1, 13, 36, 0, 54, 1, 35, 32, "2", 33, 1, 14, 36, 0, 54, 1, 35, 33, 2, 32, "1", 11, 36, 0, 54, 1, 35, 33, 2, 32, "1", -11, 36, 0, 54, 1, 35, 33, 2, 32, "1", 12, 36, 0, 54, 1, 35, 33, 2, 32, "1", 15, 36, 0, 54, 1, 35, 33, 2, 32, "1", 16, -36, 0, 54, 1, 35, 33, 2, 32, "1", 13, 36, 0, 54, 1, 35, 33, 2, 32, "1", 14, 36, 0, 54, 1, 35, 35] +"a", 22, 36, 0, 54, 1, 35, 32, "b_x", 32, "bax", 17, 36, 0, 54, 1, 35, 32, "b_x", 32, "baax", 19, 36, 0, 54, 1, 35, 32, +"b%x", 32, "baax", 17, 36, 0, 54, 1, 35, 32, "arg", 32, "another", 2, "concat", 2, 36, 0, 54, 1, 35, 33, 1, 31, 2, +"concat", 2, 36, 0, 54, 1, 35, 29, 30, 2, "concat", 2, 36, 0, 54, 1, 35, 32, "test", 32, "e.*", 2, "match", 2, 36, 0, +54, 1, 35, 32, "test", 32, "^e.*", 2, "match", 2, 36, 0, 54, 1, 35, 32, "test", 32, "x.*", 2, "match", 2, 36, 0, 54, 1, +35, 32, "e.*", 32, "test", 23, 36, 0, 54, 1, 35, 32, "e.*", 32, "test", 24, 36, 0, 54, 1, 35, 32, "^e.*", 32, "test", +23, 36, 0, 54, 1, 35, 32, "^e.*", 32, "test", 24, 36, 0, 54, 1, 35, 32, "x.*", 32, "test", 23, 36, 0, 54, 1, 35, 32, +"x.*", 32, "test", 24, 36, 0, 54, 1, 35, 32, "EST", 32, "test", 25, 36, 0, 54, 1, 35, 32, "EST", 32, "test", 25, 36, 0, +54, 1, 35, 32, "EST", 32, "test", 26, 36, 0, 54, 1, 35, 33, 1, 2, "toString", 1, 36, 0, 54, 1, 35, 34, 1.5, 2, +"toString", 1, 36, 0, 54, 1, 35, 29, 2, "toString", 1, 36, 0, 54, 1, 35, 31, 2, "toString", 1, 36, 0, 54, 1, 35, 32, +"string", 2, "toString", 1, 36, 0, 54, 1, 35, 32, "1", 2, "toInt", 1, 36, 0, 54, 1, 35, 32, "bla", 2, "toInt", 1, 36, 0, +54, 1, 35, 32, "1.2", 2, "toFloat", 1, 36, 0, 54, 1, 35, 32, "bla", 2, "toFloat", 1, 36, 0, 54, 1, 35, 32, "asd", 2, +"toUUID", 1, 36, 0, 54, 1, 35, 31, 33, 1, 11, 36, 0, 54, 1, 35, 31, 33, 1, 12, 36, 0, 54, 1, 35, 33, 1, 32, "1", 11, 36, +0, 54, 1, 35, 32, "1", 33, 1, 11, 36, 0, 54, 1, 35, 29, 33, 1, 11, 36, 0, 54, 1, 35, 29, 33, 0, 11, 36, 0, 54, 1, 35, +29, 33, 2, 11, 36, 0, 54, 1, 35, 30, 33, 1, 12, 36, 0, 54, 1, 35, 32, "2", 33, 1, 11, 36, 0, 54, 1, 35, 32, "2", 33, 1, +11, 36, 0, 54, 1, 35, 32, "2", 33, 1, 12, 36, 0, 54, 1, 35, 32, "2", 33, 1, 15, 36, 0, 54, 1, 35, 32, "2", 33, 1, 16, +36, 0, 54, 1, 35, 32, "2", 33, 1, 13, 36, 0, 54, 1, 35, 32, "2", 33, 1, 14, 36, 0, 54, 1, 35, 33, 2, 32, "1", 11, 36, 0, +54, 1, 35, 33, 2, 32, "1", 11, 36, 0, 54, 1, 35, 33, 2, 32, "1", 12, 36, 0, 54, 1, 35, 33, 2, 32, "1", 15, 36, 0, 54, 1, +35, 33, 2, 32, "1", 16, 36, 0, 54, 1, 35, 33, 2, 32, "1", 13, 36, 0, 54, 1, 35, 33, 2, 32, "1", 14, 36, 0, 54, 1, 35, +35] diff --git a/hogvm/__tests__/__snapshots__/operations.js b/hogvm/__tests__/__snapshots__/operations.js new file mode 100644 index 00000000000..20d4a40f2cc --- /dev/null +++ b/hogvm/__tests__/__snapshots__/operations.js @@ -0,0 +1,224 @@ +function toUUID (value) { return __STLToString(value) } +function toString (value) { return __STLToString(value) } +function toInt(value) { + if 
(__isHogDateTime(value)) { return Math.floor(value.dt); } + else if (__isHogDate(value)) { const date = new Date(Date.UTC(value.year, value.month - 1, value.day)); const epoch = new Date(Date.UTC(1970, 0, 1)); const diffInDays = Math.floor((date - epoch) / (1000 * 60 * 60 * 24)); return diffInDays; } + return !isNaN(parseInt(value)) ? parseInt(value) : null; } +function toFloat(value) { + if (__isHogDateTime(value)) { return value.dt; } + else if (__isHogDate(value)) { const date = new Date(Date.UTC(value.year, value.month - 1, value.day)); const epoch = new Date(Date.UTC(1970, 0, 1)); const diffInDays = (date - epoch) / (1000 * 60 * 60 * 24); return diffInDays; } + return !isNaN(parseFloat(value)) ? parseFloat(value) : null; } +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function match (str, pattern) { return new RegExp(pattern).test(str) } +function like (str, pattern) { return __like(str, pattern, false) } +function jsonStringify (value, spacing) { + function convert(x, marked) { + if (!marked) { marked = new Set() } + if (typeof x === 'object' && x !== null) { + if (marked.has(x)) { return null } + marked.add(x) + try { + if (x instanceof Map) { + const obj = {} + x.forEach((value, key) => { obj[convert(key, marked)] = convert(value, marked) }) + return obj + } + if (Array.isArray(x)) { return x.map((v) => convert(v, marked)) } + if (__isHogDateTime(x) || __isHogDate(x) || __isHogError(x)) { return x } + if (typeof x === 'function') { return `fn<${x.name || 'lambda'}(${x.length})>` } + const obj = {}; for (const key in x) { obj[key] = convert(x[key], marked) } + return obj + } finally { + marked.delete(x) + } + } + return x + } + if (spacing && typeof spacing === 'number' && spacing > 0) { + return JSON.stringify(convert(value), null, spacing) + } + return JSON.stringify(convert(value), (key, val) => typeof val === 'function' ? `fn<${val.name || 'lambda'}(${val.length})>` : val) +} +function ilike (str, pattern) { return __like(str, pattern, true) } +function concat (...args) { return args.map((arg) => (arg === null ? '' : __STLToString(arg))).join('') } +function __like(str, pattern, caseInsensitive = false) { + if (caseInsensitive) { + str = str.toLowerCase() + pattern = pattern.toLowerCase() + } + pattern = String(pattern) + .replaceAll(/[-/\\^$*+?.()|[\]{}]/g, '\\$&') + .replaceAll('%', '.*') + .replaceAll('_', '.') + return new RegExp(pattern).test(str) +} +function __STLToString(arg) { + if (arg && __isHogDate(arg)) { return `${arg.year}-${arg.month.toString().padStart(2, '0')}-${arg.day.toString().padStart(2, '0')}`; } + else if (arg && __isHogDateTime(arg)) { return __DateTimeToString(arg); } + return __printHogStringOutput(arg); } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? 
'' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? `, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __DateTimeToString(dt) { + if (__isHogDateTime(dt)) { + const date = new Date(dt.dt * 1000); + const timeZone = dt.zone || 'UTC'; + const milliseconds = Math.floor(dt.dt * 1000 % 1000); + const options = { timeZone, year: 'numeric', month: '2-digit', day: '2-digit', hour: '2-digit', minute: '2-digit', second: '2-digit', hour12: false }; + const formatter = new Intl.DateTimeFormat('en-US', options); + const parts = formatter.formatToParts(date); + let year, month, day, hour, minute, second; + for (const part of parts) { + switch (part.type) { + case 'year': year = part.value; break; + case 'month': month = part.value; break; + case 'day': day = part.value; break; + case 'hour': hour = part.value; break; + case 'minute': minute = part.value; break; + case 'second': second = part.value; break; + default: break; + } + } + const getOffset = (date, timeZone) => { + const tzDate = new Date(date.toLocaleString('en-US', { timeZone })); + const utcDate = new Date(date.toLocaleString('en-US', { timeZone: 'UTC' })); + const offset = (tzDate - utcDate) / 60000; // in minutes + const sign = offset >= 0 ? 
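+/* Format the zone offset as a signed "+HH:MM"/"-HH:MM" suffix computed from
+   the minute difference between the target zone and UTC. */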
'+' : '-'; + const absOffset = Math.abs(offset); + const hours = Math.floor(absOffset / 60); + const minutes = absOffset % 60; + return `${sign}${String(hours).padStart(2, '0')}:${String(minutes).padStart(2, '0')}`; + }; + let offset = 'Z'; + if (timeZone !== 'UTC') { + offset = getOffset(date, timeZone); + } + let isoString = `${year}-${month}-${day}T${hour}:${minute}:${second}`; + isoString += `.${milliseconds.toString().padStart(3, '0')}`; + isoString += offset; + return isoString; + } +} + +function test(val) { + print(jsonStringify(val)); +} +print("-- test the most common expressions --"); +test((1 + 2)); +test((1 - 2)); +test((3 * 2)); +test((3 / 2)); +test((3 % 2)); +test(!!(1 && 2)); +test(!!(1 || 0)); +test(!!(1 && 0)); +test(!!(1 || !!(0 && 1) || 2)); +test(!!(1 && 0 && 1)); +test(!!(!!(1 || 2) && !!(1 || 2))); +test(true); +test((!true)); +test(false); +test(null); +test(3.14); +test((1 == 2)); +test((1 == 2)); +test((1 != 2)); +test((1 < 2)); +test((1 <= 2)); +test((1 > 2)); +test((1 >= 2)); +test(like("a", "b")); +test(like("baa", "%a%")); +test(like("baa", "%x%")); +test(ilike("baa", "%A%")); +test(ilike("baa", "%C%")); +test(ilike("a", "b")); +test(!like("a", "b")); +test(!ilike("a", "b")); +test(("car".includes("a"))); +test(("foo".includes("a"))); +test((!"car".includes("a"))); +test(like("bax", "b_x")); +test(!like("baax", "b_x")); +test(like("baax", "b%x")); +test(concat("arg", "another")); +test(concat(1, null)); +test(concat(true, false)); +test(match("test", "e.*")); +test(match("test", "^e.*")); +test(match("test", "x.*")); +test(new RegExp("e.*").test("test")); +test(!(new RegExp("e.*").test("test"))); +test(new RegExp("^e.*").test("test")); +test(!(new RegExp("^e.*").test("test"))); +test(new RegExp("x.*").test("test")); +test(!(new RegExp("x.*").test("test"))); +test(new RegExp("EST", "i").test("test")); +test(new RegExp("EST", "i").test("test")); +test(!(new RegExp("EST", "i").test("test"))); +test(toString(1)); +test(toString(1.5)); +test(toString(true)); +test(toString(null)); +test(toString("string")); +test(toInt("1")); +test(toInt("bla")); +test(toFloat("1.2")); +test(toFloat("bla")); +test(toUUID("asd")); +test((1 == null)); +test((1 != null)); +test(("1" == 1)); +test((1 == "1")); +test((1 == true)); +test((0 == true)); +test((2 == true)); +test((1 != false)); +test((1 == "2")); +test((1 == "2")); +test((1 != "2")); +test((1 < "2")); +test((1 <= "2")); +test((1 > "2")); +test((1 >= "2")); +test(("1" == 2)); +test(("1" == 2)); +test(("1" != 2)); +test(("1" < 2)); +test(("1" <= 2)); +test(("1" > 2)); +test(("1" >= 2)); diff --git a/hogvm/__tests__/__snapshots__/operations.stdout b/hogvm/__tests__/__snapshots__/operations.stdout index 849bb269926..c08036afe55 100644 --- a/hogvm/__tests__/__snapshots__/operations.stdout +++ b/hogvm/__tests__/__snapshots__/operations.stdout @@ -33,6 +33,9 @@ true true false false +true +true +true "arganother" "1" "truefalse" diff --git a/hogvm/__tests__/__snapshots__/printLoops.js b/hogvm/__tests__/__snapshots__/printLoops.js new file mode 100644 index 00000000000..a447308a88c --- /dev/null +++ b/hogvm/__tests__/__snapshots__/printLoops.js @@ -0,0 +1,158 @@ +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function jsonStringify (value, spacing) { + function convert(x, marked) { + if (!marked) { marked = new Set() } + if (typeof x === 'object' && x !== null) { + if (marked.has(x)) { return null } + marked.add(x) + try { + if (x instanceof Map) { + const obj = {} + x.forEach((value, key) => { 
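+/* Hog dicts may compile to Maps, so flatten a Map into a plain object that
+   JSON.stringify can serialize. */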
obj[convert(key, marked)] = convert(value, marked) }) + return obj + } + if (Array.isArray(x)) { return x.map((v) => convert(v, marked)) } + if (__isHogDateTime(x) || __isHogDate(x) || __isHogError(x)) { return x } + if (typeof x === 'function') { return `fn<${x.name || 'lambda'}(${x.length})>` } + const obj = {}; for (const key in x) { obj[key] = convert(x[key], marked) } + return obj + } finally { + marked.delete(x) + } + } + return x + } + if (spacing && typeof spacing === 'number' && spacing > 0) { + return JSON.stringify(convert(value), null, spacing) + } + return JSON.stringify(convert(value), (key, val) => typeof val === 'function' ? `fn<${val.name || 'lambda'}(${val.length})>` : val) +} +function jsonParse (str) { + function convert(x) { + if (Array.isArray(x)) { return x.map(convert) } + else if (typeof x === 'object' && x !== null) { + if (x.__hogDateTime__) { return __toHogDateTime(x.dt, x.zone) + } else if (x.__hogDate__) { return __toHogDate(x.year, x.month, x.day) + } else if (x.__hogError__) { return __newHogError(x.type, x.message, x.payload) } + const obj = {}; for (const key in x) { obj[key] = convert(x[key]) }; return obj } + return x } + return convert(JSON.parse(str)) } +function concat (...args) { return args.map((arg) => (arg === null ? '' : __STLToString(arg))).join('') } +function __toHogDateTime(timestamp, zone) { + if (__isHogDate(timestamp)) { + const date = new Date(Date.UTC(timestamp.year, timestamp.month - 1, timestamp.day)); + const dt = date.getTime() / 1000; + return { __hogDateTime__: true, dt: dt, zone: zone || 'UTC' }; + } + return { __hogDateTime__: true, dt: timestamp, zone: zone || 'UTC' }; } +function __toHogDate(year, month, day) { return { __hogDate__: true, year: year, month: month, day: day, } } +function __setProperty(objectOrArray, key, value) { + if (Array.isArray(objectOrArray)) { + if (key > 0) { + objectOrArray[key - 1] = value + } else { + objectOrArray[objectOrArray.length + key] = value + } + } else { + objectOrArray[key] = value + } + return objectOrArray +} +function __newHogError(type, message, payload) { + let error = new Error(message || 'An error occurred'); + error.__hogError__ = true + error.type = type + error.payload = payload + return error +} +function __STLToString(arg) { + if (arg && __isHogDate(arg)) { return `${arg.year}-${arg.month.toString().padStart(2, '0')}-${arg.day.toString().padStart(2, '0')}`; } + else if (arg && __isHogDateTime(arg)) { return __DateTimeToString(arg); } + return __printHogStringOutput(arg); } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? 
`, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __DateTimeToString(dt) { + if (__isHogDateTime(dt)) { + const date = new Date(dt.dt * 1000); + const timeZone = dt.zone || 'UTC'; + const milliseconds = Math.floor(dt.dt * 1000 % 1000); + const options = { timeZone, year: 'numeric', month: '2-digit', day: '2-digit', hour: '2-digit', minute: '2-digit', second: '2-digit', hour12: false }; + const formatter = new Intl.DateTimeFormat('en-US', options); + const parts = formatter.formatToParts(date); + let year, month, day, hour, minute, second; + for (const part of parts) { + switch (part.type) { + case 'year': year = part.value; break; + case 'month': month = part.value; break; + case 'day': day = part.value; break; + case 'hour': hour = part.value; break; + case 'minute': minute = part.value; break; + case 'second': second = part.value; break; + default: break; + } + } + const getOffset = (date, timeZone) => { + const tzDate = new Date(date.toLocaleString('en-US', { timeZone })); + const utcDate = new Date(date.toLocaleString('en-US', { timeZone: 'UTC' })); + const offset = (tzDate - utcDate) / 60000; // in minutes + const sign = offset >= 0 ? 
'+' : '-'; + const absOffset = Math.abs(offset); + const hours = Math.floor(absOffset / 60); + const minutes = absOffset % 60; + return `${sign}${String(hours).padStart(2, '0')}:${String(minutes).padStart(2, '0')}`; + }; + let offset = 'Z'; + if (timeZone !== 'UTC') { + offset = getOffset(date, timeZone); + } + let isoString = `${year}-${month}-${day}T${hour}:${minute}:${second}`; + isoString += `.${milliseconds.toString().padStart(3, '0')}`; + isoString += offset; + return isoString; + } +} + +let obj = {"key": "value", "key2": "value2"}; +let str = "na"; +for (let i = 0; (i < 100); i = (i + 1)) { + str = concat(str, "na") + __setProperty(obj, concat("key_", i), {"wasted": concat("memory: ", str, " batman!"), "something": obj}); +} +print(obj); +let json = jsonStringify(obj); +print(jsonParse(json)); diff --git a/hogvm/__tests__/__snapshots__/printLoops2.js b/hogvm/__tests__/__snapshots__/printLoops2.js new file mode 100644 index 00000000000..3bf13ec5510 --- /dev/null +++ b/hogvm/__tests__/__snapshots__/printLoops2.js @@ -0,0 +1,156 @@ +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function jsonStringify (value, spacing) { + function convert(x, marked) { + if (!marked) { marked = new Set() } + if (typeof x === 'object' && x !== null) { + if (marked.has(x)) { return null } + marked.add(x) + try { + if (x instanceof Map) { + const obj = {} + x.forEach((value, key) => { obj[convert(key, marked)] = convert(value, marked) }) + return obj + } + if (Array.isArray(x)) { return x.map((v) => convert(v, marked)) } + if (__isHogDateTime(x) || __isHogDate(x) || __isHogError(x)) { return x } + if (typeof x === 'function') { return `fn<${x.name || 'lambda'}(${x.length})>` } + const obj = {}; for (const key in x) { obj[key] = convert(x[key], marked) } + return obj + } finally { + marked.delete(x) + } + } + return x + } + if (spacing && typeof spacing === 'number' && spacing > 0) { + return JSON.stringify(convert(value), null, spacing) + } + return JSON.stringify(convert(value), (key, val) => typeof val === 'function' ? `fn<${val.name || 'lambda'}(${val.length})>` : val) +} +function jsonParse (str) { + function convert(x) { + if (Array.isArray(x)) { return x.map(convert) } + else if (typeof x === 'object' && x !== null) { + if (x.__hogDateTime__) { return __toHogDateTime(x.dt, x.zone) + } else if (x.__hogDate__) { return __toHogDate(x.year, x.month, x.day) + } else if (x.__hogError__) { return __newHogError(x.type, x.message, x.payload) } + const obj = {}; for (const key in x) { obj[key] = convert(x[key]) }; return obj } + return x } + return convert(JSON.parse(str)) } +function concat (...args) { return args.map((arg) => (arg === null ? 
'' : __STLToString(arg))).join('') } +function __toHogDateTime(timestamp, zone) { + if (__isHogDate(timestamp)) { + const date = new Date(Date.UTC(timestamp.year, timestamp.month - 1, timestamp.day)); + const dt = date.getTime() / 1000; + return { __hogDateTime__: true, dt: dt, zone: zone || 'UTC' }; + } + return { __hogDateTime__: true, dt: timestamp, zone: zone || 'UTC' }; } +function __toHogDate(year, month, day) { return { __hogDate__: true, year: year, month: month, day: day, } } +function __setProperty(objectOrArray, key, value) { + if (Array.isArray(objectOrArray)) { + if (key > 0) { + objectOrArray[key - 1] = value + } else { + objectOrArray[objectOrArray.length + key] = value + } + } else { + objectOrArray[key] = value + } + return objectOrArray +} +function __newHogError(type, message, payload) { + let error = new Error(message || 'An error occurred'); + error.__hogError__ = true + error.type = type + error.payload = payload + return error +} +function __STLToString(arg) { + if (arg && __isHogDate(arg)) { return `${arg.year}-${arg.month.toString().padStart(2, '0')}-${arg.day.toString().padStart(2, '0')}`; } + else if (arg && __isHogDateTime(arg)) { return __DateTimeToString(arg); } + return __printHogStringOutput(arg); } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? `, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 
'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __DateTimeToString(dt) { + if (__isHogDateTime(dt)) { + const date = new Date(dt.dt * 1000); + const timeZone = dt.zone || 'UTC'; + const milliseconds = Math.floor(dt.dt * 1000 % 1000); + const options = { timeZone, year: 'numeric', month: '2-digit', day: '2-digit', hour: '2-digit', minute: '2-digit', second: '2-digit', hour12: false }; + const formatter = new Intl.DateTimeFormat('en-US', options); + const parts = formatter.formatToParts(date); + let year, month, day, hour, minute, second; + for (const part of parts) { + switch (part.type) { + case 'year': year = part.value; break; + case 'month': month = part.value; break; + case 'day': day = part.value; break; + case 'hour': hour = part.value; break; + case 'minute': minute = part.value; break; + case 'second': second = part.value; break; + default: break; + } + } + const getOffset = (date, timeZone) => { + const tzDate = new Date(date.toLocaleString('en-US', { timeZone })); + const utcDate = new Date(date.toLocaleString('en-US', { timeZone: 'UTC' })); + const offset = (tzDate - utcDate) / 60000; // in minutes + const sign = offset >= 0 ? 
'+' : '-'; + const absOffset = Math.abs(offset); + const hours = Math.floor(absOffset / 60); + const minutes = absOffset % 60; + return `${sign}${String(hours).padStart(2, '0')}:${String(minutes).padStart(2, '0')}`; + }; + let offset = 'Z'; + if (timeZone !== 'UTC') { + offset = getOffset(date, timeZone); + } + let isoString = `${year}-${month}-${day}T${hour}:${minute}:${second}`; + isoString += `.${milliseconds.toString().padStart(3, '0')}`; + isoString += offset; + return isoString; + } +} + +let root = {"key": "value", "key2": "value2"}; +let leaf = {"key": "value", "key2": "value2"}; +for (let i = 0; (i < 30); i = (i + 1)) { + __setProperty(root, concat("key_", i), {"something": leaf}); +} +print(root); +print(jsonParse(jsonStringify(root))); diff --git a/hogvm/__tests__/__snapshots__/properties.js b/hogvm/__tests__/__snapshots__/properties.js new file mode 100644 index 00000000000..a09f437d4d7 --- /dev/null +++ b/hogvm/__tests__/__snapshots__/properties.js @@ -0,0 +1,122 @@ +function tuple (...args) { const tuple = args.slice(); tuple.__isHogTuple = true; return tuple; } +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function __setProperty(objectOrArray, key, value) { + if (Array.isArray(objectOrArray)) { + if (key > 0) { + objectOrArray[key - 1] = value + } else { + objectOrArray[objectOrArray.length + key] = value + } + } else { + objectOrArray[key] = value + } + return objectOrArray +} +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? `, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 
'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __getProperty(objectOrArray, key, nullish) { + if ((nullish && !objectOrArray) || key === 0) { return null } + if (Array.isArray(objectOrArray)) { + return key > 0 ? objectOrArray[key - 1] : objectOrArray[objectOrArray.length + key] + } else { + return objectOrArray[key] + } +} +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} + +{ + let r = [1, 2, {"d": tuple(1, 3, 42, 6)}]; + print(__getProperty(__getProperty(__getProperty(r, 3, false), "d", false), 2, false)); +} +{ + let r = [1, 2, {"d": tuple(1, 3, 42, 6)}]; + print(__getProperty(__getProperty(__getProperty(r, 3, false), "d", false), 3, false)); +} +{ + let r = [1, 2, {"d": tuple(1, 3, 42, 6)}]; + print(__getProperty(__getProperty(__getProperty(r, 3, false), "d", false), 4, false)); +} +{ + let r = {"d": tuple(1, 3, 42, 6)}; + print(__getProperty(__getProperty(r, "d", true), 2, false)); +} +{ + let r = [1, 2, {"d": [1, 3, 42, 3]}]; + __setProperty(__getProperty(__getProperty(r, 3, false), "d", false), 3, 3); + print(__getProperty(__getProperty(__getProperty(r, 3, false), "d", false), 3, false)); +} +{ + let r = [1, 2, {"d": [1, 3, 42, 3]}]; + __setProperty(__getProperty(__getProperty(r, 3, false), "d", false), 3, 3); + print(__getProperty(__getProperty(__getProperty(r, 3, false), "d", false), 3, false)); +} +{ + let r = [1, 2, {"d": [1, 3, 42, 3]}]; + __setProperty(__getProperty(r, 3, false), "c", [666]); + print(__getProperty(r, 3, false)); +} +{ + let r = [1, 2, {"d": [1, 3, 42, 3]}]; + __setProperty(__getProperty(__getProperty(r, 3, false), "d", false), 3, 3); + print(__getProperty(__getProperty(r, 3, false), "d", false)); +} +{ + let r = [1, 2, {"d": [1, 3, 42, 3]}]; + __setProperty(__getProperty(r, 3, false), "d", ["a", "b", "c", "d"]); + print(__getProperty(__getProperty(__getProperty(r, 3, false), "d", false), 3, false)); +} +{ + let r = [1, 2, {"d": [1, 3, 42, 3]}]; + let g = "d"; + __setProperty(__getProperty(r, 3, false), g, ["a", "b", "c", "d"]); + print(__getProperty(__getProperty(__getProperty(r, 3, false), "d", false), 3, false)); +} +{ + let event = {"event": "$pageview", "properties": {"$browser": "Chrome", "$os": "Windows"}}; + __setProperty(__getProperty(event, "properties", false), "$browser", "Firefox"); + print(event); +} +{ + let event = {"event": "$pageview", "properties": {"$browser": "Chrome", "$os": "Windows"}}; + __setProperty(__getProperty(event, "properties", 
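+/* the trailing flag marks nullish access (presumably ?. in the Hog
+   source): a missing object yields null here instead of throwing */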
true), "$browser", "Firefox") + print(event); +} +{ + let event = {"event": "$pageview", "properties": {"$browser": "Chrome", "$os": "Windows"}}; + let config = {}; + print(event); +} diff --git a/hogvm/__tests__/__snapshots__/recursion.js b/hogvm/__tests__/__snapshots__/recursion.js new file mode 100644 index 00000000000..1eb354a7e50 --- /dev/null +++ b/hogvm/__tests__/__snapshots__/recursion.js @@ -0,0 +1,56 @@ +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? `, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 
'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __lambda (fn) { return fn } +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} + +let fibonacci = __lambda((number) => { + if ((number < 2)) { + return number; + } else { + return (fibonacci((number - 1)) + fibonacci((number - 2))); + } +}); +print(fibonacci(6)); +function hogonacci(number) { + if ((number < 2)) { + return number; + } else { + return (hogonacci((number - 1)) + hogonacci((number - 2))); + } +} +print(hogonacci(6)); diff --git a/hogvm/__tests__/__snapshots__/scope.js b/hogvm/__tests__/__snapshots__/scope.js new file mode 100644 index 00000000000..6bb0aa44031 --- /dev/null +++ b/hogvm/__tests__/__snapshots__/scope.js @@ -0,0 +1,122 @@ +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function __setProperty(objectOrArray, key, value) { + if (Array.isArray(objectOrArray)) { + if (key > 0) { + objectOrArray[key - 1] = value + } else { + objectOrArray[objectOrArray.length + key] = value + } + } else { + objectOrArray[key] = value + } + return objectOrArray +} +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? 
`, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __lambda (fn) { return fn } +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} + +let dbl = __lambda((x) => (x * 2)); +print(dbl); +print(dbl(2)); +print(dbl(8)); +print("--------"); +let __x_var = 5; +let varify = __lambda((x) => (x * __x_var)); +print(varify(2)); +__x_var = 10 +print(varify(2)); +print(varify(8)); +print("--------"); +function bigVar() { + let __x_var = 5; + let varify = __lambda((x) => (x * __x_var)); + return varify; +} +let bigVarify = bigVar(); +print(bigVarify(2)); +print(bigVarify(8)); +print("--------"); +let a = 3; +function outerA() { + print(a); + a = 4 + print(a); +} +function innerA() { + print(a); + outerA(); + print(a); +} +print(a); +innerA(); +print(a); +print("--------"); +let b = {"key": 3}; +function outerB() { + print(b); + __setProperty(b, "key", 4) + print(b); +} +function innerB() { + print(b); + outerB(); + print(b); +} +print(b); +innerB(); +print(b); +print("--------"); +function outerC() { + let x = "outside"; + function innerC() { + print(x); + } + innerC(); +} +outerC(); +print("--------"); +function myFunctionOuter() { + let b = 3; + function myFunction(a) { + return (a + b); + } + print(myFunction(2)); + print(myFunction(3)); +} +myFunctionOuter(); +print("--------"); diff --git a/hogvm/__tests__/__snapshots__/stl.js b/hogvm/__tests__/__snapshots__/stl.js new file mode 100644 index 00000000000..9af5082cd86 --- /dev/null +++ b/hogvm/__tests__/__snapshots__/stl.js @@ -0,0 +1,121 @@ +function upper (value) { return value.toUpperCase() } +function tuple (...args) { const tuple = args.slice(); tuple.__isHogTuple = true; return tuple; } +function reverse (value) { return value.split('').reverse().join('') } +function replaceOne (str, searchValue, replaceValue) { return str.replace(searchValue, replaceValue) } +function replaceAll (str, searchValue, replaceValue) { return str.replaceAll(searchValue, replaceValue) } +function print (...args) { 
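+/* print() routes every argument through __printHogStringOutput: strings pass
+   through raw, while all other values get Hog's repr-style formatting. */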
console.log(...args.map(__printHogStringOutput)) } +function notEmpty (value) { return !empty(value) } +function lower (value) { return value.toLowerCase() } +function length (value) { return value.length } +function generateUUIDv4 () { return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) { const r = (Math.random() * 16) | 0; const v = c === 'x' ? r : (r & 0x3) | 0x8; return v.toString(16) })} +function encodeURLComponent (str) { return encodeURIComponent(str) } +function empty (value) { + if (typeof value === 'object') { + if (Array.isArray(value)) { return value.length === 0 } else if (value === null) { return true } else if (value instanceof Map) { return value.size === 0 } + return Object.keys(value).length === 0 + } else if (typeof value === 'number' || typeof value === 'boolean') { return false } + return !value } +function decodeURLComponent (str) { return decodeURIComponent(str) } +function base64Encode (str) { return Buffer.from(str).toString('base64') } +function base64Decode (str) { return Buffer.from(str, 'base64').toString() } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? `, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 
'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} + +print("-- empty, notEmpty, length, lower, upper, reverse --"); +if (!!(empty("") && notEmpty("234"))) { + print(length("123")); +} +if ((lower("Tdd4gh") == "tdd4gh")) { + print(upper("test")); +} +print(reverse("spinner")); +print(""); +print("-- encodeURLComponent, decodeURLComponent --"); +print(encodeURLComponent("http://www.google.com")); +print(encodeURLComponent("tom & jerry")); +print(decodeURLComponent(encodeURLComponent("http://www.google.com"))); +print(decodeURLComponent(encodeURLComponent("tom & jerry"))); +print(""); +print("-- base64Encode, base64Decode --"); +print(base64Encode("http://www.google.com")); +print(base64Encode("tom & jerry")); +print(base64Decode(base64Encode("http://www.google.com"))); +print(base64Decode(base64Encode("tom & jerry"))); +print(""); +print("-- empty --"); +print(empty(null)); +print(empty(0)); +print(empty(1)); +print(empty(-1)); +print(empty(0.0)); +print(empty(0.01)); +print(empty("")); +print(empty("string")); +print(empty("0")); +print(empty([])); +print(empty({})); +print(empty(tuple())); +print(empty(tuple(0))); +print(empty(tuple(1, 2))); +print(empty(true)); +print(empty(false)); +print(""); +print("-- notEmpty --"); +print(notEmpty(null)); +print(notEmpty(0)); +print(notEmpty(1)); +print(notEmpty(-1)); +print(notEmpty(0.0)); +print(notEmpty(0.01)); +print(notEmpty("")); +print(notEmpty("string")); +print(notEmpty("0")); +print(notEmpty([])); +print(notEmpty({})); +print(notEmpty(tuple())); +print(notEmpty(tuple(0))); +print(notEmpty(tuple(1, 2))); +print(notEmpty(true)); +print(notEmpty(false)); +print(""); +print("-- replaceAll, replaceOne --"); +print(replaceAll("hello world", "l", "L")); +print(replaceOne("hello world", "l", "L")); +print(""); +print("-- generateUUIDv4 --"); +print(length(generateUUIDv4())); diff --git a/hogvm/__tests__/__snapshots__/strings.js b/hogvm/__tests__/__snapshots__/strings.js new file mode 100644 index 00000000000..9ce51632b4b --- /dev/null +++ b/hogvm/__tests__/__snapshots__/strings.js @@ -0,0 +1,129 @@ +function trimRight (str, char) { + if (char === null || char === undefined) { + char = ' ' + } + if (char.length !== 1) { + return '' + } + let end = str.length + while (str[end - 1] === char) { + end-- + } + return str.slice(0, end) +} +function trimLeft (str, char) { + if (char === null || char === undefined) { + 
char = ' ' + } + if (char.length !== 1) { + return '' + } + let start = 0 + while (str[start] === char) { + start++ + } + return str.slice(start) +} +function trim (str, char) { + if (char === null || char === undefined) { + char = ' ' + } + if (char.length !== 1) { + return '' + } + let start = 0 + while (str[start] === char) { + start++ + } + let end = str.length + while (str[end - 1] === char) { + end-- + } + if (start >= end) { + return '' + } + return str.slice(start, end) +} +function splitByString (separator, str, maxSplits) { if (maxSplits === undefined || maxSplits === null) { return str.split(separator) } return str.split(separator, maxSplits) } +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function positionCaseInsensitive (str, elem) { if (typeof str === 'string') { return str.toLowerCase().indexOf(String(elem).toLowerCase()) + 1 } else { return 0 } } +function position (str, elem) { if (typeof str === 'string') { return str.indexOf(String(elem)) + 1 } else { return 0 } } +function notLike (str, pattern) { return !__like(str, pattern, false) } +function notILike (str, pattern) { return !__like(str, pattern, true) } +function like (str, pattern) { return __like(str, pattern, false) } +function ilike (str, pattern) { return __like(str, pattern, true) } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? `, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 
'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __like(str, pattern, caseInsensitive = false) { + if (caseInsensitive) { + str = str.toLowerCase() + pattern = pattern.toLowerCase() + } + pattern = String(pattern) + .replaceAll(/[-/\\^$*+?.()|[\]{}]/g, '\\$&') + .replaceAll('%', '.*') + .replaceAll('_', '.') + return new RegExp(pattern).test(str) +} +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} + +print(trim(" hello world ")); +print(trimLeft(" hello world ")); +print(trimRight(" hello world ")); +print(trim("xxxx hello world xx", "x")); +print(trimLeft("xxxx hello world xx", "x")); +print(trimRight("xxxx hello world xx", "x")); +print(splitByString(" ", "hello world and more")); +print(splitByString(" ", "hello world and more", 1)); +print(splitByString(" ", "hello world and more", 2)); +print(splitByString(" ", "hello world and more", 10)); +print(like("banana", "N")); +print(like("banana", "n")); +print(like("banana", "naan")); +print(ilike("banana", "N")); +print(ilike("banana", "n")); +print(ilike("banana", "naan")); +print(notLike("banana", "N")); +print(notILike("banana", "NO")); +print(position("abc", "a")); +print(position("abc", "b")); +print(position("abc", "c")); +print(position("abc", "d")); +print(positionCaseInsensitive("AbC", "a")); +print(positionCaseInsensitive("AbC", "b")); +print(positionCaseInsensitive("AbC", "c")); +print(positionCaseInsensitive("AbC", "d")); diff --git a/hogvm/__tests__/__snapshots__/tuples.js b/hogvm/__tests__/__snapshots__/tuples.js new file mode 100644 index 00000000000..7ac28573165 --- /dev/null +++ b/hogvm/__tests__/__snapshots__/tuples.js @@ -0,0 +1,67 @@ +function tuple (...args) { const tuple = args.slice(); tuple.__isHogTuple = true; return tuple; } +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? 
`tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? `, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __getProperty(objectOrArray, key, nullish) { + if ((nullish && !objectOrArray) || key === 0) { return null } + if (Array.isArray(objectOrArray)) { + return key > 0 ? objectOrArray[key - 1] : objectOrArray[objectOrArray.length + key] + } else { + return objectOrArray[key] + } +} +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} + +print(tuple()); +print(tuple(1)); +print(tuple(1, 2)); +print(tuple(1, 2)); +print(tuple(1, 2, 3)); +print(tuple(1, "2", 3)); +print(tuple(1, tuple(2, 3), 4)); +print(tuple(1, tuple(2, tuple(3, 4)), 5)); +let a = tuple(1, 2, 3); +print(__getProperty(a, 2, false)); +print(__getProperty(a, 2, true)); +print(__getProperty(a, 8, true)); +print(__getProperty(__getProperty(__getProperty(tuple(1, tuple(2, tuple(3, 4)), 5), 2, false), 2, false), 2, false)); +print(__getProperty(__getProperty(__getProperty(tuple(1, tuple(2, tuple(3, 4)), 5), 2, true), 2, true), 2, true)); +print(__getProperty(__getProperty(__getProperty(tuple(1, tuple(2, tuple(3, 4)), 5), 2, true), 2, true), 2, true)); +print(__getProperty(__getProperty(__getProperty(tuple(1, tuple(2, tuple(3, 4)), 5), 4, true), 7, true), 2, true)); +print(__getProperty(__getProperty(__getProperty(tuple(1, tuple(2, tuple(3, 4)), 5), 4, true), 7, true), 2, true)); +print(__getProperty(__getProperty(__getProperty(tuple(1, tuple(2, tuple(3, 4)), 5), 2, false), 2, false), 2, false)); 
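+// Note: Hog tuple access is 1-based, so for a = tuple(1, 2, 3), __getProperty(a, 2, false)
+// yields 2 and negative keys count back from the end. The nullish flag makes chained access
+// null-safe, which is why the out-of-range lookup __getProperty(a, 8, true) prints as null.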
+print((__getProperty(__getProperty(__getProperty(tuple(1, tuple(2, tuple(3, 4)), 5), 2, false), 2, false), 2, false) + 1)); diff --git a/hogvm/__tests__/__snapshots__/typeof.js b/hogvm/__tests__/__snapshots__/typeof.js new file mode 100644 index 00000000000..e4287e026eb --- /dev/null +++ b/hogvm/__tests__/__snapshots__/typeof.js @@ -0,0 +1,88 @@ +function __x_typeof (value) { + if (value === null || value === undefined) { return 'null' + } else if (__isHogDateTime(value)) { return 'datetime' + } else if (__isHogDate(value)) { return 'date' + } else if (__isHogError(value)) { return 'error' + } else if (typeof value === 'function') { return 'function' + } else if (Array.isArray(value)) { if (value.__isHogTuple) { return 'tuple' } return 'array' + } else if (typeof value === 'object') { return 'object' + } else if (typeof value === 'number') { return Number.isInteger(value) ? 'integer' : 'float' + } else if (typeof value === 'string') { return 'string' + } else if (typeof value === 'boolean') { return 'boolean' } + return 'unknown' +} +function tuple (...args) { const tuple = args.slice(); tuple.__isHogTuple = true; return tuple; } +function toDateTime (input, zone) { return __toDateTime(input, zone) } +function toDate (input) { return __toDate(input) } +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function __toDateTime(input, zone) { let dt; + if (typeof input === 'number') { dt = input; } + else { const date = new Date(input); if (isNaN(date.getTime())) { throw new Error('Invalid date input'); } dt = date.getTime() / 1000; } + return { __hogDateTime__: true, dt: dt, zone: zone || 'UTC' }; } +function __toDate(input) { let date; + if (typeof input === 'number') { date = new Date(input * 1000); } else { date = new Date(input); } + if (isNaN(date.getTime())) { throw new Error('Invalid date input'); } + return { __hogDate__: true, year: date.getUTCFullYear(), month: date.getUTCMonth() + 1, day: date.getUTCDate() }; } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? `, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 
'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __lambda (fn) { return fn } +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} +function __x_Error (message, payload) { return __newHogError('Error', message, payload) } +function __newHogError(type, message, payload) { + let error = new Error(message || 'An error occurred'); + error.__hogError__ = true + error.type = type + error.payload = payload + return error +} + +function test(obj) { + print(__x_typeof(obj)); +} +test("hello world"); +test(123); +test(1.23); +test(true); +test(false); +test(null); +test({}); +test([]); +test(tuple(1, 2, 3)); +test(__lambda(() => (1 + 2))); +test(toDateTime("2021-01-01T00:00:00Z")); +test(toDate("2021-01-01")); +test(__x_Error("BigError", "message")); diff --git a/hogvm/__tests__/__snapshots__/upvalues.js b/hogvm/__tests__/__snapshots__/upvalues.js new file mode 100644 index 00000000000..39182158f6b --- /dev/null +++ b/hogvm/__tests__/__snapshots__/upvalues.js @@ -0,0 +1,57 @@ +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? 
`, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __lambda (fn) { return fn } +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} + +function returnCallable(a) { + return __lambda((x) => (x * a)); +} +let double = returnCallable(2); +let triple = returnCallable(3); +print(double(2)); +print(triple(2)); +print("----------"); +function outer() { + let x = "outside"; + function inner() { + print(x); + } + return inner; +} +let closure = outer(); +closure(); diff --git a/hogvm/__tests__/__snapshots__/variables.js b/hogvm/__tests__/__snapshots__/variables.js new file mode 100644 index 00000000000..c6cb97d354c --- /dev/null +++ b/hogvm/__tests__/__snapshots__/variables.js @@ -0,0 +1,53 @@ +function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? 
`, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +function __isHogError(obj) {return obj && obj.__hogError__ === true} +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', '`': '\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; +} + +print("-- test variables --"); +{ + let a = (1 + 2); + print(a); + let b = (a + 4); + print(b); +} +print("-- test variable reassignment --"); +{ + let a = 1; + a = (a + 3) + a = (a * 2) + print(a); +} diff --git a/hogvm/__tests__/arrays.hog b/hogvm/__tests__/arrays.hog index cbf74d696f8..7cb87188f7f 100644 --- a/hogvm/__tests__/arrays.hog +++ b/hogvm/__tests__/arrays.hog @@ -76,3 +76,8 @@ print('------') let c := [1,2,3] print(c[1], c[2], c[3], c[4]) print(c[-1], c[-2], c[-3], c[-4]) + +print('------') +print('a' in ['a', 'b', 'c']) +print('d' in ['a', 'b', 'c']) +print('a' in []) diff --git a/hogvm/__tests__/catch2.hog b/hogvm/__tests__/catch2.hog index cd48ab951cf..ea3c19550eb 100644 --- a/hogvm/__tests__/catch2.hog +++ b/hogvm/__tests__/catch2.hog @@ -19,7 +19,7 @@ try { } catch (e: FishError) { print(f'FishError: {e.message}') } catch (e: Error) { - print(f'Error of type {e.name}: {e.message}') + print(f'Error of type {e.type}: {e.message}') } try { @@ -29,7 +29,7 @@ try { print(f'Problem with your food: {e.message}') } } catch (e: Error) { - print(f'Error of type {e.name}: {e.message}') + print(f'Error of type {e.type}: {e.message}') } catch (e: FishError) { print(f'FishError: {e.message}') } diff --git a/hogvm/__tests__/dicts.hog b/hogvm/__tests__/dicts.hog index 8523c26cb5f..0c060bc2649 100644 --- a/hogvm/__tests__/dicts.hog +++ b/hogvm/__tests__/dicts.hog @@ -3,7 +3,10 @@ print({'key': 'value'}) print({'key': 'value', 'other': 'thing'}) print({'key': {'otherKey': 'value'}}) -let key := 3 +// We support non-string keys... in the HogVM. +// Keys are always converted to a string in the transpiled JS version. 
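+// For example, {1: 'value'} keeps the integer key 1 in the HogVM,
+// while the transpiled JS output prints it as {'1': 'value'}.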
+// TODO: this might be worth revisiting +let key := 'kk' print({key: 'value'}) print({'key': 'value', }.key) diff --git a/hogvm/__tests__/operations.hog b/hogvm/__tests__/operations.hog index 259353afcf7..6a5f299d22f 100644 --- a/hogvm/__tests__/operations.hog +++ b/hogvm/__tests__/operations.hog @@ -37,6 +37,9 @@ test('a' not ilike 'b') // true test('a' in 'car') // true test('a' in 'foo') // false test('a' not in 'car') // false +test('bax' like 'b_x') +test('baax' not like 'b_x') +test('baax' like 'b%x') test(concat('arg', 'another')) // 'arganother' test(concat(1, NULL)) // '1' test(concat(true, false)) // 'truefalse' diff --git a/hogvm/python/test/test_execute.py b/hogvm/python/test/test_execute.py index 6cdd701e826..c4ac04bcff3 100644 --- a/hogvm/python/test/test_execute.py +++ b/hogvm/python/test/test_execute.py @@ -10,7 +10,7 @@ from hogvm.python.operation import ( HOGQL_BYTECODE_VERSION as VERSION, ) from hogvm.python.utils import UncaughtHogVMException -from posthog.hogql.bytecode import create_bytecode +from posthog.hogql.compiler.bytecode import create_bytecode from posthog.hogql.parser import parse_expr, parse_program diff --git a/hogvm/python/utils.py b/hogvm/python/utils.py index 4b5ae2ee05d..b7c0ec66919 100644 --- a/hogvm/python/utils.py +++ b/hogvm/python/utils.py @@ -26,7 +26,7 @@ class UncaughtHogVMException(HogVMException): def like(string, pattern, flags=0): - pattern = re.escape(pattern).replace("%", ".*") + pattern = re.escape(pattern).replace("%", ".*").replace("_", ".") re_pattern = re.compile(pattern, flags) return re_pattern.search(string) is not None diff --git a/hogvm/stl/compile.py b/hogvm/stl/compile.py index 11cb44dfde8..0ef0cf731dd 100755 --- a/hogvm/stl/compile.py +++ b/hogvm/stl/compile.py @@ -5,7 +5,7 @@ import glob import json from posthog.hogql import ast -from posthog.hogql.bytecode import create_bytecode, parse_program +from posthog.hogql.compiler.bytecode import create_bytecode, parse_program source = "hogvm/stl/src/*.hog" target_ts = "hogvm/typescript/src/stl/bytecode.ts" diff --git a/hogvm/test.sh b/hogvm/test.sh index 99b3f0dda37..c30222e12a6 100755 --- a/hogvm/test.sh +++ b/hogvm/test.sh @@ -1,32 +1,119 @@ #!/bin/bash set -e + +# List of test files to skip the compiledjs tests +SKIP_COMPILEDJS_FILES=("crypto.hog") + +# Navigate to the script's directory +cd "$(dirname "$0")" + +# Build the project cd typescript pnpm run build cd .. +# Navigate to the project root (parent directory of 'hogvm') cd .. -rm -f hogvm/__tests__/__snapshots__/*.stdout.nodejs -rm -f hogvm/__tests__/__snapshots__/*.stdout.python +# Function to compute the basename for a given file +get_basename() { + local file="$1" + local base="${file%.hog}" + base="${base##*/}" + echo "hogvm/__tests__/__snapshots__/$base" +} -for file in hogvm/__tests__/*.hog; do +# Function to check if a value is in an array +is_in_array() { + local val="$1" + shift + local arr=("$@") + for item in "${arr[@]}"; do + if [ "$item" == "$val" ]; then + return 0 + fi + done + return 1 +} + +# Check if an argument is provided +if [ "$#" -eq 1 ]; then + test_file="$1" + # Adjust the test file path if it doesn't start with 'hogvm/' + if [[ ! "$test_file" == hogvm/* ]]; then + test_file="hogvm/__tests__/$test_file" + fi + # Check if the test file exists + if [ ! -f "$test_file" ]; then + echo "Test file $test_file does not exist." 
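+        # A bare name like strings.hog has already been resolved to hogvm/__tests__/strings.hog above.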
+ exit 1 + fi + test_files=("$test_file") + # Remove previous outputs for this test file only + basename=$(get_basename "$test_file") + rm -f "$basename.stdout.nodejs" "$basename.stdout.python" "$basename.stdout.compiledjs" +else + shopt -s nullglob + test_files=(hogvm/__tests__/*.hog) + shopt -u nullglob + + if [ ${#test_files[@]} -eq 0 ]; then + echo "No test files found in hogvm/__tests__/" + exit 1 + fi + + # Remove all previous outputs + rm -f hogvm/__tests__/__snapshots__/*.stdout.nodejs + rm -f hogvm/__tests__/__snapshots__/*.stdout.python + rm -f hogvm/__tests__/__snapshots__/*.stdout.compiledjs +fi + +for file in "${test_files[@]}"; do echo "Testing $file" - # from hogvm/__tests__/*.hog get hogvm/__tests__/__snapshots__/* - basename="${file%.hog}" - basename="${basename##*/}" - basename="hogvm/__tests__/__snapshots__/$basename" + basename=$(get_basename "$file") + filename=$(basename "$file") - ./bin/hoge $file $basename.hoge - ./bin/hog --nodejs $basename.hoge > $basename.stdout.nodejs - ./bin/hog --python $basename.hoge > $basename.stdout.python - set +e - diff $basename.stdout.nodejs $basename.stdout.python - if [ $? -eq 0 ]; then - mv $basename.stdout.nodejs $basename.stdout - rm $basename.stdout.python + ./bin/hoge "$file" "$basename.hoge" + ./bin/hog --nodejs "$basename.hoge" > "$basename.stdout.nodejs" + ./bin/hog --python "$basename.hoge" > "$basename.stdout.python" + + # Check if the current file should skip the compiledjs tests + if is_in_array "$filename" "${SKIP_COMPILEDJS_FILES[@]}"; then + # Skip compiledjs steps for this file + echo "Skipping compiledjs tests for $filename" + set +e + diff "$basename.stdout.nodejs" "$basename.stdout.python" + if [ $? -eq 0 ]; then + mv "$basename.stdout.nodejs" "$basename.stdout" + rm "$basename.stdout.python" + echo "Test passed" + else + echo "Test failed: Output differs between Node.js and Python interpreters." + fi + set -e else - echo "Test failed" + # Proceed with compiledjs tests + set +e + ./bin/hoge "$file" "$basename.js" + node "$basename.js" > "$basename.stdout.compiledjs" 2>&1 + set -e + + set +e + diff "$basename.stdout.nodejs" "$basename.stdout.compiledjs" + if [ $? -eq 0 ]; then + diff "$basename.stdout.nodejs" "$basename.stdout.python" + if [ $? -eq 0 ]; then + mv "$basename.stdout.nodejs" "$basename.stdout" + rm "$basename.stdout.python" + rm "$basename.stdout.compiledjs" + echo "Test passed" + else + echo "Test failed: Output differs between Node.js and Python interpreters." + fi + else + echo "Test failed: Output differs between Node.js interpreter and compiled JavaScript." 
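+            # The .stdout snapshot is only promoted when Node.js, compiled JS, and Python all agree.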
+ fi + set -e fi - set -e done diff --git a/hogvm/typescript/package.json b/hogvm/typescript/package.json index ff4bd95f532..f48fc844452 100644 --- a/hogvm/typescript/package.json +++ b/hogvm/typescript/package.json @@ -1,6 +1,6 @@ { "name": "@posthog/hogvm", - "version": "1.0.58", + "version": "1.0.59", "description": "PostHog Hog Virtual Machine", "types": "dist/index.d.ts", "source": "src/index.ts", diff --git a/hogvm/typescript/src/stl/stl.ts b/hogvm/typescript/src/stl/stl.ts index f398d6dd0b5..924c355cef9 100644 --- a/hogvm/typescript/src/stl/stl.ts +++ b/hogvm/typescript/src/stl/stl.ts @@ -19,6 +19,8 @@ import { } from './date' import { printHogStringOutput } from './print' +// TODO: this file should be generated from or merged with posthog/hogql/compiler/javascript_stl.py + function STLToString(args: any[]): string { if (isHogDate(args[0])) { const month = args[0].month @@ -71,9 +73,7 @@ export const STL: Record = { }, toString: { fn: STLToString, minArgs: 1, maxArgs: 1 }, toUUID: { - fn: (args) => { - return String(args[0]) - }, + fn: STLToString, minArgs: 1, maxArgs: 1, }, @@ -148,8 +148,8 @@ export const STL: Record = { }, tuple: { fn: (args) => { - const tuple = args.slice() - ;(tuple as any).__isHogTuple = true + const tuple = args.slice(); + (tuple as any).__isHogTuple = true return tuple }, minArgs: 0, diff --git a/hogvm/typescript/src/utils.ts b/hogvm/typescript/src/utils.ts index 66c934f1e48..86aeaa1c071 100644 --- a/hogvm/typescript/src/utils.ts +++ b/hogvm/typescript/src/utils.ts @@ -36,6 +36,7 @@ export function like( pattern = String(pattern) .replaceAll(/[-/\\^$*+?.()|[\]{}]/g, '\\$&') .replaceAll('%', '.*') + .replaceAll('_', '.') if (match) { return match((caseInsensitive ? '(?i)' : '') + pattern, string) } diff --git a/package.json b/package.json index c8b74b26479..a0de2a80f07 100644 --- a/package.json +++ b/package.json @@ -54,8 +54,8 @@ "typegen:check": "kea-typegen check", "typegen:watch": "kea-typegen watch --delete --show-ts-errors", "typegen:clean": "find frontend/src -type f -name '*Type.ts' -delete", - "storybook": "storybook dev -p 6006", - "build-storybook": "storybook build", + "storybook": "DEBUG=0 storybook dev -p 6006", + "build-storybook": "DEBUG=0 storybook build", "dev:migrate:postgres": "export DEBUG=1 && source env/bin/activate && python manage.py migrate", "dev:migrate:clickhouse": "export DEBUG=1 && source env/bin/activate && python manage.py migrate_clickhouse", "prepare": "husky install", @@ -76,8 +76,8 @@ "@medv/finder": "^3.1.0", "@microlink/react-json-view": "^1.21.3", "@monaco-editor/react": "4.6.0", - "@posthog/hogvm": "^1.0.58", - "@posthog/icons": "0.8.5", + "@posthog/hogvm": "^1.0.59", + "@posthog/icons": "0.9.1", "@posthog/plugin-scaffold": "^1.4.4", "@react-hook/size": "^2.1.2", "@rrweb/types": "2.0.0-alpha.13", @@ -154,7 +154,7 @@ "pmtiles": "^2.11.0", "postcss": "^8.4.31", "postcss-preset-env": "^9.3.0", - "posthog-js": "1.186.0", + "posthog-js": "1.186.3", "posthog-js-lite": "3.0.0", "prettier": "^2.8.8", "prop-types": "^15.7.2", diff --git a/plugin-server/package.json b/plugin-server/package.json index f116ce360ad..a24ecdbf06d 100644 --- a/plugin-server/package.json +++ b/plugin-server/package.json @@ -13,7 +13,7 @@ "start:dev": "NODE_ENV=dev BASE_DIR=.. nodemon --watch src/ --exec node -r @swc-node/register src/index.ts", "start:devNoWatch": "NODE_ENV=dev BASE_DIR=..
node -r @swc-node/register src/index.ts", "build": "pnpm clean && pnpm compile", - "clean": "rm -rf dist/*", + "clean": "rm -rf dist/* && rm -rf ../rust/cyclotron-node/index.node", "typescript:compile": "tsc -b", "typescript:check": "tsc --noEmit -p .", "compile": "pnpm typescript:compile", @@ -54,7 +54,7 @@ "@maxmind/geoip2-node": "^3.4.0", "@posthog/clickhouse": "^1.7.0", "@posthog/cyclotron": "file:../rust/cyclotron-node", - "@posthog/hogvm": "^1.0.58", + "@posthog/hogvm": "^1.0.59", "@posthog/plugin-scaffold": "1.4.4", "@sentry/node": "^7.49.0", "@sentry/profiling-node": "^0.3.0", diff --git a/plugin-server/pnpm-lock.yaml b/plugin-server/pnpm-lock.yaml index 4d99a3ab775..088161d2bac 100644 --- a/plugin-server/pnpm-lock.yaml +++ b/plugin-server/pnpm-lock.yaml @@ -47,8 +47,8 @@ dependencies: specifier: file:../rust/cyclotron-node version: file:../rust/cyclotron-node '@posthog/hogvm': - specifier: ^1.0.58 - version: 1.0.58(luxon@3.4.4) + specifier: ^1.0.59 + version: 1.0.59(luxon@3.4.4) '@posthog/plugin-scaffold': specifier: 1.4.4 version: 1.4.4 @@ -3119,8 +3119,8 @@ packages: engines: {node: '>=12'} dev: false - /@posthog/hogvm@1.0.58(luxon@3.4.4): - resolution: {integrity: sha512-n7NlJWth9WymJWd3w2YOKfq+soxAcycdfjNIVxxniL1bmEL+aI+Nff+MCPKrsv7YLj9qAnyLWBVAw9SZMksB1Q==} + /@posthog/hogvm@1.0.59(luxon@3.4.4): + resolution: {integrity: sha512-4KJfCXUhK7x5Wm3pheKWDmrbQ0y1lWlLWdVEjocdjSy3wOS8hQQqaFAVEKZs7hfk9pZqvNFh2UPgD4ccpwUQjA==} peerDependencies: luxon: ^3.4.4 dependencies: diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index ad4f940271f..bca00df0ab1 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -50,11 +50,11 @@ dependencies: specifier: 4.6.0 version: 4.6.0(monaco-editor@0.49.0)(react-dom@18.2.0)(react@18.2.0) '@posthog/hogvm': - specifier: ^1.0.58 - version: 1.0.58(luxon@3.5.0) + specifier: ^1.0.59 + version: 1.0.59(luxon@3.5.0) '@posthog/icons': - specifier: 0.8.5 - version: 0.8.5(react-dom@18.2.0)(react@18.2.0) + specifier: 0.9.1 + version: 0.9.1(react-dom@18.2.0)(react@18.2.0) '@posthog/plugin-scaffold': specifier: ^1.4.4 version: 1.4.4 @@ -284,8 +284,8 @@ dependencies: specifier: ^9.3.0 version: 9.3.0(postcss@8.4.31) posthog-js: - specifier: 1.186.0 - version: 1.186.0 + specifier: 1.186.3 + version: 1.186.3 posthog-js-lite: specifier: 3.0.0 version: 3.0.0 @@ -5418,16 +5418,16 @@ packages: resolution: {integrity: sha512-50/17A98tWUfQ176raKiOGXuYpLyyVMkxxG6oylzL3BPOlA6ADGdK7EYunSa4I064xerltq9TGXs8HmOk5E+vw==} dev: false - /@posthog/hogvm@1.0.58(luxon@3.5.0): - resolution: {integrity: sha512-n7NlJWth9WymJWd3w2YOKfq+soxAcycdfjNIVxxniL1bmEL+aI+Nff+MCPKrsv7YLj9qAnyLWBVAw9SZMksB1Q==} + /@posthog/hogvm@1.0.59(luxon@3.5.0): + resolution: {integrity: sha512-4KJfCXUhK7x5Wm3pheKWDmrbQ0y1lWlLWdVEjocdjSy3wOS8hQQqaFAVEKZs7hfk9pZqvNFh2UPgD4ccpwUQjA==} peerDependencies: luxon: ^3.4.4 dependencies: luxon: 3.5.0 dev: false - /@posthog/icons@0.8.5(react-dom@18.2.0)(react@18.2.0): - resolution: {integrity: sha512-bFPMgnR3ZaNnMQ81OznYFQRd7KaCqXcI8xS3qS49UBkSZpKeJgH86JbWXBXI2q2GZWX00gc+gZxEo5EBkY7KcQ==} + /@posthog/icons@0.9.1(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-9zlU1H7MZm2gSh1JsDzM25km6VDc/Y7HdNf6RyP5sUiHCHVMKhQQ8TA2IMq55v/uTFRc5Yen6BagOUvunD2kqQ==} peerDependencies: react: '>=16.14.0' react-dom: '>=16.14.0' @@ -6579,7 +6579,7 @@ packages: '@storybook/client-logger': 7.6.20 '@storybook/core-events': 7.6.20 '@storybook/global': 5.0.0 - qs: 6.13.0 + qs: 6.13.1 telejson: 7.2.0 tiny-invariant: 1.3.3 dev: true @@ -7093,7 +7093,7 @@ packages: dequal: 2.0.3 lodash: 4.17.21 
memoizerific: 1.11.3 - qs: 6.13.0 + qs: 6.13.1 synchronous-promise: 2.0.17 ts-dedent: 2.2.0 util-deprecate: 1.0.2 @@ -7253,7 +7253,7 @@ packages: dependencies: '@storybook/client-logger': 7.6.20 memoizerific: 1.11.3 - qs: 6.13.0 + qs: 6.13.1 dev: true /@storybook/router@7.6.4: @@ -17789,8 +17789,8 @@ packages: resolution: {integrity: sha512-dyajjnfzZD1tht4N7p7iwf7nBnR1MjVaVu+MKr+7gBgA39bn28wizCIJZztZPtHy4PY0YwtSGgwfBCuG/hnHgA==} dev: false - /posthog-js@1.186.0: - resolution: {integrity: sha512-WagGNrDtvyOhmX1Gtf1hJQMBy1mB1vx9gtC6BKEfJi2pvEFtQuAzQ9c/tMUTmY0o2ZF5ZBFiZ2IRs4kbFLMvPQ==} + /posthog-js@1.186.3: + resolution: {integrity: sha512-imtKUMccPgqw5H5Eo1hf23ezzSO/1rhYycglgfh7jVx7dslsj9QxMgBscH80I96rwTgtZOSQcX+ka4bVPHFBYA==} dependencies: core-js: 3.39.0 fflate: 0.4.8 @@ -18168,8 +18168,8 @@ packages: side-channel: 1.0.6 dev: true - /qs@6.13.0: - resolution: {integrity: sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==} + /qs@6.13.1: + resolution: {integrity: sha512-EJPeIn0CYrGu+hli1xilKAPXODtJ12T0sP63Ijx2/khC2JtuaN3JyNIpvmnkmaEtha9ocbG4A4cMcr+TvqvwQg==} engines: {node: '>=0.6'} dependencies: side-channel: 1.0.6 diff --git a/posthog/api/early_access_feature.py b/posthog/api/early_access_feature.py index 57885666fde..004725393b4 100644 --- a/posthog/api/early_access_feature.py +++ b/posthog/api/early_access_feature.py @@ -203,6 +203,7 @@ class EarlyAccessFeatureSerializerCreateOnly(EarlyAccessFeatureSerializer): "key": feature_flag_key, "name": f"Feature Flag for Feature {validated_data['name']}", "filters": filters, + "creation_context": "early_access_features", }, context=self.context, ) diff --git a/posthog/api/feature_flag.py b/posthog/api/feature_flag.py index 0ae7331437b..df94e997790 100644 --- a/posthog/api/feature_flag.py +++ b/posthog/api/feature_flag.py @@ -118,6 +118,14 @@ class FeatureFlagSerializer(TaggedItemSerializerMixin, serializers.HyperlinkedMo ) can_edit = serializers.SerializerMethodField() + CREATION_CONTEXT_CHOICES = ("feature_flags", "experiments", "surveys", "early_access_features", "web_experiments") + creation_context = serializers.ChoiceField( + choices=CREATION_CONTEXT_CHOICES, + write_only=True, + required=False, + help_text="Indicates the origin product of the feature flag. 
Choices: 'feature_flags', 'experiments', 'surveys', 'early_access_features', 'web_experiments'.", + ) + class Meta: model = FeatureFlag fields = [ @@ -142,6 +150,7 @@ class FeatureFlagSerializer(TaggedItemSerializerMixin, serializers.HyperlinkedMo "usage_dashboard", "analytics_dashboards", "has_enriched_analytics", + "creation_context", ] def get_can_edit(self, feature_flag: FeatureFlag) -> bool: @@ -320,6 +329,9 @@ class FeatureFlagSerializer(TaggedItemSerializerMixin, serializers.HyperlinkedMo validated_data["created_by"] = request.user validated_data["team_id"] = self.context["team_id"] tags = validated_data.pop("tags", None) # tags are created separately below as global tag relationships + creation_context = validated_data.pop( + "creation_context", "feature_flags" + ) # default to "feature_flags" if an alternative value is not provided self._update_filters(validated_data) @@ -350,7 +362,9 @@ class FeatureFlagSerializer(TaggedItemSerializerMixin, serializers.HyperlinkedMo _create_usage_dashboard(instance, request.user) - report_user_action(request.user, "feature flag created", instance.get_analytics_metadata()) + analytics_metadata = instance.get_analytics_metadata() + analytics_metadata["creation_context"] = creation_context + report_user_action(request.user, "feature flag created", analytics_metadata) return instance diff --git a/posthog/api/hog.py b/posthog/api/hog.py index 6ee0f94506a..d9d4c351669 100644 --- a/posthog/api/hog.py +++ b/posthog/api/hog.py @@ -6,7 +6,7 @@ from rest_framework.response import Response from hogql_parser import parse_program from posthog.api.mixins import PydanticModelMixin from posthog.api.routing import TeamAndOrgViewSetMixin -from posthog.hogql.bytecode import create_bytecode, Local +from posthog.hogql.compiler.bytecode import create_bytecode, Local from posthog.hogql.errors import ExposedHogQLError from posthog.schema import HogCompileResponse diff --git a/posthog/api/services/query.py b/posthog/api/services/query.py index b98bb979bfc..2d61a92d34d 100644 --- a/posthog/api/services/query.py +++ b/posthog/api/services/query.py @@ -7,7 +7,7 @@ from rest_framework.exceptions import ValidationError from hogvm.python.debugger import color_bytecode from posthog.clickhouse.query_tagging import tag_queries from posthog.cloud_utils import is_cloud -from posthog.hogql.bytecode import execute_hog +from posthog.hogql.compiler.bytecode import execute_hog from posthog.hogql.constants import LimitContext from posthog.hogql.context import HogQLContext from posthog.hogql.database.database import create_hogql_database, serialize_database diff --git a/posthog/api/survey.py b/posthog/api/survey.py index 1cd91b881c0..3d13981a867 100644 --- a/posthog/api/survey.py +++ b/posthog/api/survey.py @@ -640,6 +640,7 @@ class SurveySerializerCreateUpdateOnly(serializers.ModelSerializer): "name": f"Targeting flag for survey {name}", "filters": filters, "active": active, + "creation_context": "surveys", }, context=self.context, ) diff --git a/posthog/api/test/__snapshots__/test_feature_flag.ambr b/posthog/api/test/__snapshots__/test_feature_flag.ambr index d8c23601366..177849ed4b9 100644 --- a/posthog/api/test/__snapshots__/test_feature_flag.ambr +++ b/posthog/api/test/__snapshots__/test_feature_flag.ambr @@ -1841,7 +1841,8 @@ "posthog_experiment"."archived", "posthog_experiment"."type", "posthog_experiment"."variants", - "posthog_experiment"."metrics" + "posthog_experiment"."metrics", + "posthog_experiment"."metrics_secondary" FROM "posthog_experiment" WHERE 
"posthog_experiment"."exposure_cohort_id" = 99999 ''' diff --git a/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr b/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr index ae63348de81..aa7aba9ddfb 100644 --- a/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr +++ b/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr @@ -1099,7 +1099,8 @@ "posthog_experiment"."archived", "posthog_experiment"."type", "posthog_experiment"."variants", - "posthog_experiment"."metrics" + "posthog_experiment"."metrics", + "posthog_experiment"."metrics_secondary" FROM "posthog_experiment" WHERE "posthog_experiment"."feature_flag_id" = 99999 ''' diff --git a/posthog/api/test/test_early_access_feature.py b/posthog/api/test/test_early_access_feature.py index 89d1d7369d0..311fbae3cb1 100644 --- a/posthog/api/test/test_early_access_feature.py +++ b/posthog/api/test/test_early_access_feature.py @@ -3,6 +3,7 @@ from unittest.mock import ANY from rest_framework import status from django.core.cache import cache from django.test.client import Client +from unittest.mock import patch from posthog.models.early_access_feature import EarlyAccessFeature from posthog.models import FeatureFlag, Person @@ -520,6 +521,36 @@ class TestEarlyAccessFeature(APIBaseTest): ], } + @patch("posthog.api.feature_flag.report_user_action") + def test_creation_context_is_set_to_early_access_features(self, mock_capture): + response = self.client.post( + f"/api/projects/{self.team.id}/early_access_feature/", + data={ + "name": "Hick bondoogling", + "description": 'Boondoogle your hicks with one click. Just click "bazinga"!', + "stage": "concept", + }, + format="json", + ) + response_data = response.json() + ff_instance = FeatureFlag.objects.get(id=response_data["feature_flag"]["id"]) + mock_capture.assert_called_once_with( + ANY, + "feature flag created", + { + "groups_count": 1, + "has_variants": False, + "variants_count": 0, + "has_rollout_percentage": False, + "has_filters": False, + "filter_count": 0, + "created_at": ff_instance.created_at, + "aggregating_by_groups": False, + "payload_count": 0, + "creation_context": "early_access_features", + }, + ) + class TestPreviewList(BaseTest, QueryMatchingTest): def setUp(self): diff --git a/posthog/api/test/test_feature_flag.py b/posthog/api/test/test_feature_flag.py index b5a2cfd6d18..2d4745313b9 100644 --- a/posthog/api/test/test_feature_flag.py +++ b/posthog/api/test/test_feature_flag.py @@ -300,6 +300,7 @@ class TestFeatureFlag(APIBaseTest, ClickhouseTestMixin): "created_at": instance.created_at, "aggregating_by_groups": True, "payload_count": 0, + "creation_context": "feature_flags", }, ) @@ -334,6 +335,7 @@ class TestFeatureFlag(APIBaseTest, ClickhouseTestMixin): "created_at": instance.created_at, "aggregating_by_groups": False, "payload_count": 0, + "creation_context": "feature_flags", }, ) @@ -385,6 +387,7 @@ class TestFeatureFlag(APIBaseTest, ClickhouseTestMixin): "created_at": instance.created_at, "aggregating_by_groups": False, "payload_count": 0, + "creation_context": "feature_flags", }, ) @@ -438,6 +441,7 @@ class TestFeatureFlag(APIBaseTest, ClickhouseTestMixin): "created_at": instance.created_at, "aggregating_by_groups": False, "payload_count": 0, + "creation_context": "feature_flags", }, ) diff --git a/posthog/api/test/test_survey.py b/posthog/api/test/test_survey.py index cb124c9b970..ee1cc97a696 100644 --- a/posthog/api/test/test_survey.py +++ b/posthog/api/test/test_survey.py @@ -60,6 +60,59 @@ class 
TestSurvey(APIBaseTest): ] assert response_data["created_by"]["id"] == self.user.id + @patch("posthog.api.feature_flag.report_user_action") + def test_creation_context_is_set_to_surveys(self, mock_capture): + response = self.client.post( + f"/api/projects/{self.team.id}/surveys/", + data={ + "name": "survey with targeting", + "type": "popover", + "targeting_flag_filters": { + "groups": [ + { + "variant": None, + "rollout_percentage": None, + "properties": [ + { + "key": "billing_plan", + "value": ["cloud"], + "operator": "exact", + "type": "person", + } + ], + } + ] + }, + "conditions": {"url": "https://app.posthog.com/notebooks"}, + }, + format="json", + ) + + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + response_data = response.json() + + # Ensure that a FeatureFlag has been created + ff_instance = FeatureFlag.objects.get(id=response_data["internal_targeting_flag"]["id"]) + self.assertIsNotNone(ff_instance) + + # Verify that report_user_action was called for the feature flag creation + mock_capture.assert_any_call( + ANY, + "feature flag created", + { + "groups_count": 1, + "has_variants": False, + "variants_count": 0, + "has_rollout_percentage": True, + "has_filters": True, + "filter_count": 2, + "created_at": ff_instance.created_at, + "aggregating_by_groups": False, + "payload_count": 0, + "creation_context": "surveys", + }, + ) + def test_create_adds_user_interactivity_filters(self): response = self.client.post( f"/api/projects/{self.team.id}/surveys/", diff --git a/posthog/api/test/test_web_experiment.py b/posthog/api/test/test_web_experiment.py index 679df4411c7..7b53e2ce0fa 100644 --- a/posthog/api/test/test_web_experiment.py +++ b/posthog/api/test/test_web_experiment.py @@ -1,6 +1,7 @@ from datetime import datetime, timedelta from rest_framework import status +from unittest.mock import ANY, patch from posthog.models import WebExperiment from posthog.test.base import APIBaseTest @@ -30,7 +31,8 @@ class TestWebExperiment(APIBaseTest): format="json", ) - def test_can_create_basic_web_experiment(self): + @patch("posthog.api.feature_flag.report_user_action") + def test_can_create_basic_web_experiment(self, mock_capture): response = self._create_web_experiment() response_data = response.json() assert response.status_code == status.HTTP_201_CREATED, response_data @@ -53,6 +55,22 @@ class TestWebExperiment(APIBaseTest): assert web_experiment.type == "web" assert web_experiment.variants.get("control") is not None assert web_experiment.variants.get("test") is not None + mock_capture.assert_called_once_with( + ANY, + "feature flag created", + { + "groups_count": 1, + "has_variants": True, + "variants_count": 2, + "has_rollout_percentage": True, + "has_filters": False, + "filter_count": 0, + "created_at": linked_flag.created_at, + "aggregating_by_groups": False, + "payload_count": 0, + "creation_context": "web_experiments", + }, + ) def test_can_list_active_web_experiments(self): response = self._create_web_experiment("active_web_experiment") diff --git a/posthog/api/web_experiment.py b/posthog/api/web_experiment.py index 81aae23f2da..d90d400404d 100644 --- a/posthog/api/web_experiment.py +++ b/posthog/api/web_experiment.py @@ -98,6 +98,7 @@ class WebExperimentsAPISerializer(serializers.ModelSerializer): "name": f'Feature Flag for Experiment {validated_data["name"]}', "filters": filters, "active": False, + "creation_context": "web_experiments", }, context=self.context, ) diff --git a/posthog/cdp/filters.py b/posthog/cdp/filters.py index 6e655e3338c..04fde83e5c5 100644 --- 
a/posthog/cdp/filters.py +++ b/posthog/cdp/filters.py @@ -1,6 +1,6 @@ from typing import Optional from posthog.models.action.action import Action -from posthog.hogql.bytecode import create_bytecode +from posthog.hogql.compiler.bytecode import create_bytecode from posthog.hogql.parser import parse_expr from posthog.hogql.property import action_to_expr, property_to_expr, ast from posthog.models.team.team import Team diff --git a/posthog/cdp/templates/__init__.py b/posthog/cdp/templates/__init__.py index 2bf9f517aee..43917a62e75 100644 --- a/posthog/cdp/templates/__init__.py +++ b/posthog/cdp/templates/__init__.py @@ -38,6 +38,7 @@ from .google_cloud_storage.template_google_cloud_storage import ( TemplateGoogleCloudStorageMigrator, ) from .airtable.template_airtable import template as airtable +from .brevo.template_brevo import template as brevo from ._internal.template_broadcast import template_new_broadcast as _broadcast HOG_FUNCTION_TEMPLATES = [ @@ -50,6 +51,7 @@ HOG_FUNCTION_TEMPLATES = [ avo, aws_kinesis, braze, + brevo, clearbit, customerio, discord, diff --git a/posthog/cdp/templates/brevo/template_brevo.py b/posthog/cdp/templates/brevo/template_brevo.py new file mode 100644 index 00000000000..89ae28007c6 --- /dev/null +++ b/posthog/cdp/templates/brevo/template_brevo.py @@ -0,0 +1,81 @@ +from posthog.cdp.templates.hog_function_template import HogFunctionTemplate + +template: HogFunctionTemplate = HogFunctionTemplate( + status="beta", + type="destination", + id="template-brevo", + name="Brevo", + description="Update contacts in Brevo", + icon_url="/static/services/brevo.png", + category=["Email Marketing"], + hog=""" +if (empty(inputs.email)) { + print('No email set. Skipping...') + return +} + +let body := { + 'email': inputs.email, + 'updateEnabled': true, + 'attributes': {} +} + +for (let key, value in inputs.attributes) { + if (not empty(value)) { + body.attributes[key] := value + } +} + +let res := fetch(f'https://api.brevo.com/v3/contacts', { + 'method': 'POST', + 'headers': { + 'api-key': inputs.apiKey, + 'Content-Type': 'application/json', + }, + 'body': body +}) +if (res.status >= 400) { + throw Error(f'Error from api.brevo.com (status {res.status}): {res.body}') +} +""".strip(), + inputs_schema=[ + { + "key": "apiKey", + "type": "string", + "label": "Brevo API Key", + "description": "Check out this page on how to get your API key: https://help.brevo.com/hc/en-us/articles/209467485-Create-and-manage-your-API-keys", + "secret": True, + "required": True, + }, + { + "key": "email", + "type": "string", + "label": "Email of the user", + "description": "Where to find the email for the contact to be created. 
You can use the filters section to filter out unwanted emails or internal users.", + "default": "{person.properties.email}", + "secret": False, + "required": True, + }, + { + "key": "attributes", + "type": "dictionary", + "label": "Attributes", + "description": "For information on potential attributes, refer to the following page: https://help.brevo.com/hc/en-us/articles/10617359589906-Create-and-manage-contact-attributes", + "default": { + "EMAIL": "{person.properties.email}", + "FIRSTNAME": "{person.properties.firstname}", + "LASTNAME": "{person.properties.lastname}", + }, + "secret": False, + "required": True, + }, + ], + filters={ + "events": [ + {"id": "$identify", "name": "$identify", "type": "events", "order": 0}, + {"id": "$set", "name": "$set", "type": "events", "order": 0}, + ], + "actions": [], + "filter_test_accounts": True, + }, +) diff --git a/posthog/cdp/templates/brevo/test_template_brevo.py b/posthog/cdp/templates/brevo/test_template_brevo.py new file mode 100644 index 00000000000..839dabbc5a3 --- /dev/null +++ b/posthog/cdp/templates/brevo/test_template_brevo.py @@ -0,0 +1,46 @@ +from inline_snapshot import snapshot +from posthog.cdp.templates.helpers import BaseHogFunctionTemplateTest +from posthog.cdp.templates.brevo.template_brevo import ( + template as template_brevo, +) + + +def create_inputs(**kwargs): + inputs = { + "apiKey": "apikey12345", + "email": "max@posthog.com", + "attributes": {"EMAIL": "max@posthog.com", "FIRSTNAME": "Max"}, + } + inputs.update(kwargs) + + return inputs + + +class TestTemplateBrevo(BaseHogFunctionTemplateTest): + template = template_brevo + + def test_function_works(self): + self.run_function(inputs=create_inputs()) + assert self.get_mock_fetch_calls()[0] == snapshot( + ( + "https://api.brevo.com/v3/contacts", + { + "method": "POST", + "headers": { + "api-key": "apikey12345", + "Content-Type": "application/json", + }, + "body": { + "email": "max@posthog.com", + "updateEnabled": True, + "attributes": {"EMAIL": "max@posthog.com", "FIRSTNAME": "Max"}, + }, + }, + ) + ) + + def test_function_requires_identifier(self): + self.run_function(inputs=create_inputs(email="")) + + assert not self.get_mock_fetch_calls() + assert self.get_mock_print_calls() == snapshot([("No email set. 
Skipping...",)]) diff --git a/posthog/cdp/test/test_filters.py b/posthog/cdp/test/test_filters.py index 978b9ee2a6f..fecb983aa5b 100644 --- a/posthog/cdp/test/test_filters.py +++ b/posthog/cdp/test/test_filters.py @@ -3,7 +3,7 @@ from inline_snapshot import snapshot from hogvm.python.operation import HOGQL_BYTECODE_VERSION from posthog.cdp.filters import hog_function_filters_to_expr -from posthog.hogql.bytecode import create_bytecode +from posthog.hogql.compiler.bytecode import create_bytecode from posthog.models.action.action import Action from posthog.test.base import APIBaseTest, ClickhouseTestMixin, QueryMatchingTest diff --git a/posthog/cdp/validation.py b/posthog/cdp/validation.py index 321f135614e..0b2bbe2237d 100644 --- a/posthog/cdp/validation.py +++ b/posthog/cdp/validation.py @@ -2,7 +2,7 @@ import logging from typing import Any, Optional from rest_framework import serializers -from posthog.hogql.bytecode import create_bytecode +from posthog.hogql.compiler.bytecode import create_bytecode from posthog.hogql.parser import parse_program, parse_string_template logger = logging.getLogger(__name__) diff --git a/posthog/demo/matrix/manager.py b/posthog/demo/matrix/manager.py index 08f45ef163e..8c69ef06a8d 100644 --- a/posthog/demo/matrix/manager.py +++ b/posthog/demo/matrix/manager.py @@ -137,6 +137,7 @@ class MatrixManager: ) for cohort in Cohort.objects.filter(team=team): cohort.calculate_people_ch(pending_version=0) + team.project.save() team.save() def _save_analytics_data(self, data_team: Team): diff --git a/posthog/demo/products/hedgebox/matrix.py b/posthog/demo/products/hedgebox/matrix.py index 1ae9da06c1f..5990f7a7442 100644 --- a/posthog/demo/products/hedgebox/matrix.py +++ b/posthog/demo/products/hedgebox/matrix.py @@ -98,6 +98,8 @@ class HedgeboxMatrix(Matrix): def set_project_up(self, team, user): super().set_project_up(team, user) + team.project.product_description = "Dropbox for hedgehogs. We're a file sharing and collaboration platform. Free for limited personal use, with paid plans available." 
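+ # autocapture: opt the demo project into web vitals capture up front (moved from the bottom of this file)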
+ team.autocapture_web_vitals_opt_in = True # Actions interacted_with_file_action = Action.objects.create( @@ -882,6 +884,3 @@ class HedgeboxMatrix(Matrix): ) except IntegrityError: pass # This can happen if demo data generation is re-run for the same project - - # autocapture - team.autocapture_web_vitals_opt_in = True diff --git a/posthog/hogql/cli.py b/posthog/hogql/cli.py index 17b443db06f..36be3d1f086 100644 --- a/posthog/hogql/cli.py +++ b/posthog/hogql/cli.py @@ -2,7 +2,8 @@ import sys import json from hogvm.python.execute import execute_bytecode -from .bytecode import create_bytecode, parse_program +from posthog.hogql.compiler.bytecode import create_bytecode, parse_program +from posthog.hogql.compiler.javascript import to_js_program modifiers = [arg for arg in sys.argv if arg.startswith("-")] args = [arg for arg in sys.argv if arg != "" and not arg.startswith("-")] @@ -14,46 +15,53 @@ if not filename.endswith(".hog") and not filename.endswith(".hoge"): with open(filename) as file: code = file.read() -if filename.endswith(".hog"): - bytecode = create_bytecode(parse_program(code)).bytecode +if "--compile" in modifiers and len(args) == 3 and args[2].endswith(".js"): + target = args[2] + js_program = to_js_program(code) + with open(target, "w") as file: + file.write(js_program + "\n") + else: - bytecode = json.loads(code) - -if "--run" in modifiers: - if len(args) != 2: - raise ValueError("Must specify exactly one filename") - - response = execute_bytecode(bytecode, globals=None, timeout=5, team=None, debug="--debug" in modifiers) - for line in response.stdout: - print(line) # noqa: T201 - -elif "--out" in modifiers: - if len(args) != 2: - raise ValueError("Must specify exactly one filename") - print(json.dumps(bytecode)) # noqa: T201 - -elif "--compile" in modifiers: - if len(args) == 3: - target = args[2] + if filename.endswith(".hog"): + bytecode = create_bytecode(parse_program(code)).bytecode else: - target = filename[:-4] + ".hoge" + bytecode = json.loads(code) + + if "--run" in modifiers: if len(args) != 2: raise ValueError("Must specify exactly one filename") - # write bytecode to file - with open(target, "w") as file: - max_length = 120 - line = "[" - for index, op in enumerate(bytecode): - encoded = json.dumps(op) - if len(line) + len(encoded) > max_length - 2: - file.write(line + "\n") - line = "" - line += (" " if len(line) > 1 else "") + encoded + ("]" if index == len(bytecode) - 1 else ",") - if line == "[": - file.write(line + "]\n") - elif line != "": - file.write(line + "\n") + response = execute_bytecode(bytecode, globals=None, timeout=5, team=None, debug="--debug" in modifiers) + for line in response.stdout: + print(line) # noqa: T201 -else: - raise ValueError("Must specify either --run or --compile") + elif "--out" in modifiers: + if len(args) != 2: + raise ValueError("Must specify exactly one filename") + print(json.dumps(bytecode)) # noqa: T201 + + elif "--compile" in modifiers: + if len(args) == 3: + target = args[2] + else: + target = filename[:-4] + ".hoge" + if len(args) != 2: + raise ValueError("Must specify exactly one filename") + + # write bytecode to file + with open(target, "w") as file: + max_length = 120 + line = "[" + for index, op in enumerate(bytecode): + encoded = json.dumps(op) + if len(line) + len(encoded) > max_length - 2: + file.write(line + "\n") + line = "" + line += (" " if len(line) > 1 else "") + encoded + ("]" if index == len(bytecode) - 1 else ",") + if line == "[": + file.write(line + "]\n") + elif line != "": + file.write(line + "\n") + 
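+ # none of --run / --out / --compile matched: fall through to the usage error below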
+ else: + raise ValueError("Must specify either --run or --compile") diff --git a/posthog/hogql/bytecode.py b/posthog/hogql/compiler/bytecode.py similarity index 100% rename from posthog/hogql/bytecode.py rename to posthog/hogql/compiler/bytecode.py diff --git a/posthog/hogql/compiler/javascript.py b/posthog/hogql/compiler/javascript.py new file mode 100644 index 00000000000..16989c6117e --- /dev/null +++ b/posthog/hogql/compiler/javascript.py @@ -0,0 +1,572 @@ +import dataclasses +import json +import re +from enum import StrEnum +from typing import Any, Optional + +from posthog.hogql import ast +from posthog.hogql.base import AST +from posthog.hogql.compiler.javascript_stl import STL_FUNCTIONS, import_stl_functions +from posthog.hogql.errors import QueryError, NotImplementedError +from posthog.hogql.parser import parse_expr, parse_program +from posthog.hogql.visitor import Visitor + +_JS_GET_GLOBAL = "__getGlobal" +_JS_KEYWORDS = { + "await", + "break", + "case", + "catch", + "class", + "const", + "continue", + "debugger", + "default", + "delete", + "do", + "else", + "enum", + "export", + "extends", + "false", + "finally", + "for", + "function", + "if", + "import", + "in", + "instanceof", + "new", + "null", + "return", + "super", + "switch", + "this", + "throw", + "true", + "try", + "typeof", + "var", + "void", + "while", + "with", + "yield", + "implements", + "interface", + "let", + "package", + "private", + "protected", + "public", + "static", + "arguments", + "eval", + "Error", + _JS_GET_GLOBAL, # don't let this get overridden +} + + +@dataclasses.dataclass +class Local: + name: str + depth: int + + +def to_js_program(code: str) -> str: + compiler = JavaScriptCompiler() + code = compiler.visit(parse_program(code)) + imports = compiler.get_inlined_stl() + return imports + ("\n\n" if imports else "") + code + + +def to_js_expr(expr: str) -> str: + return JavaScriptCompiler().visit(parse_expr(expr)) + + +def _as_block(node: ast.Statement) -> ast.Block: + if isinstance(node, ast.Block): + return node + return ast.Block(declarations=[node]) + + +def _sanitize_identifier(name: str | int) -> str: + name = str(name) + if re.match(r"^[a-zA-Z_][a-zA-Z0-9_]*$", name): + if name in _JS_KEYWORDS: + return f"__x_{name}" + if name.startswith("__x_"): + # add a second __x_ to avoid conflicts with our internal variables + return f"__x_{name}" + return name + else: + return f"[{json.dumps(name)}]" + + +class JavaScriptCompiler(Visitor): + def __init__( + self, + args: Optional[list[str]] = None, + locals: Optional[list[Local]] = None, + ): + super().__init__() + self.locals: list[Local] = locals or [] + self.scope_depth = 0 + self.args = args or [] + self.indent_level = 0 + self.inlined_stl: set[str] = set() + + # Initialize locals with function arguments + for arg in self.args: + self._declare_local(arg) + + def get_inlined_stl(self) -> str: + return import_stl_functions(self.inlined_stl) + + def _start_scope(self): + self.scope_depth += 1 + + def _end_scope(self): + self.locals = [local for local in self.locals if local.depth < self.scope_depth] + self.scope_depth -= 1 + + def _declare_local(self, name: str): + for local in reversed(self.locals): + if local.depth == self.scope_depth and local.name == name: + raise QueryError(f"Variable `{name}` already declared in this scope") + self.locals.append(Local(name=name, depth=self.scope_depth)) + + def _indent(self, code: str) -> str: + indentation = " " * self.indent_level + return "\n".join(indentation + line if line else "" for line in code.split("\n")) 
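+ # A few illustrative outputs (taken from test_javascript.py below):
+ #   to_js_expr("1 and 2") -> '!!(1 && 2)'
+ #   to_js_expr("properties.bla") -> '__getProperty(__getGlobal("properties"), "bla", true)'
+ #   _sanitize_identifier("class") -> '__x_class'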
+ + def visit_and(self, node: ast.And): + code = " && ".join([self.visit(expr) for expr in node.exprs]) + return f"!!({code})" + + def visit_or(self, node: ast.Or): + code = " || ".join([self.visit(expr) for expr in node.exprs]) + return f"!!({code})" + + def visit_not(self, node: ast.Not): + expr_code = self.visit(node.expr) + return f"(!{expr_code})" + + def visit_compare_operation(self, node: ast.CompareOperation): + left_code = self.visit(node.left) + right_code = self.visit(node.right) + op = node.op + + op_map = { + ast.CompareOperationOp.Eq: "==", + ast.CompareOperationOp.NotEq: "!=", + ast.CompareOperationOp.Gt: ">", + ast.CompareOperationOp.GtEq: ">=", + ast.CompareOperationOp.Lt: "<", + ast.CompareOperationOp.LtEq: "<=", + } + + if op in op_map: + return f"({left_code} {op_map[op]} {right_code})" + elif op == ast.CompareOperationOp.In: + return f"({right_code}.includes({left_code}))" + elif op == ast.CompareOperationOp.NotIn: + return f"(!{right_code}.includes({left_code}))" + elif op == ast.CompareOperationOp.Like: + self.inlined_stl.add("like") + return f"like({left_code}, {right_code})" + elif op == ast.CompareOperationOp.ILike: + self.inlined_stl.add("ilike") + return f"ilike({left_code}, {right_code})" + elif op == ast.CompareOperationOp.NotLike: + self.inlined_stl.add("like") + return f"!like({left_code}, {right_code})" + elif op == ast.CompareOperationOp.NotILike: + self.inlined_stl.add("ilike") + return f"!ilike({left_code}, {right_code})" + elif op == ast.CompareOperationOp.Regex: + # TODO: re2? + return f"new RegExp({right_code}).test({left_code})" + elif op == ast.CompareOperationOp.IRegex: + return f'new RegExp({right_code}, "i").test({left_code})' + elif op == ast.CompareOperationOp.NotRegex: + return f"!(new RegExp({right_code}).test({left_code}))" + elif op == ast.CompareOperationOp.NotIRegex: + return f'!(new RegExp({right_code}, "i").test({left_code}))' + elif op == ast.CompareOperationOp.InCohort or op == ast.CompareOperationOp.NotInCohort: + cohort_name = "" + if isinstance(node.right, ast.Constant): + if isinstance(node.right.value, int): + cohort_name = f" (cohort id={node.right.value})" + else: + cohort_name = f" (cohort: {str(node.right.value)})" + raise QueryError( + f"Can't use cohorts in real-time filters. Please inline the relevant expressions{cohort_name}." 
+ ) + else: + raise QueryError(f"Unsupported comparison operator: {op}") + + def visit_arithmetic_operation(self, node: ast.ArithmeticOperation): + left_code = self.visit(node.left) + right_code = self.visit(node.right) + op_map = { + ast.ArithmeticOperationOp.Add: "+", + ast.ArithmeticOperationOp.Sub: "-", + ast.ArithmeticOperationOp.Mult: "*", + ast.ArithmeticOperationOp.Div: "/", + ast.ArithmeticOperationOp.Mod: "%", + } + op_str = op_map[node.op] + return f"({left_code} {op_str} {right_code})" + + def visit_field(self, node: ast.Field): + found_local = any(local.name == str(node.chain[0]) for local in self.locals) + array_code = "" + for index, element in enumerate(node.chain): + if index == 0: + if found_local: + array_code = _sanitize_identifier(element) + elif element in STL_FUNCTIONS: + self.inlined_stl.add(str(element)) + array_code = f"{_sanitize_identifier(element)}" + else: + array_code = f"{_JS_GET_GLOBAL}({json.dumps(element)})" + continue + + if (isinstance(element, int) and not isinstance(element, bool)) or isinstance(element, str): + self.inlined_stl.add("__getProperty") + array_code = f"__getProperty({array_code}, {json.dumps(element)}, true)" + else: + raise QueryError(f"Unsupported element: {element} ({type(element)})") + return array_code + + def visit_tuple_access(self, node: ast.TupleAccess): + tuple_code = self.visit(node.tuple) + index_code = str(node.index) + self.inlined_stl.add("__getProperty") + return f"__getProperty({tuple_code}, {index_code}, {json.dumps(node.nullish)})" + + def visit_array_access(self, node: ast.ArrayAccess): + array_code = self.visit(node.array) + property_code = self.visit(node.property) + self.inlined_stl.add("__getProperty") + return f"__getProperty({array_code}, {property_code}, {json.dumps(node.nullish)})" + + def visit_constant(self, node: ast.Constant): + value = node.value + if value is True: + return "true" + elif value is False: + return "false" + elif value is None: + return "null" + elif isinstance(value, int | float | str): + return json.dumps(value) + else: + raise QueryError(f"Unsupported constant type: {type(value)}") + + def visit_call(self, node: ast.Call): + if node.params is not None: + return self.visit(ast.ExprCall(expr=ast.Call(name=node.name, args=node.params), args=node.args or [])) + + # Handle special functions + if node.name == "not" and len(node.args) == 1: + expr_code = self.visit(node.args[0]) + return f"(!{expr_code})" + if node.name == "and" and len(node.args) > 1: + exprs_code = " && ".join([self.visit(arg) for arg in node.args]) + return f"({exprs_code})" + if node.name == "or" and len(node.args) > 1: + exprs_code = " || ".join([self.visit(arg) for arg in node.args]) + return f"({exprs_code})" + if node.name == "if" and len(node.args) >= 2: + condition_code = self.visit(node.args[0]) + then_code = self.visit(node.args[1]) + else_code = self.visit(node.args[2]) if len(node.args) == 3 else "null" + return f"({condition_code} ? {then_code} : {else_code})" + if node.name == "multiIf" and len(node.args) >= 2: + + def build_nested_if(args): + condition_code = self.visit(args[0]) + then_code = self.visit(args[1]) + if len(args) == 2: + return f"({condition_code} ? {then_code} : null)" + elif len(args) == 3: + else_code = self.visit(args[2]) + return f"({condition_code} ? {then_code} : {else_code})" + else: + else_code = build_nested_if(args[2:]) + return f"({condition_code} ? 
{then_code} : {else_code})" + + return build_nested_if(node.args) + if node.name == "ifNull" and len(node.args) == 2: + expr_code = self.visit(node.args[0]) + if_null_code = self.visit(node.args[1]) + return f"({expr_code} ?? {if_null_code})" + + if node.name in STL_FUNCTIONS: + self.inlined_stl.add(node.name) + name = _sanitize_identifier(node.name) + args_code = ", ".join(self.visit(arg) for arg in node.args) + return f"{name}({args_code})" + else: + # Regular function calls + name = _sanitize_identifier(node.name) + args_code = ", ".join([self.visit(arg) for arg in node.args or []]) + return f"{name}({args_code})" + + def visit_expr_call(self, node: ast.ExprCall): + func_code = self.visit(node.expr) + args_code = ", ".join([self.visit(arg) for arg in node.args]) + return f"{func_code}({args_code})" + + def visit_program(self, node: ast.Program): + code_lines = [] + self._start_scope() + for declaration in node.declarations: + code = self.visit(declaration) + code_lines.append(self._indent(code)) + self._end_scope() + return "\n".join(code_lines) + + def visit_block(self, node: ast.Block): + code_lines = [] + self._start_scope() + self.indent_level += 1 + for declaration in node.declarations: + code = self.visit(declaration) + code_lines.append(self._indent(code)) + self.indent_level -= 1 + self._end_scope() + return "{\n" + "\n".join(code_lines) + "\n" + (" " * self.indent_level) + "}" + + def visit_expr_statement(self, node: ast.ExprStatement): + if node.expr is None: + return "" + expr_code = self.visit(node.expr) + return expr_code + ";" + + def visit_return_statement(self, node: ast.ReturnStatement): + if node.expr: + return f"return {self.visit(node.expr)};" + else: + return "return null;" + + def visit_throw_statement(self, node: ast.ThrowStatement): + return f"throw {self.visit(node.expr)};" + + def visit_try_catch_statement(self, node: ast.TryCatchStatement): + try_code = self.visit(_as_block(node.try_stmt)) + code = "try " + try_code + " catch (__error) { " + has_catch_all = False + for index, catch in enumerate(node.catches): + catch_var = catch[0] or "e" + self._start_scope() + self._declare_local(catch_var) + catch_type = str(catch[1]) if catch[1] is not None else None + catch_declarations = _as_block(catch[2]) + catch_code = "".join(self._indent(self.visit(d)) for d in catch_declarations.declarations) + if index > 0: + code += " else " + if catch_type is not None and catch_type != "Error": + code += ( + f"if (__error.type === {json.dumps(catch_type)}) {{ let {_sanitize_identifier(catch_var)} = __error;\n" + f"{catch_code}\n" + f"}}\n" + ) + else: + has_catch_all = True + code += f"if (true) {{ let {_sanitize_identifier(catch_var)} = __error;\n" f"{catch_code}\n" f"}}\n" + self._end_scope() + if not has_catch_all: + code += " else { throw __error; }" + code += "}" + + if node.finally_stmt: + finally_code = self.visit(_as_block(node.finally_stmt)) + code += " finally " + finally_code + return code + + def visit_if_statement(self, node: ast.IfStatement): + expr_code = self.visit(node.expr) + then_code = self.visit(_as_block(node.then)) + code = f"if ({expr_code}) {then_code}" + if node.else_: + else_code = self.visit(_as_block(node.else_)) + code += f" else {else_code}" + return code + + def visit_while_statement(self, node: ast.WhileStatement): + expr_code = self.visit(node.expr) + body_code = self.visit(_as_block(node.body)) + return f"while ({expr_code}) {body_code}" + + def visit_for_statement(self, node: ast.ForStatement): + self._start_scope() + init_code = 
self.visit(node.initializer) if node.initializer else "" + init_code = init_code[:-1] if init_code.endswith(";") else init_code + condition_code = self.visit(node.condition) if node.condition else "" + condition_code = condition_code[:-1] if condition_code.endswith(";") else condition_code + increment_code = self.visit(node.increment) if node.increment else "" + increment_code = increment_code[:-1] if increment_code.endswith(";") else increment_code + body_code = self.visit(_as_block(node.body)) + self._end_scope() + return f"for ({init_code}; {condition_code}; {increment_code}) {body_code}" + + def visit_for_in_statement(self, node: ast.ForInStatement): + expr_code = self.visit(node.expr) + if node.keyVar and node.valueVar: + self._start_scope() + self._declare_local(node.keyVar) + self._declare_local(node.valueVar) + body_code = self.visit(_as_block(node.body)) + self.inlined_stl.add("keys") + resp = f"for (let {_sanitize_identifier(node.keyVar)} of keys({expr_code})) {{ let {_sanitize_identifier(node.valueVar)} = {expr_code}[{_sanitize_identifier(node.keyVar)}]; {body_code} }}" + self._end_scope() + return resp + elif node.valueVar: + self._start_scope() + self._declare_local(node.valueVar) + body_code = self.visit(_as_block(node.body)) + self.inlined_stl.add("values") + resp = f"for (let {_sanitize_identifier(node.valueVar)} of values({expr_code})) {body_code}" + self._end_scope() + return resp + else: + raise QueryError("ForInStatement requires at least a valueVar") + + def visit_variable_declaration(self, node: ast.VariableDeclaration): + self._declare_local(node.name) + if node.expr: + expr_code = self.visit(node.expr) + return f"let {_sanitize_identifier(node.name)} = {expr_code};" + else: + return f"let {_sanitize_identifier(node.name)};" + + def visit_variable_assignment(self, node: ast.VariableAssignment): + if isinstance(node.left, ast.TupleAccess): + tuple_code = self.visit(node.left.tuple) + index = node.left.index + right_code = self.visit(node.right) + self.inlined_stl.add("__setProperty") + return f"__setProperty({tuple_code}, {index}, {right_code});" + + elif isinstance(node.left, ast.ArrayAccess): + array_code = self.visit(node.left.array) + property_code = self.visit(node.left.property) + right_code = self.visit(node.right) + self.inlined_stl.add("__setProperty") + return f"__setProperty({array_code}, {property_code}, {right_code});" + + elif isinstance(node.left, ast.Field): + chain = node.left.chain + name = chain[0] + is_local = any(local.name == name for local in self.locals) + + if is_local: + array_code = "" + for index, element in enumerate(chain): + if index == 0: + array_code = _sanitize_identifier(element) + if len(chain) == 1: + array_code = f"{array_code} = {self.visit(node.right)}" + elif (isinstance(element, int) and not isinstance(element, bool)) or isinstance(element, str): + if index == len(chain) - 1: + right_code = self.visit(node.right) + self.inlined_stl.add("__setProperty") + array_code = f"__setProperty({array_code}, {json.dumps(element)}, {right_code})" + else: + self.inlined_stl.add("__getProperty") + array_code = f"__getProperty({array_code}, {json.dumps(element)}, true)" + else: + raise QueryError(f"Unsupported element: {element} ({type(element)})") + return array_code + + else: + # Cannot assign to undeclared variables or globals + raise QueryError(f'Variable "{name}" not declared in this scope. 
Cannot assign to globals.') + + else: + left_code = self.visit(node.left) + right_code = self.visit(node.right) + return f"{left_code} = {right_code};" + + def visit_function(self, node: ast.Function): + self._declare_local(_sanitize_identifier(node.name)) + params_code = ", ".join(_sanitize_identifier(p) for p in node.params) + self._start_scope() + for arg in node.params: + self._declare_local(arg) + if isinstance(node.body, ast.Placeholder): + # a placeholder body compiles like in visit_lambda: run the expression, then return null + body_code = self.visit( + ast.Block(declarations=[ast.ExprStatement(expr=node.body.expr), ast.ReturnStatement(expr=None)]) + ) + else: + body_code = self.visit(_as_block(node.body)) + self._end_scope() + return f"function {_sanitize_identifier(node.name)}({params_code}) {body_code}" + + def visit_lambda(self, node: ast.Lambda): + params_code = ", ".join(_sanitize_identifier(p) for p in node.args) + self._start_scope() + for arg in node.args: + self._declare_local(arg) + if isinstance(node.expr, ast.Placeholder): + expr_code = self.visit( + ast.Block(declarations=[ast.ExprStatement(expr=node.expr.expr), ast.ReturnStatement(expr=None)]) + ) + else: + expr_code = self.visit(node.expr) + self._end_scope() + self.inlined_stl.add("__lambda") + # we wrap it in __lambda() to make the function anonymous (a true lambda without a name) + return f"__lambda(({params_code}) => {expr_code})" + + def visit_dict(self, node: ast.Dict): + items = [] + for key, value in node.items: + key_code = self.visit(key) + if not isinstance(key, ast.Constant) or not isinstance(key.value, str): + key_code = f"[{key_code}]" + value_code = self.visit(value) + items.append(f"{key_code}: {value_code}") + items_code = ", ".join(items) + return f"{{{items_code}}}" + + def visit_array(self, node: ast.Array): + items_code = ", ".join([self.visit(expr) for expr in node.exprs]) + return f"[{items_code}]" + + def visit_tuple(self, node: ast.Tuple): + items_code = ", ".join([self.visit(expr) for expr in node.exprs]) + self.inlined_stl.add("tuple") + return f"tuple({items_code})" + + def visit_hogqlx_tag(self, node: ast.HogQLXTag): + attrs = [f'"__hx_tag": {json.dumps(node.kind)}'] + for attr in node.attributes: + attrs.append(f'"{attr.name}": {self._visit_hogqlx_value(attr.value)}') + return f'{{{", ".join(attrs)}}}' + + def _visit_hogqlx_value(self, value: Any) -> str: + if isinstance(value, AST): + return self.visit(value) + if isinstance(value, list): + elems = ", ".join([self._visit_hogqlx_value(v) for v in value]) + return f"[{elems}]" + if isinstance(value, dict): + items = ", ".join( + [f"{self._visit_hogqlx_value(k)}: {self._visit_hogqlx_value(v)}" for k, v in value.items()] + ) + return f"{{{items}}}" + if isinstance(value, StrEnum): + return '"' + str(value.value) + '"' + if value is True: + return "true" + if value is False: + return "false" + if isinstance(value, int | float): + return str(value) + if isinstance(value, str): + return json.dumps(value) + return "null" + + def visit_select_query(self, node: ast.SelectQuery): + raise NotImplementedError("JavaScriptCompiler does not support SelectQuery") diff --git a/posthog/hogql/compiler/javascript_stl.py b/posthog/hogql/compiler/javascript_stl.py new file mode 100644 index 00000000000..b1e53d24110 --- /dev/null +++ b/posthog/hogql/compiler/javascript_stl.py @@ -0,0 +1,923 @@ +# TODO: this should be autogenerated from hogvm/typescript/src/stl/* + +STL_FUNCTIONS: dict[str, list[str | list[str]]] = { + "concat": [ + "function concat (...args) { return args.map((arg) => (arg === null ? 
'' : __STLToString(arg))).join('') }", + ["__STLToString"], + ], + "match": [ + "function match (str, pattern) { return new RegExp(pattern).test(str) }", + [], + ], + "like": [ + "function like (str, pattern) { return __like(str, pattern, false) }", + ["__like"], + ], + "ilike": [ + "function ilike (str, pattern) { return __like(str, pattern, true) }", + ["__like"], + ], + "notLike": [ + "function notLike (str, pattern) { return !__like(str, pattern, false) }", + ["__like"], + ], + "notILike": [ + "function notILike (str, pattern) { return !__like(str, pattern, true) }", + ["__like"], + ], + "toString": [ + "function toString (value) { return __STLToString(value) }", + ["__STLToString"], + ], + "toUUID": [ + "function toUUID (value) { return __STLToString(value) }", + ["__STLToString"], + ], + "toInt": [ + """function toInt(value) { + if (__isHogDateTime(value)) { return Math.floor(value.dt); } + else if (__isHogDate(value)) { const date = new Date(Date.UTC(value.year, value.month - 1, value.day)); const epoch = new Date(Date.UTC(1970, 0, 1)); const diffInDays = Math.floor((date - epoch) / (1000 * 60 * 60 * 24)); return diffInDays; } + return !isNaN(parseInt(value)) ? parseInt(value) : null; }""", + ["__isHogDateTime", "__isHogDate"], + ], + "toFloat": [ + """function toFloat(value) { + if (__isHogDateTime(value)) { return value.dt; } + else if (__isHogDate(value)) { const date = new Date(Date.UTC(value.year, value.month - 1, value.day)); const epoch = new Date(Date.UTC(1970, 0, 1)); const diffInDays = (date - epoch) / (1000 * 60 * 60 * 24); return diffInDays; } + return !isNaN(parseFloat(value)) ? parseFloat(value) : null; }""", + ["__isHogDateTime", "__isHogDate"], + ], + "ifNull": [ + "function ifNull (value, defaultValue) { return value !== null ? 
value : defaultValue } ", + [], + ], + "length": [ + "function length (value) { return value.length }", + [], + ], + "empty": [ + """function empty (value) { + if (typeof value === 'object') { + if (Array.isArray(value)) { return value.length === 0 } else if (value === null) { return true } else if (value instanceof Map) { return value.size === 0 } + return Object.keys(value).length === 0 + } else if (typeof value === 'number' || typeof value === 'boolean') { return false } + return !value }""", + [], + ], + "notEmpty": [ + "function notEmpty (value) { return !empty(value) }", + ["empty"], + ], + "tuple": [ + "function tuple (...args) { const tuple = args.slice(); tuple.__isHogTuple = true; return tuple; }", + [], + ], + "lower": [ + "function lower (value) { return value.toLowerCase() }", + [], + ], + "upper": [ + "function upper (value) { return value.toUpperCase() }", + [], + ], + "reverse": [ + "function reverse (value) { return value.split('').reverse().join('') }", + [], + ], + "print": [ + "function print (...args) { console.log(...args.map(__printHogStringOutput)) }", + ["__printHogStringOutput"], + ], + "jsonParse": [ + """function jsonParse (str) { + function convert(x) { + if (Array.isArray(x)) { return x.map(convert) } + else if (typeof x === 'object' && x !== null) { + if (x.__hogDateTime__) { return __toHogDateTime(x.dt, x.zone) + } else if (x.__hogDate__) { return __toHogDate(x.year, x.month, x.day) + } else if (x.__hogError__) { return __newHogError(x.type, x.message, x.payload) } + const obj = {}; for (const key in x) { obj[key] = convert(x[key]) }; return obj } + return x } + return convert(JSON.parse(str)) }""", + ["__toHogDateTime", "__toHogDate", "__newHogError"], + ], + "jsonStringify": [ + """function jsonStringify (value, spacing) { + function convert(x, marked) { + if (!marked) { marked = new Set() } + if (typeof x === 'object' && x !== null) { + if (marked.has(x)) { return null } + marked.add(x) + try { + if (x instanceof Map) { + const obj = {} + x.forEach((value, key) => { obj[convert(key, marked)] = convert(value, marked) }) + return obj + } + if (Array.isArray(x)) { return x.map((v) => convert(v, marked)) } + if (__isHogDateTime(x) || __isHogDate(x) || __isHogError(x)) { return x } + if (typeof x === 'function') { return `fn<${x.name || 'lambda'}(${x.length})>` } + const obj = {}; for (const key in x) { obj[key] = convert(x[key], marked) } + return obj + } finally { + marked.delete(x) + } + } + return x + } + if (spacing && typeof spacing === 'number' && spacing > 0) { + return JSON.stringify(convert(value), null, spacing) + } + return JSON.stringify(convert(value), (key, val) => typeof val === 'function' ? 
`fn<${val.name || 'lambda'}(${val.length})>` : val) +}""", + ["__isHogDateTime", "__isHogDate", "__isHogError"], + ], + "JSONHas": [ + """function JSONHas (obj, ...path) { + let current = obj + for (const key of path) { + let currentParsed = current + if (typeof current === 'string') { try { currentParsed = JSON.parse(current) } catch (e) { return false } } + if (currentParsed instanceof Map) { if (!currentParsed.has(key)) { return false }; current = currentParsed.get(key) } + else if (typeof currentParsed === 'object' && currentParsed !== null) { + if (typeof key === 'number') { + if (Array.isArray(currentParsed)) { + if (key < 0) { if (key < -currentParsed.length) { return false }; current = currentParsed[currentParsed.length + key] } + else if (key === 0) { return false } + else { if (key > currentParsed.length) { return false }; current = currentParsed[key - 1] } + } else { return false } + } else { + if (!(key in currentParsed)) { return false } + current = currentParsed[key] + } + } else { return false } + } + return true }""", + [], + ], + "isValidJSON": [ + "function isValidJSON (str) { try { JSON.parse(str); return true } catch (e) { return false } }", + [], + ], + "JSONLength": [ + """function JSONLength (obj, ...path) { + try { if (typeof obj === 'string') { obj = JSON.parse(obj) } } catch (e) { return 0 } + if (typeof obj === 'object' && obj !== null) { + const value = __getNestedValue(obj, path, true) + if (Array.isArray(value)) { + return value.length + } else if (value instanceof Map) { + return value.size + } else if (typeof value === 'object' && value !== null) { + return Object.keys(value).length + } + } + return 0 }""", + ["__getNestedValue"], + ], + "JSONExtractBool": [ + """function JSONExtractBool (obj, ...path) { + try { + if (typeof obj === 'string') { + obj = JSON.parse(obj) + } + } catch (e) { + return false + } + if (path.length > 0) { + obj = __getNestedValue(obj, path, true) + } + if (typeof obj === 'boolean') { + return obj + } + return false +}""", + ["__getNestedValue"], + ], + "base64Encode": [ + "function base64Encode (str) { return Buffer.from(str).toString('base64') }", + [], + ], + "base64Decode": [ + "function base64Decode (str) { return Buffer.from(str, 'base64').toString() } ", + [], + ], + "tryBase64Decode": [ + "function tryBase64Decode (str) { try { return Buffer.from(str, 'base64').toString() } catch (e) { return '' } }", + [], + ], + "encodeURLComponent": [ + "function encodeURLComponent (str) { return encodeURIComponent(str) }", + [], + ], + "decodeURLComponent": [ + "function decodeURLComponent (str) { return decodeURIComponent(str) }", + [], + ], + "replaceOne": [ + "function replaceOne (str, searchValue, replaceValue) { return str.replace(searchValue, replaceValue) }", + [], + ], + "replaceAll": [ + "function replaceAll (str, searchValue, replaceValue) { return str.replaceAll(searchValue, replaceValue) }", + [], + ], + "position": [ + "function position (str, elem) { if (typeof str === 'string') { return str.indexOf(String(elem)) + 1 } else { return 0 } }", + [], + ], + "positionCaseInsensitive": [ + "function positionCaseInsensitive (str, elem) { if (typeof str === 'string') { return str.toLowerCase().indexOf(String(elem).toLowerCase()) + 1 } else { return 0 } }", + [], + ], + "trim": [ + """function trim (str, char) { + if (char === null || char === undefined) { + char = ' ' + } + if (char.length !== 1) { + return '' + } + let start = 0 + while (str[start] === char) { + start++ + } + let end = str.length + while (str[end - 1] === char) { + 
end-- + } + if (start >= end) { + return '' + } + return str.slice(start, end) +}""", + [], + ], + "trimLeft": [ + """function trimLeft (str, char) { + if (char === null || char === undefined) { + char = ' ' + } + if (char.length !== 1) { + return '' + } + let start = 0 + while (str[start] === char) { + start++ + } + return str.slice(start) +}""", + [], + ], + "trimRight": [ + """function trimRight (str, char) { + if (char === null || char === undefined) { + char = ' ' + } + if (char.length !== 1) { + return '' + } + let end = str.length + while (str[end - 1] === char) { + end-- + } + return str.slice(0, end) +}""", + [], + ], + "splitByString": [ + "function splitByString (separator, str, maxSplits) { if (maxSplits === undefined || maxSplits === null) { return str.split(separator) } return str.split(separator, maxSplits) }", + [], + ], + "generateUUIDv4": [ + "function generateUUIDv4 () { return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) { const r = (Math.random() * 16) | 0; const v = c === 'x' ? r : (r & 0x3) | 0x8; return v.toString(16) })}", + [], + ], + "sha256Hex": [ + "function sha256Hex (str, options) { return 'SHA256 is not implemented' }", + [], + ], + "md5Hex": [ + """function md5Hex(string) { return 'MD5 is not implemented' }""", + [], + ], + "sha256HmacChainHex": [ + "function sha256HmacChainHex (data, options) { return 'sha256HmacChainHex not implemented' }", + [], + ], + "keys": [ + """function keys (obj) { if (typeof obj === 'object' && obj !== null) { if (Array.isArray(obj)) { return Array.from(obj.keys()) } else if (obj instanceof Map) { return Array.from(obj.keys()) } return Object.keys(obj) } return [] }""", + [], + ], + "values": [ + """function values (obj) { if (typeof obj === 'object' && obj !== null) { if (Array.isArray(obj)) { return [...obj] } else if (obj instanceof Map) { return Array.from(obj.values()) } return Object.values(obj) } return [] }""", + [], + ], + "indexOf": [ + "function indexOf (arrOrString, elem) { if (Array.isArray(arrOrString)) { return arrOrString.indexOf(elem) + 1 } else { return 0 } }", + [], + ], + "arrayPushBack": [ + "function arrayPushBack (arr, item) { if (!Array.isArray(arr)) { return [item] } return [...arr, item] }", + [], + ], + "arrayPushFront": [ + "function arrayPushFront (arr, item) { if (!Array.isArray(arr)) { return [item] } return [item, ...arr] }", + [], + ], + "arrayPopBack": [ + "function arrayPopBack (arr) { if (!Array.isArray(arr)) { return [] } return arr.slice(0, arr.length - 1) }", + [], + ], + "arrayPopFront": [ + "function arrayPopFront (arr) { if (!Array.isArray(arr)) { return [] } return arr.slice(1) }", + [], + ], + "arraySort": [ + "function arraySort (arr) { if (!Array.isArray(arr)) { return [] } return [...arr].sort() }", + [], + ], + "arrayReverse": [ + "function arrayReverse (arr) { if (!Array.isArray(arr)) { return [] } return [...arr].reverse() }", + [], + ], + "arrayReverseSort": [ + "function arrayReverseSort (arr) { if (!Array.isArray(arr)) { return [] } return [...arr].sort().reverse() }", + [], + ], + "arrayStringConcat": [ + "function arrayStringConcat (arr, separator = '') { if (!Array.isArray(arr)) { return '' } return arr.join(separator) }", + [], + ], + "arrayCount": [ + "function arrayCount (func, arr) { let count = 0; for (let i = 0; i < arr.length; i++) { if (func(arr[i])) { count = count + 1 } } return count }", + [], + ], + "arrayExists": [ + """function arrayExists (func, arr) { for (let i = 0; i < arr.length; i++) { if (func(arr[i])) { return true } } return 
false }""", + [], + ], + "arrayFilter": [ + """function arrayFilter (func, arr) { let result = []; for (let i = 0; i < arr.length; i++) { if (func(arr[i])) { result = arrayPushBack(result, arr[i]) } } return result}""", + ["arrayPushBack"], + ], + "arrayMap": [ + """function arrayMap (func, arr) { let result = []; for (let i = 0; i < arr.length; i++) { result = arrayPushBack(result, func(arr[i])) } return result }""", + ["arrayPushBack"], + ], + "has": [ + """function has (arr, elem) { if (!Array.isArray(arr) || arr.length === 0) { return false } return arr.includes(elem) }""", + [], + ], + "now": [ + """function now () { return __now() }""", + ["__now"], + ], + "toUnixTimestamp": [ + """function toUnixTimestamp (input, zone) { return __toUnixTimestamp(input, zone) }""", + ["__toUnixTimestamp"], + ], + "fromUnixTimestamp": [ + """function fromUnixTimestamp (input) { return __fromUnixTimestamp(input) }""", + ["__fromUnixTimestamp"], + ], + "toUnixTimestampMilli": [ + """function toUnixTimestampMilli (input, zone) { return __toUnixTimestampMilli(input, zone) }""", + ["__toUnixTimestampMilli"], + ], + "fromUnixTimestampMilli": [ + """function fromUnixTimestampMilli (input) { return __fromUnixTimestampMilli(input) }""", + ["__fromUnixTimestampMilli"], + ], + "toTimeZone": [ + """function toTimeZone (input, zone) { return __toTimeZone(input, zone) }""", + ["__toTimeZone"], + ], + "toDate": [ + """function toDate (input) { return __toDate(input) }""", + ["__toDate"], + ], + "toDateTime": [ + """function toDateTime (input, zone) { return __toDateTime(input, zone) }""", + ["__toDateTime"], + ], + "formatDateTime": [ + """function formatDateTime (input, format, zone) { return __formatDateTime(input, format, zone) }""", + ["__formatDateTime"], + ], + "HogError": [ + """function HogError (type, message, payload) { return __newHogError(type, message, payload) }""", + ["__newHogError"], + ], + "Error": [ + """function __x_Error (message, payload) { return __newHogError('Error', message, payload) }""", + ["__newHogError"], + ], + "RetryError": [ + """function RetryError (message, payload) { return __newHogError('RetryError', message, payload) }""", + ["__newHogError"], + ], + "NotImplementedError": [ + """function NotImplementedError (message, payload) { return __newHogError('NotImplementedError', message, payload) }""", + ["__newHogError"], + ], + "typeof": [ + """function __x_typeof (value) { + if (value === null || value === undefined) { return 'null' + } else if (__isHogDateTime(value)) { return 'datetime' + } else if (__isHogDate(value)) { return 'date' + } else if (__isHogError(value)) { return 'error' + } else if (typeof value === 'function') { return 'function' + } else if (Array.isArray(value)) { if (value.__isHogTuple) { return 'tuple' } return 'array' + } else if (typeof value === 'object') { return 'object' + } else if (typeof value === 'number') { return Number.isInteger(value) ? 
'integer' : 'float' + } else if (typeof value === 'string') { return 'string' + } else if (typeof value === 'boolean') { return 'boolean' } + return 'unknown' +} +""", + ["__isHogDateTime", "__isHogDate", "__isHogError"], + ], + "__DateTimeToString": [ + r"""function __DateTimeToString(dt) { + if (__isHogDateTime(dt)) { + const date = new Date(dt.dt * 1000); + const timeZone = dt.zone || 'UTC'; + const milliseconds = Math.floor(dt.dt * 1000 % 1000); + const options = { timeZone, year: 'numeric', month: '2-digit', day: '2-digit', hour: '2-digit', minute: '2-digit', second: '2-digit', hour12: false }; + const formatter = new Intl.DateTimeFormat('en-US', options); + const parts = formatter.formatToParts(date); + let year, month, day, hour, minute, second; + for (const part of parts) { + switch (part.type) { + case 'year': year = part.value; break; + case 'month': month = part.value; break; + case 'day': day = part.value; break; + case 'hour': hour = part.value; break; + case 'minute': minute = part.value; break; + case 'second': second = part.value; break; + default: break; + } + } + const getOffset = (date, timeZone) => { + const tzDate = new Date(date.toLocaleString('en-US', { timeZone })); + const utcDate = new Date(date.toLocaleString('en-US', { timeZone: 'UTC' })); + const offset = (tzDate - utcDate) / 60000; // in minutes + const sign = offset >= 0 ? '+' : '-'; + const absOffset = Math.abs(offset); + const hours = Math.floor(absOffset / 60); + const minutes = absOffset % 60; + return `${sign}${String(hours).padStart(2, '0')}:${String(minutes).padStart(2, '0')}`; + }; + let offset = 'Z'; + if (timeZone !== 'UTC') { + offset = getOffset(date, timeZone); + } + let isoString = `${year}-${month}-${day}T${hour}:${minute}:${second}`; + isoString += `.${milliseconds.toString().padStart(3, '0')}`; + isoString += offset; + return isoString; + } +} + """, + [], + ], + "__STLToString": [ + r"""function __STLToString(arg) { + if (arg && __isHogDate(arg)) { return `${arg.year}-${arg.month.toString().padStart(2, '0')}-${arg.day.toString().padStart(2, '0')}`; } + else if (arg && __isHogDateTime(arg)) { return __DateTimeToString(arg); } + return __printHogStringOutput(arg); }""", + ["__isHogDate", "__isHogDateTime", "__printHogStringOutput", "__DateTimeToString"], + ], + "__isHogDate": [ + """function __isHogDate(obj) { return obj && obj.__hogDate__ === true }""", + [], + ], + "__isHogDateTime": [ + """function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true }""", + [], + ], + "__toHogDate": [ + """function __toHogDate(year, month, day) { return { __hogDate__: true, year: year, month: month, day: day, } }""", + [], + ], + "__toHogDateTime": [ + """function __toHogDateTime(timestamp, zone) { + if (__isHogDate(timestamp)) { + const date = new Date(Date.UTC(timestamp.year, timestamp.month - 1, timestamp.day)); + const dt = date.getTime() / 1000; + return { __hogDateTime__: true, dt: dt, zone: zone || 'UTC' }; + } + return { __hogDateTime__: true, dt: timestamp, zone: zone || 'UTC' }; }""", + ["__isHogDate"], + ], + "__now": [ + """function __now(zone) { return __toHogDateTime(Date.now() / 1000, zone) }""", + ["__toHogDateTime"], + ], + "__toUnixTimestamp": [ + """function __toUnixTimestamp(input, zone) { + if (__isHogDateTime(input)) { return input.dt; } + if (__isHogDate(input)) { return __toHogDateTime(input).dt; } + const date = new Date(input); + if (isNaN(date.getTime())) { throw new Error('Invalid date input'); } + return Math.floor(date.getTime() / 1000);}""", + ["__isHogDateTime", 
"__isHogDate", "__toHogDateTime"], + ], + "__fromUnixTimestamp": [ + """function __fromUnixTimestamp(input) { return __toHogDateTime(input) }""", + ["__toHogDateTime"], + ], + "__toUnixTimestampMilli": [ + """function __toUnixTimestampMilli(input, zone) { return __toUnixTimestamp(input, zone) * 1000 }""", + ["__toUnixTimestamp"], + ], + "__fromUnixTimestampMilli": [ + """function __fromUnixTimestampMilli(input) { return __toHogDateTime(input / 1000) }""", + ["__toHogDateTime"], + ], + "__toTimeZone": [ + """function __toTimeZone(input, zone) { if (!__isHogDateTime(input)) { throw new Error('Expected a DateTime') }; return { ...input, zone }}""", + ["__isHogDateTime"], + ], + "__toDate": [ + """function __toDate(input) { let date; + if (typeof input === 'number') { date = new Date(input * 1000); } else { date = new Date(input); } + if (isNaN(date.getTime())) { throw new Error('Invalid date input'); } + return { __hogDate__: true, year: date.getUTCFullYear(), month: date.getUTCMonth() + 1, day: date.getUTCDate() }; }""", + [], + ], + "__toDateTime": [ + """function __toDateTime(input, zone) { let dt; + if (typeof input === 'number') { dt = input; } + else { const date = new Date(input); if (isNaN(date.getTime())) { throw new Error('Invalid date input'); } dt = date.getTime() / 1000; } + return { __hogDateTime__: true, dt: dt, zone: zone || 'UTC' }; }""", + [], + ], + "__formatDateTime": [ + """function __formatDateTime(input, format, zone) { + if (!__isHogDateTime(input)) { throw new Error('Expected a DateTime'); } + if (!format) { throw new Error('formatDateTime requires at least 2 arguments'); } + const timestamp = input.dt * 1000; + let date = new Date(timestamp); + if (!zone) { zone = 'UTC'; } + const padZero = (num, len = 2) => String(num).padStart(len, '0'); + const padSpace = (num, len = 2) => String(num).padStart(len, ' '); + const getDateComponent = (type, options = {}) => { + const formatter = new Intl.DateTimeFormat('en-US', { ...options, timeZone: zone }); + const parts = formatter.formatToParts(date); + const part = parts.find(p => p.type === type); + return part ? 
part.value : ''; + }; + const getNumericComponent = (type, options = {}) => { + const value = getDateComponent(type, options); + return parseInt(value, 10); + }; + const getWeekNumber = (d) => { + const dateInZone = new Date(d.toLocaleString('en-US', { timeZone: zone })); + const target = new Date(Date.UTC(dateInZone.getFullYear(), dateInZone.getMonth(), dateInZone.getDate())); + const dayNr = (target.getUTCDay() + 6) % 7; + target.setUTCDate(target.getUTCDate() - dayNr + 3); + const firstThursday = new Date(Date.UTC(target.getUTCFullYear(), 0, 4)); + const weekNumber = 1 + Math.round(((target - firstThursday) / 86400000 - 3 + ((firstThursday.getUTCDay() + 6) % 7)) / 7); + return weekNumber; + }; + const getDayOfYear = (d) => { + const startOfYear = new Date(Date.UTC(d.getUTCFullYear(), 0, 1)); + const dateInZone = new Date(d.toLocaleString('en-US', { timeZone: zone })); + const diff = dateInZone - startOfYear; + return Math.floor(diff / 86400000) + 1; + }; + // Token mapping with corrections + const tokens = { + '%a': () => getDateComponent('weekday', { weekday: 'short' }), + '%b': () => getDateComponent('month', { month: 'short' }), + '%c': () => padZero(getNumericComponent('month', { month: '2-digit' })), + '%C': () => getDateComponent('year', { year: '2-digit' }), + '%d': () => padZero(getNumericComponent('day', { day: '2-digit' })), + '%D': () => { + const month = padZero(getNumericComponent('month', { month: '2-digit' })); + const day = padZero(getNumericComponent('day', { day: '2-digit' })); + const year = getDateComponent('year', { year: '2-digit' }); + return `${month}/${day}/${year}`; + }, + '%e': () => padSpace(getNumericComponent('day', { day: 'numeric' })), + '%F': () => { + const year = getNumericComponent('year', { year: 'numeric' }); + const month = padZero(getNumericComponent('month', { month: '2-digit' })); + const day = padZero(getNumericComponent('day', { day: '2-digit' })); + return `${year}-${month}-${day}`; + }, + '%g': () => getDateComponent('year', { year: '2-digit' }), + '%G': () => getNumericComponent('year', { year: 'numeric' }), + '%h': () => padZero(getNumericComponent('hour', { hour: '2-digit', hour12: true })), + '%H': () => padZero(getNumericComponent('hour', { hour: '2-digit', hour12: false })), + '%i': () => padZero(getNumericComponent('minute', { minute: '2-digit' })), + '%I': () => padZero(getNumericComponent('hour', { hour: '2-digit', hour12: true })), + '%j': () => padZero(getDayOfYear(date), 3), + '%k': () => padSpace(getNumericComponent('hour', { hour: 'numeric', hour12: false })), + '%l': () => padZero(getNumericComponent('hour', { hour: '2-digit', hour12: true })), + '%m': () => padZero(getNumericComponent('month', { month: '2-digit' })), + '%M': () => getDateComponent('month', { month: 'long' }), + '%n': () => '\\n', + '%p': () => getDateComponent('dayPeriod', { hour: 'numeric', hour12: true }), + '%r': () => { + const hour = padZero(getNumericComponent('hour', { hour: '2-digit', hour12: true })); + const minute = padZero(getNumericComponent('minute', { minute: '2-digit' })); + const second = padZero(getNumericComponent('second', { second: '2-digit' })); + const period = getDateComponent('dayPeriod', { hour: 'numeric', hour12: true }); + return `${hour}:${minute} ${period}`; + }, + '%R': () => { + const hour = padZero(getNumericComponent('hour', { hour: '2-digit', hour12: false })); + const minute = padZero(getNumericComponent('minute', { minute: '2-digit' })); + return `${hour}:${minute}`; + }, + '%s': () => padZero(getNumericComponent('second', 
{ second: '2-digit' })), + '%S': () => padZero(getNumericComponent('second', { second: '2-digit' })), + '%t': () => '\\t', + '%T': () => { + const hour = padZero(getNumericComponent('hour', { hour: '2-digit', hour12: false })); + const minute = padZero(getNumericComponent('minute', { minute: '2-digit' })); + const second = padZero(getNumericComponent('second', { second: '2-digit' })); + return `${hour}:${minute}:${second}`; + }, + '%u': () => { + let day = getDateComponent('weekday', { weekday: 'short' }); + const dayMap = { 'Mon': '1', 'Tue': '2', 'Wed': '3', 'Thu': '4', 'Fri': '5', 'Sat': '6', 'Sun': '7' }; + return dayMap[day]; + }, + '%V': () => padZero(getWeekNumber(date)), + '%w': () => { + let day = getDateComponent('weekday', { weekday: 'short' }); + const dayMap = { 'Sun': '0', 'Mon': '1', 'Tue': '2', 'Wed': '3', 'Thu': '4', 'Fri': '5', 'Sat': '6' }; + return dayMap[day]; + }, + '%W': () => getDateComponent('weekday', { weekday: 'long' }), + '%y': () => getDateComponent('year', { year: '2-digit' }), + '%Y': () => getNumericComponent('year', { year: 'numeric' }), + '%z': () => { + if (zone === 'UTC') { + return '+0000'; + } else { + const formatter = new Intl.DateTimeFormat('en-US', { + timeZone: zone, + timeZoneName: 'shortOffset', + }); + const parts = formatter.formatToParts(date); + const offsetPart = parts.find(part => part.type === 'timeZoneName'); + if (offsetPart && offsetPart.value) { + const offsetValue = offsetPart.value; + const match = offsetValue.match(/GMT([+-]\\d{1,2})(?::(\\d{2}))?/); + if (match) { + const sign = match[1][0]; + const hours = padZero(Math.abs(parseInt(match[1], 10))); + const minutes = padZero(match[2] ? parseInt(match[2], 10) : 0); + return `${sign}${hours}${minutes}`; + } + } + return ''; + } + }, + '%%': () => '%', + }; + + // Replace tokens in the format string + let result = ''; + let i = 0; + while (i < format.length) { + if (format[i] === '%') { + const token = format.substring(i, i + 2); + if (tokens[token]) { + result += tokens[token](); + i += 2; + } else { + // If token not found, include '%' and move to next character + result += format[i]; + i += 1; + } + } else { + result += format[i]; + i += 1; + } + } + + return result; +} +""", + ["__isHogDateTime"], + ], + "__printHogStringOutput": [ + """function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } """, + ["__printHogValue"], + ], + "__printHogValue": [ + """ +function __printHogValue(obj, marked = new Set()) { + if (typeof obj === 'object' && obj !== null && obj !== undefined) { + if (marked.has(obj) && !__isHogDateTime(obj) && !__isHogDate(obj) && !__isHogError(obj)) { return 'null'; } + marked.add(obj); + try { + if (Array.isArray(obj)) { + if (obj.__isHogTuple) { return obj.length < 2 ? `tuple(${obj.map((o) => __printHogValue(o, marked)).join(', ')})` : `(${obj.map((o) => __printHogValue(o, marked)).join(', ')})`; } + return `[${obj.map((o) => __printHogValue(o, marked)).join(', ')}]`; + } + if (__isHogDateTime(obj)) { const millis = String(obj.dt); return `DateTime(${millis}${millis.includes('.') ? '' : '.0'}, ${__escapeString(obj.zone)})`; } + if (__isHogDate(obj)) return `Date(${obj.year}, ${obj.month}, ${obj.day})`; + if (__isHogError(obj)) { return `${String(obj.type)}(${__escapeString(obj.message)}${obj.payload ? 
`, ${__printHogValue(obj.payload, marked)}` : ''})`; } + if (obj instanceof Map) { return `{${Array.from(obj.entries()).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; } + return `{${Object.entries(obj).map(([key, value]) => `${__printHogValue(key, marked)}: ${__printHogValue(value, marked)}`).join(', ')}}`; + } finally { + marked.delete(obj); + } + } else if (typeof obj === 'boolean') return obj ? 'true' : 'false'; + else if (obj === null || obj === undefined) return 'null'; + else if (typeof obj === 'string') return __escapeString(obj); + if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; + return obj.toString(); +} +""", + [ + "__isHogDateTime", + "__isHogDate", + "__isHogError", + "__escapeString", + "__escapeIdentifier", + ], + ], + "__escapeString": [ + """ +function __escapeString(value) { + const singlequoteEscapeCharsMap = { '\\b': '\\\\b', '\\f': '\\\\f', '\\r': '\\\\r', '\\n': '\\\\n', '\\t': '\\\\t', '\\0': '\\\\0', '\\v': '\\\\v', '\\\\': '\\\\\\\\', "'": "\\\\'" } + return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; +} +""", + [], + ], + "__escapeIdentifier": [ + """ +function __escapeIdentifier(identifier) { + const backquoteEscapeCharsMap = { '\\b': '\\\\b', '\\f': '\\\\f', '\\r': '\\\\r', '\\n': '\\\\n', '\\t': '\\\\t', '\\0': '\\\\0', '\\v': '\\\\v', '\\\\': '\\\\\\\\', '`': '\\\\`' } + if (typeof identifier === 'number') return identifier.toString(); + if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; + return `\\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\\``; +} +""", + [], + ], + "__newHogError": [ + """ +function __newHogError(type, message, payload) { + let error = new Error(message || 'An error occurred'); + error.__hogError__ = true + error.type = type + error.payload = payload + return error +} +""", + [], + ], + "__isHogError": [ + """function __isHogError(obj) {return obj && obj.__hogError__ === true}""", + [], + ], + "__getNestedValue": [ + """ +function __getNestedValue(obj, path, allowNull = false) { + let current = obj + for (const key of path) { + if (current == null) { + return null + } + if (current instanceof Map) { + current = current.get(key) + } else if (typeof current === 'object' && current !== null) { + current = current[key] + } else { + return null + } + } + if (current === null && !allowNull) { + return null + } + return current +} +""", + [], + ], + "__like": [ + """ +function __like(str, pattern, caseInsensitive = false) { + if (caseInsensitive) { + str = str.toLowerCase() + pattern = pattern.toLowerCase() + } + pattern = String(pattern) + .replaceAll(/[-/\\\\^$*+?.()|[\\]{}]/g, '\\\\$&') + .replaceAll('%', '.*') + .replaceAll('_', '.') + return new RegExp(pattern).test(str) +} +""", + [], + ], + "__getProperty": [ + """ +function __getProperty(objectOrArray, key, nullish) { + if ((nullish && !objectOrArray) || key === 0) { return null } + if (Array.isArray(objectOrArray)) { + return key > 0 ? 
objectOrArray[key - 1] : objectOrArray[objectOrArray.length + key]
+    } else {
+        return objectOrArray[key]
+    }
+}
+""",
+        [],
+    ],
+    "__setProperty": [
+        """
+function __setProperty(objectOrArray, key, value) {
+    if (Array.isArray(objectOrArray)) {
+        if (key > 0) {
+            objectOrArray[key - 1] = value
+        } else {
+            objectOrArray[objectOrArray.length + key] = value
+        }
+    } else {
+        objectOrArray[key] = value
+    }
+    return objectOrArray
+}
+""",
+        [],
+    ],
+    "__lambda": [
+        """function __lambda (fn) { return fn }""",
+        [],
+    ],
+}
+
+
+def import_stl_functions(requested_functions):
+    """
+    Given a list of requested function names, returns a string containing the code
+    for these functions and all their dependencies, in an order suitable for evaluation.
+    """
+
+    # Set to keep track of all required functions
+    required_functions = set()
+    visited = set()
+
+    # Recursive function to find all dependencies
+    def dfs(func_name):
+        if func_name in visited:
+            return
+        visited.add(func_name)
+        if func_name not in STL_FUNCTIONS:
+            raise ValueError(f"Function '{func_name}' is not defined.")
+        _, dependencies = STL_FUNCTIONS[func_name]
+        for dep in sorted(dependencies):
+            dfs(dep)
+        required_functions.add(func_name)
+
+    # Start DFS from each requested function
+    for func in requested_functions:
+        dfs(func)
+
+    # Build the dependency graph
+    dependency_graph = {}
+    for func in sorted(required_functions):
+        _, dependencies = STL_FUNCTIONS[func]
+        dependency_graph[func] = dependencies
+
+    # Perform topological sort
+    def topological_sort(graph):
+        visited = set()
+        temp_mark = set()
+        result = []
+
+        def visit(node):
+            if node in visited:
+                return
+            if node in temp_mark:
+                raise ValueError(f"Circular dependency detected involving {node}")
+            temp_mark.add(node)
+            for neighbor in sorted(graph.get(node, [])):
+                visit(neighbor)
+            temp_mark.remove(node)
+            visited.add(node)
+            result.append(node)
+
+        for node in sorted(graph):
+            visit(node)
+        return result[::-1]  # reverse the list to get correct order
+
+    sorted_functions = topological_sort(dependency_graph)
+
+    # Build the final code
+    code_pieces = []
+    for func in sorted_functions:
+        code, _ = STL_FUNCTIONS[func]
+        code_pieces.append(str(code).strip())
+
+    return "\n".join(code_pieces)
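For reference, a minimal usage sketch of import_stl_functions (hypothetical caller code; the module path is assumed from this PR's layout, and the dependency names are the ones declared for __printHogValue in the STL table above, which the DFS expands transitively):

    from posthog.hogql.compiler.javascript_stl import import_stl_functions  # assumed path

    # One evaluable JS source string containing __printHogValue plus its declared
    # dependencies (__isHogDateTime, __isHogDate, __isHogError, __escapeString,
    # __escapeIdentifier) and anything those pull in themselves.
    js_stl = import_stl_functions(["__printHogValue"])

    # The bundle is meant to be prepended to compiled output before evaluation;
    # the emitted order is safe either way, since JS hoists function declarations.
    program = js_stl + "\n" + compiled_js_body  # compiled_js_body is hypothetical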
diff --git a/posthog/hogql/test/test_bytecode.py b/posthog/hogql/compiler/test/test_bytecode.py
similarity index 99%
rename from posthog/hogql/test/test_bytecode.py
rename to posthog/hogql/compiler/test/test_bytecode.py
index 860acb7cdec..be2c65008cc 100644
--- a/posthog/hogql/test/test_bytecode.py
+++ b/posthog/hogql/compiler/test/test_bytecode.py
@@ -1,6 +1,6 @@
 import pytest
 
-from posthog.hogql.bytecode import to_bytecode, execute_hog, create_bytecode
+from posthog.hogql.compiler.bytecode import to_bytecode, execute_hog, create_bytecode
 from hogvm.python.operation import Operation as op, HOGQL_BYTECODE_IDENTIFIER as _H, HOGQL_BYTECODE_VERSION
 from posthog.hogql.errors import NotImplementedError, QueryError
 from posthog.hogql.parser import parse_program
diff --git a/posthog/hogql/compiler/test/test_javascript.py b/posthog/hogql/compiler/test/test_javascript.py
new file mode 100644
index 00000000000..b6848a664fd
--- /dev/null
+++ b/posthog/hogql/compiler/test/test_javascript.py
@@ -0,0 +1,227 @@
+from posthog.hogql.compiler.javascript import JavaScriptCompiler, Local, _sanitize_identifier, to_js_program, to_js_expr
+from posthog.hogql.errors import NotImplementedError, QueryError
+from posthog.hogql import ast
+from posthog.test.base import BaseTest
+
+
+class TestSanitizeIdentifier(BaseTest):
+    def test_valid_identifiers(self):
+        self.assertEqual(_sanitize_identifier("validName"), "validName")
+        self.assertEqual(_sanitize_identifier("_validName123"), "_validName123")
+
+    def test_keywords(self):
+        self.assertEqual(_sanitize_identifier("await"), "__x_await")
+        self.assertEqual(_sanitize_identifier("class"), "__x_class")
+
+    def test_internal_conflicts(self):
+        self.assertEqual(_sanitize_identifier("__x_internal"), "__x___x_internal")
+
+    def test_invalid_identifiers(self):
+        self.assertEqual(_sanitize_identifier("123invalid"), '["123invalid"]')
+        self.assertEqual(_sanitize_identifier("invalid-name"), '["invalid-name"]')
+
+    def test_integer_identifiers(self):
+        self.assertEqual(_sanitize_identifier(123), '["123"]')
+
+
+class TestJavaScript(BaseTest):
+    def test_javascript_create_basic_expressions(self):
+        self.assertEqual(to_js_expr("1 + 2"), "(1 + 2)")
+        self.assertEqual(to_js_expr("1 and 2"), "!!(1 && 2)")
+        self.assertEqual(to_js_expr("1 or 2"), "!!(1 || 2)")
+        self.assertEqual(to_js_expr("not true"), "(!true)")
+        self.assertEqual(to_js_expr("1 < 2"), "(1 < 2)")
+        self.assertEqual(to_js_expr("properties.bla"), '__getProperty(__getGlobal("properties"), "bla", true)')
+
+    def test_javascript_string_functions(self):
+        self.assertEqual(to_js_expr("concat('a', 'b')"), 'concat("a", "b")')
+        self.assertEqual(to_js_expr("lower('HELLO')"), 'lower("HELLO")')
+        self.assertEqual(to_js_expr("upper('hello')"), 'upper("hello")')
+        self.assertEqual(to_js_expr("reverse('abc')"), 'reverse("abc")')
+
+    def test_arithmetic_operations(self):
+        self.assertEqual(to_js_expr("3 - 1"), "(3 - 1)")
+        self.assertEqual(to_js_expr("2 * 3"), "(2 * 3)")
+        self.assertEqual(to_js_expr("5 / 2"), "(5 / 2)")
+        self.assertEqual(to_js_expr("10 % 3"), "(10 % 3)")
+
+    def test_comparison_operations(self):
+        self.assertEqual(to_js_expr("3 = 4"), "(3 == 4)")
+        self.assertEqual(to_js_expr("3 != 4"), "(3 != 4)")
+        self.assertEqual(to_js_expr("3 < 4"), "(3 < 4)")
+        self.assertEqual(to_js_expr("3 <= 4"), "(3 <= 4)")
+        self.assertEqual(to_js_expr("3 > 4"), "(3 > 4)")
+        self.assertEqual(to_js_expr("3 >= 4"), "(3 >= 4)")
+
+    def test_javascript_create_query_error(self):
+        with self.assertRaises(QueryError) as e:
+            to_js_expr("1 in cohort 2")
+        self.assertIn(
+            "Can't use cohorts in real-time filters. 
Please inline the relevant expressions", str(e.exception) + ) + + def test_scope_errors(self): + compiler = JavaScriptCompiler(locals=[Local(name="existing_var", depth=0)]) + compiler._start_scope() + compiler._declare_local("new_var") + with self.assertRaises(QueryError): + compiler._declare_local("new_var") + compiler._end_scope() + + def test_arithmetic_operation(self): + code = to_js_expr("3 + 5 * (10 / 2) - 7") + self.assertEqual(code, "((3 + (5 * (10 / 2))) - 7)") + + def test_comparison(self): + code = to_js_expr("1 in 2") + self.assertEqual(code, "(2.includes(1))") + + def test_if_else(self): + code = to_js_program("if (1 < 2) { return true } else { return false }") + expected_code = "if ((1 < 2)) {\n return true;\n} else {\n return false;\n}" + self.assertEqual(code.strip(), expected_code.strip()) + + def test_declare_local(self): + compiler = JavaScriptCompiler() + compiler._declare_local("a_var") + self.assertIn("a_var", [local.name for local in compiler.locals]) + + def test_visit_return_statement(self): + compiler = JavaScriptCompiler() + code = compiler.visit_return_statement(ast.ReturnStatement(expr=ast.Constant(value="test"))) + self.assertEqual(code, 'return "test";') + + def test_not_implemented_visit_select_query(self): + with self.assertRaises(NotImplementedError) as e: + to_js_expr("(select 1)") + self.assertEqual(str(e.exception), "JavaScriptCompiler does not support SelectQuery") + + def test_throw_statement(self): + compiler = JavaScriptCompiler() + code = compiler.visit_throw_statement(ast.ThrowStatement(expr=ast.Constant(value="Error!"))) + self.assertEqual(code, 'throw "Error!";') + + def test_visit_dict(self): + code = to_js_expr("{'key1': 'value1', 'key2': 'value2'}") + self.assertEqual(code, '{"key1": "value1", "key2": "value2"}') + + def test_visit_array(self): + code = to_js_expr("[1, 2, 3, 4]") + self.assertEqual(code, "[1, 2, 3, 4]") + + def test_visit_lambda(self): + code = to_js_expr("x -> x + 1") + self.assertTrue(code.startswith("__lambda((x) => (x + 1))")) + + def test_inlined_stl(self): + compiler = JavaScriptCompiler() + compiler.inlined_stl.add("concat") + stl_code = compiler.get_inlined_stl() + self.assertIn("function concat", stl_code) + + def test_sanitize_keywords(self): + self.assertEqual(_sanitize_identifier("for"), "__x_for") + self.assertEqual(_sanitize_identifier("await"), "__x_await") + + def test_json_parse(self): + code = to_js_expr('jsonParse(\'{"key": "value"}\')') + self.assertEqual(code, 'jsonParse("{\\"key\\": \\"value\\"}")') + + def test_javascript_create_2(self): + self.assertEqual(to_js_expr("1 + 2"), "(1 + 2)") + self.assertEqual(to_js_expr("1 and 2"), "!!(1 && 2)") + self.assertEqual(to_js_expr("1 or 2"), "!!(1 || 2)") + self.assertEqual(to_js_expr("1 or (2 and 1) or 2"), "!!(1 || !!(2 && 1) || 2)") + self.assertEqual(to_js_expr("(1 or 2) and (1 or 2)"), "!!(!!(1 || 2) && !!(1 || 2))") + self.assertEqual(to_js_expr("not true"), "(!true)") + self.assertEqual(to_js_expr("true"), "true") + self.assertEqual(to_js_expr("false"), "false") + self.assertEqual(to_js_expr("null"), "null") + self.assertEqual(to_js_expr("3.14"), "3.14") + self.assertEqual(to_js_expr("properties.bla"), '__getProperty(__getGlobal("properties"), "bla", true)') + self.assertEqual(to_js_expr("concat('arg', 'another')"), 'concat("arg", "another")') + self.assertEqual( + to_js_expr("ifNull(properties.email, false)"), + '(__getProperty(__getGlobal("properties"), "email", true) ?? 
false)',
+        )
+        self.assertEqual(to_js_expr("1 in 2"), "(2.includes(1))")
+        self.assertEqual(to_js_expr("1 not in 2"), "(!2.includes(1))")
+        self.assertEqual(to_js_expr("match('test', 'e.*')"), 'match("test", "e.*")')
+        self.assertEqual(to_js_expr("not('test')"), '(!"test")')
+        self.assertEqual(to_js_expr("or('test', 'test2')"), '("test" || "test2")')
+        self.assertEqual(to_js_expr("and('test', 'test2')"), '("test" && "test2")')
+
+    def test_javascript_code_generation(self):
+        js_code = to_js_program("""
+        fun fibonacci(number) {
+            if (number < 2) {
+                return number;
+            } else {
+                return fibonacci(number - 1) + fibonacci(number - 2);
+            }
+        }
+        return fibonacci(6);
+        """)
+        expected_js = """function fibonacci(number) {
+    if ((number < 2)) {
+        return number;
+    } else {
+        return (fibonacci((number - 1)) + fibonacci((number - 2)));
+    }
+}
+return fibonacci(6);"""
+        self.assertEqual(js_code.strip(), expected_js.strip())
+
+    def test_javascript_hogqlx(self):
+        code = to_js_expr("<Sparkline data={[1, 2, 3]} />")
+        self.assertEqual(code.strip(), '{"__hx_tag": "Sparkline", "data": [1, 2, 3]}')
+
+    def test_sanitized_function_names(self):
+        code = to_js_expr("typeof('test')")
+        self.assertEqual(code, '__x_typeof("test")')
+
+    def test_function_name_sanitization(self):
+        code = to_js_expr("Error('An error occurred')")
+        self.assertEqual(code, '__x_Error("An error occurred")')
+
+    def test_ilike(self):
+        code = to_js_expr("'hello' ilike '%ELLO%'")
+        self.assertEqual(code, 'ilike("hello", "%ELLO%")')
+
+    def test_not_ilike(self):
+        code = to_js_expr("'hello' not ilike '%ELLO%'")
+        self.assertEqual(code, '!ilike("hello", "%ELLO%")')
+
+    def test_regex(self):
+        code = to_js_expr("'hello' =~ 'h.*o'")
+        self.assertEqual(code, 'new RegExp("h.*o").test("hello")')
+
+    def test_not_regex(self):
+        code = to_js_expr("'hello' !~ 'h.*o'")
+        self.assertEqual(code, '!(new RegExp("h.*o").test("hello"))')
+
+    def test_i_regex(self):
+        code = to_js_expr("'hello' =~* 'H.*O'")
+        self.assertEqual(code, 'new RegExp("H.*O", "i").test("hello")')
+
+    def test_not_i_regex(self):
+        code = to_js_expr("'hello' !~* 'H.*O'")
+        self.assertEqual(code, '!(new RegExp("H.*O", "i").test("hello"))')
+
+    def test_array_access(self):
+        code = to_js_expr("array[2]")
+        self.assertEqual(code, '__getProperty(__getGlobal("array"), 2, false)')
+
+    def test_tuple_access(self):
+        code = to_js_expr("(1, 2, 3).2")
+        self.assertEqual(code, "__getProperty(tuple(1, 2, 3), 2, false)")
+
+    def test_function_assignment_error(self):
+        compiler = JavaScriptCompiler()
+        with self.assertRaises(QueryError) as context:
+            compiler.visit_variable_assignment(
+                ast.VariableAssignment(left=ast.Field(chain=["globalVar"]), right=ast.Constant(value=42))
+            )
+        self.assertIn(
+            'Variable "globalVar" not declared in this scope. Cannot assign to globals.', str(context.exception)
+        )
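The TestSanitizeIdentifier cases above pin down the _sanitize_identifier contract: JS reserved words and names already carrying the internal __x_ prefix get (re-)prefixed, valid identifiers pass through unchanged, and everything else is emitted as a quoted member access. A rough sketch of that contract, for illustration only (the reserved-word set here is a stand-in for the compiler's real list, and quote escaping is omitted):

    import re

    _RESERVED = {"await", "class", "for", "typeof", "Error"}  # illustrative subset

    def sanitize_identifier_sketch(name) -> str:
        name = str(name)  # integers become strings, per test_integer_identifiers
        if name in _RESERVED or name.startswith("__x_"):
            return f"__x_{name}"  # avoid clashes with keywords and with the prefix itself
        if re.fullmatch(r"[A-Za-z_$][A-Za-z0-9_$]*", name):
            return name  # already a valid JS identifier
        return f'["{name}"]'  # bracketed member access; the real helper also escapes quotes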
diff --git a/posthog/hogql/metadata.py b/posthog/hogql/metadata.py
index fdfaa8bcde7..de309adf441 100644
--- a/posthog/hogql/metadata.py
+++ b/posthog/hogql/metadata.py
@@ -2,20 +2,30 @@ from typing import Optional, cast
 
 from django.conf import settings
 
-from posthog.hogql.bytecode import create_bytecode
+from posthog.hogql import ast
+from posthog.hogql.compiler.bytecode import create_bytecode
 from posthog.hogql.context import HogQLContext
 from posthog.hogql.errors import ExposedHogQLError
 from posthog.hogql.filters import replace_filters
-from posthog.hogql.parser import parse_select, parse_program, parse_expr, parse_string_template
+from posthog.hogql.parser import (
+    parse_expr,
+    parse_program,
+    parse_select,
+    parse_string_template,
+)
 from posthog.hogql.printer import print_ast
 from posthog.hogql.query import create_default_modifiers_for_team
+from posthog.hogql.resolver_utils import extract_select_queries
 from posthog.hogql.variables import replace_variables
 from posthog.hogql.visitor import clone_expr
 from posthog.hogql_queries.query_runner import get_query_runner
 from posthog.models import Team
-from posthog.hogql.resolver_utils import extract_select_queries
-from posthog.schema import HogQLMetadataResponse, HogQLMetadata, HogQLNotice, HogLanguage
-from posthog.hogql import ast
+from posthog.schema import (
+    HogLanguage,
+    HogQLMetadata,
+    HogQLMetadataResponse,
+    HogQLNotice,
+)
 
 
 def get_hogql_metadata(
@@ -116,8 +126,15 @@ def process_expr_on_table(
 
 
 def is_valid_view(select_query: ast.SelectQuery | ast.SelectSetQuery) -> bool:
+    """A select query is not a valid view if:
+    a) there are any function calls in the select clause, or
+    b) there are any wildcard fields in the select clause.
+    """
     for query in extract_select_queries(select_query):
         for field in query.select:
-            if not isinstance(field, ast.Alias):
+            if isinstance(field, ast.Call):
                 return False
+            if isinstance(field, ast.Field):
+                if field.chain and field.chain[-1] == "*":
+                    return False
     return True
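Read together with the new test_metadata.py cases below, the rule can be exercised directly; a hypothetical snippet (parse_select is the parser entry point imported above):

    from posthog.hogql.metadata import is_valid_view
    from posthog.hogql.parser import parse_select

    assert is_valid_view(parse_select("SELECT event AS event, uuid FROM events"))   # aliases and plain fields pass
    assert not is_valid_view(parse_select("SELECT toDate(timestamp) FROM events"))  # unaliased function call
    assert not is_valid_view(parse_select("SELECT e.* FROM events e"))              # wildcard field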
FROM events") + self.assertEqual( + metadata.dict(), + metadata.dict() + | { + "isValid": True, + "isValidView": True, + "query": "SELECT event AS event, uuid FROM events", + "errors": [], + }, + ) + + def test_is_valid_view_is_false_when_fields_that_are_transformations_dont_have_aliases(self): + metadata = self._select("SELECT toDate(timestamp), count() FROM events GROUP BY toDate(timestamp)") + self.assertEqual( + metadata.dict(), + metadata.dict() + | { + "isValid": True, + "isValidView": False, + "query": "SELECT toDate(timestamp), count() FROM events GROUP BY toDate(timestamp)", + "errors": [], + }, + ) + + def test_is_valid_view_is_true_when_fields_that_are_transformations_have_aliases(self): + metadata = self._select( + "SELECT toDate(timestamp) as timestamp, count() as total_count FROM events GROUP BY timestamp" + ) + self.assertEqual( + metadata.dict(), + metadata.dict() + | { + "isValid": True, + "isValidView": True, + "query": "SELECT toDate(timestamp) as timestamp, count() as total_count FROM events GROUP BY timestamp", + "errors": [], + }, + ) + + def test_is_valid_view_is_false_when_using_asterisk(self): + metadata = self._select("SELECT * FROM events") + self.assertEqual( + metadata.dict(), + metadata.dict() + | { + "isValid": True, + "isValidView": False, + "query": "SELECT * FROM events", + "errors": [], + }, + ) + + def test_is_valid_view_is_false_when_using_scoped_asterisk(self): + metadata = self._select("SELECT e.* FROM events e") + self.assertEqual( + metadata.dict(), + metadata.dict() + | { + "isValid": True, + "isValidView": False, + "query": "SELECT e.* FROM events e", + "errors": [], + }, + ) diff --git a/posthog/hogql_queries/experiments/experiment_funnels_query_runner.py b/posthog/hogql_queries/experiments/experiment_funnels_query_runner.py index 2c0898d1d7e..edd8b0f1f91 100644 --- a/posthog/hogql_queries/experiments/experiment_funnels_query_runner.py +++ b/posthog/hogql_queries/experiments/experiment_funnels_query_runner.py @@ -34,6 +34,10 @@ class ExperimentFunnelsQueryRunner(QueryRunner): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) + + if not self.query.experiment_id: + raise ValidationError("experiment_id is required") + self.experiment = Experiment.objects.get(id=self.query.experiment_id) self.feature_flag = self.experiment.feature_flag self.variants = [variant["key"] for variant in self.feature_flag.variants] diff --git a/posthog/hogql_queries/experiments/experiment_trends_query_runner.py b/posthog/hogql_queries/experiments/experiment_trends_query_runner.py index 30247912822..895142a936d 100644 --- a/posthog/hogql_queries/experiments/experiment_trends_query_runner.py +++ b/posthog/hogql_queries/experiments/experiment_trends_query_runner.py @@ -42,6 +42,10 @@ class ExperimentTrendsQueryRunner(QueryRunner): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) + + if not self.query.experiment_id: + raise ValidationError("experiment_id is required") + self.experiment = Experiment.objects.get(id=self.query.experiment_id) self.feature_flag = self.experiment.feature_flag self.variants = [variant["key"] for variant in self.feature_flag.variants] @@ -149,6 +153,9 @@ class ExperimentTrendsQueryRunner(QueryRunner): if hasattr(count_event, "event"): prepared_exposure_query.dateRange = self._get_insight_date_range() prepared_exposure_query.breakdownFilter = self._get_breakdown_filter() + prepared_exposure_query.trendsFilter = TrendsFilter( + display=ChartDisplayType.ACTIONS_LINE_GRAPH_CUMULATIVE + ) 
prepared_exposure_query.series = [ EventsNode( event=count_event.event, @@ -171,6 +178,7 @@ class ExperimentTrendsQueryRunner(QueryRunner): prepared_exposure_query = TrendsQuery(**self.query.exposure_query.model_dump()) prepared_exposure_query.dateRange = self._get_insight_date_range() prepared_exposure_query.breakdownFilter = self._get_breakdown_filter() + prepared_exposure_query.trendsFilter = TrendsFilter(display=ChartDisplayType.ACTIONS_LINE_GRAPH_CUMULATIVE) prepared_exposure_query.properties = [ EventPropertyFilter( key=self.breakdown_key, @@ -183,7 +191,11 @@ class ExperimentTrendsQueryRunner(QueryRunner): else: prepared_exposure_query = TrendsQuery( dateRange=self._get_insight_date_range(), - breakdownFilter=self._get_breakdown_filter(), + trendsFilter=TrendsFilter(display=ChartDisplayType.ACTIONS_LINE_GRAPH_CUMULATIVE), + breakdownFilter=BreakdownFilter( + breakdown="$feature_flag_response", + breakdown_type="event", + ), series=[ EventsNode( event="$feature_flag_called", @@ -192,7 +204,7 @@ class ExperimentTrendsQueryRunner(QueryRunner): ], properties=[ EventPropertyFilter( - key=self.breakdown_key, + key="$feature_flag_response", value=self.variants, operator="exact", type="event", diff --git a/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py b/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py index 85a818987db..16e0dde1b1f 100644 --- a/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py +++ b/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py @@ -250,7 +250,7 @@ class TestExperimentTrendsQueryRunner(ClickhouseTestMixin, APIBaseTest): { "event": "$feature_flag_called", "timestamp": "2020-01-02", - "properties": {ff_property: "control", "$feature_flag": feature_flag.key}, + "properties": {"$feature_flag_response": "control", "$feature_flag": feature_flag.key}, }, ], "user_control_2": [ @@ -258,7 +258,7 @@ class TestExperimentTrendsQueryRunner(ClickhouseTestMixin, APIBaseTest): { "event": "$feature_flag_called", "timestamp": "2020-01-02", - "properties": {ff_property: "control", "$feature_flag": feature_flag.key}, + "properties": {"$feature_flag_response": "control", "$feature_flag": feature_flag.key}, }, ], "user_test_1": [ @@ -268,7 +268,7 @@ class TestExperimentTrendsQueryRunner(ClickhouseTestMixin, APIBaseTest): { "event": "$feature_flag_called", "timestamp": "2020-01-02", - "properties": {ff_property: "test", "$feature_flag": feature_flag.key}, + "properties": {"$feature_flag_response": "test", "$feature_flag": feature_flag.key}, }, ], "user_test_2": [ @@ -277,7 +277,7 @@ class TestExperimentTrendsQueryRunner(ClickhouseTestMixin, APIBaseTest): { "event": "$feature_flag_called", "timestamp": "2020-01-02", - "properties": {ff_property: "test", "$feature_flag": feature_flag.key}, + "properties": {"$feature_flag_response": "test", "$feature_flag": feature_flag.key}, }, ], "user_out_of_control": [ @@ -291,7 +291,7 @@ class TestExperimentTrendsQueryRunner(ClickhouseTestMixin, APIBaseTest): { "event": "$feature_flag_called", "timestamp": "2019-01-01", - "properties": {ff_property: "control", "$feature_flag": feature_flag.key}, + "properties": {"$feature_flag_response": "control", "$feature_flag": feature_flag.key}, }, ], }, diff --git a/posthog/migrations/0519_errortrackingissue_description_and_more.py b/posthog/migrations/0519_errortrackingissue_description_and_more.py new file mode 100644 index 00000000000..f3faae82726 --- /dev/null +++ 
b/posthog/migrations/0519_errortrackingissue_description_and_more.py @@ -0,0 +1,22 @@ +# Generated by Django 4.2.15 on 2024-11-14 17:52 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0518_survey_internal_response_sampling_flag"), + ] + + operations = [ + migrations.AddField( + model_name="errortrackingissue", + name="description", + field=models.TextField(blank=True, null=True), + ), + migrations.AddField( + model_name="errortrackingissue", + name="name", + field=models.TextField(blank=True, null=True), + ), + ] diff --git a/posthog/migrations/0520_experiment_metrics_secondary.py b/posthog/migrations/0520_experiment_metrics_secondary.py new file mode 100644 index 00000000000..cb8f0d61114 --- /dev/null +++ b/posthog/migrations/0520_experiment_metrics_secondary.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.15 on 2024-11-18 12:03 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0519_errortrackingissue_description_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="experiment", + name="metrics_secondary", + field=models.JSONField(blank=True, default=list, null=True), + ), + ] diff --git a/posthog/migrations/max_migration.txt b/posthog/migrations/max_migration.txt index 54603fe73b4..31b8c212f62 100644 --- a/posthog/migrations/max_migration.txt +++ b/posthog/migrations/max_migration.txt @@ -1 +1 @@ -0518_survey_internal_response_sampling_flag \ No newline at end of file +0520_experiment_metrics_secondary diff --git a/posthog/models/action/action.py b/posthog/models/action/action.py index e049f7e7e54..d920b0913a7 100644 --- a/posthog/models/action/action.py +++ b/posthog/models/action/action.py @@ -90,7 +90,7 @@ class Action(models.Model): def refresh_bytecode(self): from posthog.hogql.property import action_to_expr - from posthog.hogql.bytecode import create_bytecode + from posthog.hogql.compiler.bytecode import create_bytecode try: new_bytecode = create_bytecode(action_to_expr(self)).bytecode diff --git a/posthog/models/error_tracking/error_tracking.py b/posthog/models/error_tracking/error_tracking.py index 982f21e4fa3..7e42f55cf9f 100644 --- a/posthog/models/error_tracking/error_tracking.py +++ b/posthog/models/error_tracking/error_tracking.py @@ -17,6 +17,8 @@ class ErrorTrackingIssue(UUIDModel): team = models.ForeignKey(Team, on_delete=models.CASCADE) created_at = models.DateTimeField(auto_now_add=True) status = models.TextField(choices=Status.choices, default=Status.ACTIVE, null=False) + name = models.TextField(null=True, blank=True) + description = models.TextField(null=True, blank=True) class ErrorTrackingIssueAssignment(UUIDModel): diff --git a/posthog/models/experiment.py b/posthog/models/experiment.py index 5581e197a9b..87119d292b6 100644 --- a/posthog/models/experiment.py +++ b/posthog/models/experiment.py @@ -41,6 +41,7 @@ class Experiment(models.Model): variants = models.JSONField(default=dict, null=True, blank=True) metrics = models.JSONField(default=list, null=True, blank=True) + metrics_secondary = models.JSONField(default=list, null=True, blank=True) saved_metrics: models.ManyToManyField = models.ManyToManyField( "ExperimentSavedMetric", blank=True, related_name="experiments", through="ExperimentToSavedMetric" ) diff --git a/posthog/schema.py b/posthog/schema.py index 14740b491b6..13cec8193d2 100644 --- a/posthog/schema.py +++ b/posthog/schema.py @@ -247,6 +247,14 @@ class AssistantMessage(BaseModel): 
extra="forbid", ) content: str + done: Optional[bool] = Field( + default=None, + description=( + 'We only need this "done" value to tell when the particular message is finished during its streaming. It' + " won't be necessary when we optimize streaming to NOT send the entire message every time a character is" + " added." + ), + ) type: Literal["ai"] = "ai" @@ -857,6 +865,7 @@ class FailureMessage(BaseModel): extra="forbid", ) content: Optional[str] = None + done: Literal[True] = True type: Literal["ai/failure"] = "ai/failure" @@ -1054,6 +1063,7 @@ class HumanMessage(BaseModel): extra="forbid", ) content: str + done: Literal[True] = Field(default=True, description="Human messages are only appended when done.") type: Literal["human"] = "human" @@ -1450,6 +1460,7 @@ class RouterMessage(BaseModel): extra="forbid", ) content: str + done: Literal[True] = Field(default=True, description="Router messages are not streamed, so they can only be done.") type: Literal["ai/router"] = "ai/router" @@ -5543,6 +5554,7 @@ class VisualizationMessage(BaseModel): extra="forbid", ) answer: Optional[Union[AssistantTrendsQuery, AssistantFunnelsQuery]] = None + done: Optional[bool] = None plan: Optional[str] = None reasoning_steps: Optional[list[str]] = None type: Literal["ai/viz"] = "ai/viz" @@ -6720,12 +6732,13 @@ class ExperimentTrendsQuery(BaseModel): extra="forbid", ) count_query: TrendsQuery - experiment_id: int + experiment_id: Optional[int] = None exposure_query: Optional[TrendsQuery] = None kind: Literal["ExperimentTrendsQuery"] = "ExperimentTrendsQuery" modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) + name: Optional[str] = None response: Optional[ExperimentTrendsQueryResponse] = None @@ -6841,12 +6854,13 @@ class ExperimentFunnelsQuery(BaseModel): model_config = ConfigDict( extra="forbid", ) - experiment_id: int + experiment_id: Optional[int] = None funnels_query: FunnelsQuery kind: Literal["ExperimentFunnelsQuery"] = "ExperimentFunnelsQuery" modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) + name: Optional[str] = None response: Optional[ExperimentFunnelsQueryResponse] = None diff --git a/posthog/tasks/test/test_update_survey_adaptive_sampling.py b/posthog/tasks/test/test_update_survey_adaptive_sampling.py index cd7964ffe15..91696cf1ce8 100644 --- a/posthog/tasks/test/test_update_survey_adaptive_sampling.py +++ b/posthog/tasks/test/test_update_survey_adaptive_sampling.py @@ -1,3 +1,4 @@ +import json from unittest.mock import patch, MagicMock from datetime import datetime from django.utils import timezone @@ -39,6 +40,18 @@ class TestUpdateSurveyAdaptiveSampling(BaseTest): self.assertEqual(internal_response_sampling_flag.rollout_percentage, 20) mock_get_count.assert_called_once_with(self.survey.id) + @freeze_time("2024-12-21T12:00:00Z") + @patch("posthog.tasks.update_survey_adaptive_sampling._get_survey_responses_count") + def test_updates_rollout_after_interval_is_over(self, mock_get_count: MagicMock) -> None: + mock_get_count.return_value = 50 + update_survey_adaptive_sampling() + internal_response_sampling_flag = FeatureFlag.objects.get(id=self.internal_response_sampling_flag.id) + self.assertEqual(internal_response_sampling_flag.rollout_percentage, 100) + mock_get_count.assert_called_once_with(self.survey.id) + survey = Survey.objects.get(id=self.survey.id) + response_sampling_daily_limits = json.loads(survey.response_sampling_daily_limits) + 
self.assertEqual(response_sampling_daily_limits[0].get("date"), "2024-12-22") + @freeze_time("2024-12-13T12:00:00Z") @patch("posthog.tasks.update_survey_adaptive_sampling._get_survey_responses_count") def test_no_update_when_limit_reached(self, mock_get_count: MagicMock) -> None: diff --git a/posthog/tasks/update_survey_adaptive_sampling.py b/posthog/tasks/update_survey_adaptive_sampling.py index bdd7d4ed048..bedf79f7f3c 100644 --- a/posthog/tasks/update_survey_adaptive_sampling.py +++ b/posthog/tasks/update_survey_adaptive_sampling.py @@ -1,5 +1,5 @@ import json -from datetime import datetime +from datetime import datetime, timedelta from django.utils.timezone import now from posthog.clickhouse.client import sync_execute @@ -29,6 +29,12 @@ def _update_survey_adaptive_sampling(survey: Survey) -> None: internal_response_sampling_flag.rollout_percentage = today_entry["rollout_percentage"] internal_response_sampling_flag.save() + # this also doubles as a way to check that we're processing the final entry in the current sequence. + if today_entry["rollout_percentage"] == 100: + tomorrow = today_date + timedelta(days=1) + survey.response_sampling_start_date = tomorrow + survey.save(update_fields=["response_sampling_start_date", "response_sampling_daily_limits"]) + def _get_survey_responses_count(survey_id: int) -> int: data = sync_execute( diff --git a/posthog/temporal/data_imports/external_data_job.py b/posthog/temporal/data_imports/external_data_job.py index 84c798961e6..c5ebd4dddf1 100644 --- a/posthog/temporal/data_imports/external_data_job.py +++ b/posthog/temporal/data_imports/external_data_job.py @@ -1,6 +1,7 @@ import dataclasses import datetime as dt import json +import re from django.db import close_old_connections import posthoganalytics @@ -91,11 +92,13 @@ def update_external_data_job_model(inputs: UpdateExternalDataJobStatusInputs) -> f"External data job failed for external data schema {inputs.schema_id} with error: {inputs.internal_error}" ) + internal_error_normalized = re.sub("[\n\r\t]", " ", inputs.internal_error) + source: ExternalDataSource = ExternalDataSource.objects.get(pk=inputs.source_id) non_retryable_errors = Non_Retryable_Schema_Errors.get(ExternalDataSource.Type(source.source_type)) if non_retryable_errors is not None: - has_non_retryable_error = any(error in inputs.internal_error for error in non_retryable_errors) + has_non_retryable_error = any(error in internal_error_normalized for error in non_retryable_errors) if has_non_retryable_error: logger.info("Schema has a non-retryable error - turning off syncing") posthoganalytics.capture( diff --git a/posthog/temporal/tests/data_imports/test_end_to_end.py b/posthog/temporal/tests/data_imports/test_end_to_end.py index 16824e69b9b..fce2047cd1c 100644 --- a/posthog/temporal/tests/data_imports/test_end_to_end.py +++ b/posthog/temporal/tests/data_imports/test_end_to_end.py @@ -5,6 +5,7 @@ from typing import Any, Optional from unittest import mock import aioboto3 +from deltalake import DeltaTable import posthoganalytics import psycopg import pytest @@ -938,3 +939,75 @@ async def test_non_retryable_error(team, stripe_customer): with pytest.raises(Exception): await sync_to_async(execute_hogql_query)("SELECT * FROM stripe_customer", team) + + +@pytest.mark.django_db(transaction=True) +@pytest.mark.asyncio +async def test_non_retryable_error_with_special_characters(team, stripe_customer): + source = await sync_to_async(ExternalDataSource.objects.create)( + source_id=uuid.uuid4(), + connection_id=uuid.uuid4(), + 
destination_id=uuid.uuid4(), + team=team, + status="running", + source_type="Stripe", + job_inputs={"stripe_secret_key": "test-key", "stripe_account_id": "acct_id"}, + ) + + schema = await sync_to_async(ExternalDataSchema.objects.create)( + name="Customer", + team_id=team.pk, + source_id=source.pk, + sync_type=ExternalDataSchema.SyncType.FULL_REFRESH, + sync_type_config={}, + ) + + workflow_id = str(uuid.uuid4()) + inputs = ExternalDataWorkflowInputs( + team_id=team.id, + external_data_source_id=source.pk, + external_data_schema_id=schema.id, + ) + + with ( + mock.patch( + "posthog.temporal.data_imports.workflow_activities.check_billing_limits.list_limited_team_attributes", + ) as mock_list_limited_team_attributes, + mock.patch.object(posthoganalytics, "capture") as capture_mock, + ): + mock_list_limited_team_attributes.side_effect = Exception( + "401 Client Error:\nUnauthorized for url: https://api.stripe.com" + ) + + with pytest.raises(Exception): + await _execute_run(workflow_id, inputs, stripe_customer["data"]) + + capture_mock.assert_called_once() + + job: ExternalDataJob = await sync_to_async(ExternalDataJob.objects.get)(team_id=team.id, schema_id=schema.pk) + await sync_to_async(schema.refresh_from_db)() + + assert job.status == ExternalDataJob.Status.FAILED + assert schema.should_sync is False + + with pytest.raises(Exception): + await sync_to_async(execute_hogql_query)("SELECT * FROM stripe_customer", team) + + +@pytest.mark.django_db(transaction=True) +@pytest.mark.asyncio +async def test_delta_table_deleted(team, stripe_balance_transaction): + workflow_id, inputs = await _run( + team=team, + schema_name="BalanceTransaction", + table_name="stripe_balancetransaction", + source_type="Stripe", + job_inputs={"stripe_secret_key": "test-key", "stripe_account_id": "acct_id"}, + mock_data_response=stripe_balance_transaction["data"], + sync_type=ExternalDataSchema.SyncType.FULL_REFRESH, + ) + + with mock.patch.object(DeltaTable, "delete") as mock_delta_table_delete: + await _execute_run(str(uuid.uuid4()), inputs, stripe_balance_transaction["data"]) + + mock_delta_table_delete.assert_called_once() diff --git a/posthog/warehouse/api/test/test_saved_query.py b/posthog/warehouse/api/test/test_saved_query.py index e739a1d0b5b..6bc7f0c07ac 100644 --- a/posthog/warehouse/api/test/test_saved_query.py +++ b/posthog/warehouse/api/test/test_saved_query.py @@ -124,7 +124,7 @@ class TestSavedQuery(APIBaseTest): }, }, ) - self.assertEqual(saved_view_2_response.status_code, 400, saved_view_2_response.content) + self.assertEqual(saved_view_2_response.status_code, 201, saved_view_2_response.content) def test_create_with_saved_query(self): response = self.client.post( diff --git a/rust/.sqlx/query-075421be22b51c50eb74ac1156175c285bc510766c175b1b8c4e4002e04ff503.json b/rust/.sqlx/query-075421be22b51c50eb74ac1156175c285bc510766c175b1b8c4e4002e04ff503.json index a2cb4e3a0a8..35683fb0fb9 100644 --- a/rust/.sqlx/query-075421be22b51c50eb74ac1156175c285bc510766c175b1b8c4e4002e04ff503.json +++ b/rust/.sqlx/query-075421be22b51c50eb74ac1156175c285bc510766c175b1b8c4e4002e04ff503.json @@ -1,12 +1,15 @@ { - "db_name": "PostgreSQL", - "query": "UPDATE cyclotron_jobs SET last_heartbeat = NOW() WHERE id = $1 AND lock_id = $2", - "describe": { - "columns": [], - "parameters": { - "Left": ["Uuid", "Uuid"] - }, - "nullable": [] + "db_name": "PostgreSQL", + "query": "UPDATE cyclotron_jobs SET last_heartbeat = NOW() WHERE id = $1 AND lock_id = $2", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + 
"Uuid" + ] }, - "hash": "075421be22b51c50eb74ac1156175c285bc510766c175b1b8c4e4002e04ff503" + "nullable": [] + }, + "hash": "075421be22b51c50eb74ac1156175c285bc510766c175b1b8c4e4002e04ff503" } diff --git a/rust/.sqlx/query-085d682315a548d578f63bb48d2f1997fc9cf1fb3436d742e9374ad4f2d55614.json b/rust/.sqlx/query-085d682315a548d578f63bb48d2f1997fc9cf1fb3436d742e9374ad4f2d55614.json index 972b51fccc5..9329bd2f2da 100644 --- a/rust/.sqlx/query-085d682315a548d578f63bb48d2f1997fc9cf1fb3436d742e9374ad4f2d55614.json +++ b/rust/.sqlx/query-085d682315a548d578f63bb48d2f1997fc9cf1fb3436d742e9374ad4f2d55614.json @@ -1,12 +1,21 @@ { - "db_name": "PostgreSQL", - "query": "\n INSERT INTO posthog_errortrackingstackframe (raw_id, team_id, created_at, symbol_set_id, contents, resolved, id, context)\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8)\n ON CONFLICT (raw_id, team_id) DO UPDATE SET\n created_at = $3,\n symbol_set_id = $4,\n contents = $5,\n resolved = $6,\n context = $8\n ", - "describe": { - "columns": [], - "parameters": { - "Left": ["Text", "Int4", "Timestamptz", "Uuid", "Jsonb", "Bool", "Uuid", "Text"] - }, - "nullable": [] + "db_name": "PostgreSQL", + "query": "\n INSERT INTO posthog_errortrackingstackframe (raw_id, team_id, created_at, symbol_set_id, contents, resolved, id, context)\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8)\n ON CONFLICT (raw_id, team_id) DO UPDATE SET\n created_at = $3,\n symbol_set_id = $4,\n contents = $5,\n resolved = $6,\n context = $8\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Int4", + "Timestamptz", + "Uuid", + "Jsonb", + "Bool", + "Uuid", + "Text" + ] }, - "hash": "085d682315a548d578f63bb48d2f1997fc9cf1fb3436d742e9374ad4f2d55614" + "nullable": [] + }, + "hash": "085d682315a548d578f63bb48d2f1997fc9cf1fb3436d742e9374ad4f2d55614" } diff --git a/rust/.sqlx/query-14332a535d61ab0144d1f4dbb1cf68cd3acdf4dabc1eff87ae7bce49cee9328a.json b/rust/.sqlx/query-14332a535d61ab0144d1f4dbb1cf68cd3acdf4dabc1eff87ae7bce49cee9328a.json index b489060b996..c1b847f8824 100644 --- a/rust/.sqlx/query-14332a535d61ab0144d1f4dbb1cf68cd3acdf4dabc1eff87ae7bce49cee9328a.json +++ b/rust/.sqlx/query-14332a535d61ab0144d1f4dbb1cf68cd3acdf4dabc1eff87ae7bce49cee9328a.json @@ -1,38 +1,47 @@ { - "db_name": "PostgreSQL", - "query": "\n SELECT id, team_id, issue_id, fingerprint, version FROM posthog_errortrackingissuefingerprintv2\n WHERE team_id = $1 AND fingerprint = $2\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Uuid" - }, - { - "ordinal": 1, - "name": "team_id", - "type_info": "Int4" - }, - { - "ordinal": 2, - "name": "issue_id", - "type_info": "Uuid" - }, - { - "ordinal": 3, - "name": "fingerprint", - "type_info": "Text" - }, - { - "ordinal": 4, - "name": "version", - "type_info": "Int8" - } - ], - "parameters": { - "Left": ["Int4", "Text"] - }, - "nullable": [false, false, false, false, false] + "db_name": "PostgreSQL", + "query": "\n SELECT id, team_id, issue_id, fingerprint, version FROM posthog_errortrackingissuefingerprintv2\n WHERE team_id = $1 AND fingerprint = $2\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "team_id", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "issue_id", + "type_info": "Uuid" + }, + { + "ordinal": 3, + "name": "fingerprint", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "version", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int4", + "Text" + ] }, - "hash": 
"14332a535d61ab0144d1f4dbb1cf68cd3acdf4dabc1eff87ae7bce49cee9328a" + "nullable": [ + false, + false, + false, + false, + false + ] + }, + "hash": "14332a535d61ab0144d1f4dbb1cf68cd3acdf4dabc1eff87ae7bce49cee9328a" } diff --git a/rust/.sqlx/query-229c28c25aec24180c29e6ed636c165376f43116b85921c62b36b1b8e85562b0.json b/rust/.sqlx/query-229c28c25aec24180c29e6ed636c165376f43116b85921c62b36b1b8e85562b0.json index ffda6f4b70b..04f6c0838ae 100644 --- a/rust/.sqlx/query-229c28c25aec24180c29e6ed636c165376f43116b85921c62b36b1b8e85562b0.json +++ b/rust/.sqlx/query-229c28c25aec24180c29e6ed636c165376f43116b85921c62b36b1b8e85562b0.json @@ -1,123 +1,133 @@ { - "db_name": "PostgreSQL", - "query": "\nWITH available AS (\n SELECT\n id,\n state\n FROM cyclotron_jobs\n WHERE\n state = 'available'::JobState\n AND queue_name = $1\n AND scheduled <= NOW()\n ORDER BY\n priority ASC,\n scheduled ASC\n LIMIT $2\n FOR UPDATE SKIP LOCKED\n)\nUPDATE cyclotron_jobs\nSET\n state = 'running'::JobState,\n lock_id = $3,\n last_heartbeat = NOW(),\n last_transition = NOW(),\n transition_count = transition_count + 1\nFROM available\nWHERE\n cyclotron_jobs.id = available.id\nRETURNING\n cyclotron_jobs.id,\n team_id,\n available.state as \"state: JobState\",\n queue_name,\n priority,\n function_id,\n created,\n last_transition,\n scheduled,\n transition_count,\n NULL::bytea as vm_state,\n metadata,\n parameters,\n blob,\n lock_id,\n last_heartbeat,\n janitor_touch_count\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Uuid" - }, - { - "ordinal": 1, - "name": "team_id", - "type_info": "Int4" - }, - { - "ordinal": 2, - "name": "state: JobState", - "type_info": { - "Custom": { - "name": "jobstate", - "kind": { - "Enum": ["available", "completed", "failed", "running", "paused"] - } - } - } - }, - { - "ordinal": 3, - "name": "queue_name", - "type_info": "Text" - }, - { - "ordinal": 4, - "name": "priority", - "type_info": "Int2" - }, - { - "ordinal": 5, - "name": "function_id", - "type_info": "Uuid" - }, - { - "ordinal": 6, - "name": "created", - "type_info": "Timestamptz" - }, - { - "ordinal": 7, - "name": "last_transition", - "type_info": "Timestamptz" - }, - { - "ordinal": 8, - "name": "scheduled", - "type_info": "Timestamptz" - }, - { - "ordinal": 9, - "name": "transition_count", - "type_info": "Int2" - }, - { - "ordinal": 10, - "name": "vm_state", - "type_info": "Bytea" - }, - { - "ordinal": 11, - "name": "metadata", - "type_info": "Bytea" - }, - { - "ordinal": 12, - "name": "parameters", - "type_info": "Bytea" - }, - { - "ordinal": 13, - "name": "blob", - "type_info": "Bytea" - }, - { - "ordinal": 14, - "name": "lock_id", - "type_info": "Uuid" - }, - { - "ordinal": 15, - "name": "last_heartbeat", - "type_info": "Timestamptz" - }, - { - "ordinal": 16, - "name": "janitor_touch_count", - "type_info": "Int2" + "db_name": "PostgreSQL", + "query": "\nWITH available AS (\n SELECT\n id,\n state\n FROM cyclotron_jobs\n WHERE\n state = 'available'::JobState\n AND queue_name = $1\n AND scheduled <= NOW()\n ORDER BY\n priority ASC,\n scheduled ASC\n LIMIT $2\n FOR UPDATE SKIP LOCKED\n)\nUPDATE cyclotron_jobs\nSET\n state = 'running'::JobState,\n lock_id = $3,\n last_heartbeat = NOW(),\n last_transition = NOW(),\n transition_count = transition_count + 1\nFROM available\nWHERE\n cyclotron_jobs.id = available.id\nRETURNING\n cyclotron_jobs.id,\n team_id,\n available.state as \"state: JobState\",\n queue_name,\n priority,\n function_id,\n created,\n last_transition,\n scheduled,\n transition_count,\n 
NULL::bytea as vm_state,\n metadata,\n parameters,\n blob,\n lock_id,\n last_heartbeat,\n janitor_touch_count\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "team_id", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "state: JobState", + "type_info": { + "Custom": { + "name": "jobstate", + "kind": { + "Enum": [ + "available", + "completed", + "failed", + "running", + "paused" + ] } - ], - "parameters": { - "Left": ["Text", "Int8", "Uuid"] - }, - "nullable": [ - false, - false, - false, - false, - false, - true, - false, - false, - false, - false, - null, - true, - true, - true, - true, - true, - false - ] + } + } + }, + { + "ordinal": 3, + "name": "queue_name", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "priority", + "type_info": "Int2" + }, + { + "ordinal": 5, + "name": "function_id", + "type_info": "Uuid" + }, + { + "ordinal": 6, + "name": "created", + "type_info": "Timestamptz" + }, + { + "ordinal": 7, + "name": "last_transition", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "scheduled", + "type_info": "Timestamptz" + }, + { + "ordinal": 9, + "name": "transition_count", + "type_info": "Int2" + }, + { + "ordinal": 10, + "name": "vm_state", + "type_info": "Bytea" + }, + { + "ordinal": 11, + "name": "metadata", + "type_info": "Bytea" + }, + { + "ordinal": 12, + "name": "parameters", + "type_info": "Bytea" + }, + { + "ordinal": 13, + "name": "blob", + "type_info": "Bytea" + }, + { + "ordinal": 14, + "name": "lock_id", + "type_info": "Uuid" + }, + { + "ordinal": 15, + "name": "last_heartbeat", + "type_info": "Timestamptz" + }, + { + "ordinal": 16, + "name": "janitor_touch_count", + "type_info": "Int2" + } + ], + "parameters": { + "Left": [ + "Text", + "Int8", + "Uuid" + ] }, - "hash": "229c28c25aec24180c29e6ed636c165376f43116b85921c62b36b1b8e85562b0" + "nullable": [ + false, + false, + false, + false, + false, + true, + false, + false, + false, + false, + null, + true, + true, + true, + true, + true, + false + ] + }, + "hash": "229c28c25aec24180c29e6ed636c165376f43116b85921c62b36b1b8e85562b0" } diff --git a/rust/.sqlx/query-2a1514685491269ca61a0a898d7491f908b5047e86b13fbff22cf8f58e85cc81.json b/rust/.sqlx/query-2a1514685491269ca61a0a898d7491f908b5047e86b13fbff22cf8f58e85cc81.json new file mode 100644 index 00000000000..970ef472668 --- /dev/null +++ b/rust/.sqlx/query-2a1514685491269ca61a0a898d7491f908b5047e86b13fbff22cf8f58e85cc81.json @@ -0,0 +1,47 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT id, team_id, status, name, description FROM posthog_errortrackingissue\n WHERE team_id = $1 AND id = $2\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "team_id", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "status", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "name", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "description", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Int4", + "Uuid" + ] + }, + "nullable": [ + false, + false, + false, + true, + true + ] + }, + "hash": "2a1514685491269ca61a0a898d7491f908b5047e86b13fbff22cf8f58e85cc81" +} diff --git a/rust/.sqlx/query-2bd3251126625d8dd5143f58f4f9c4bbd0c3a17b7ea65767cf5e7512e5a6ea89.json b/rust/.sqlx/query-2bd3251126625d8dd5143f58f4f9c4bbd0c3a17b7ea65767cf5e7512e5a6ea89.json index cfcbdd6288f..1bdb0efcc34 100644 --- 
a/rust/.sqlx/query-2bd3251126625d8dd5143f58f4f9c4bbd0c3a17b7ea65767cf5e7512e5a6ea89.json +++ b/rust/.sqlx/query-2bd3251126625d8dd5143f58f4f9c4bbd0c3a17b7ea65767cf5e7512e5a6ea89.json @@ -1,18 +1,23 @@ { - "db_name": "PostgreSQL", - "query": "UPDATE cyclotron_jobs SET state = 'running', lock_id = $1, last_heartbeat=NOW() WHERE id = $2 returning queue_name", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "queue_name", - "type_info": "Text" - } - ], - "parameters": { - "Left": ["Uuid", "Uuid"] - }, - "nullable": [false] + "db_name": "PostgreSQL", + "query": "UPDATE cyclotron_jobs SET state = 'running', lock_id = $1, last_heartbeat=NOW() WHERE id = $2 returning queue_name", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "queue_name", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Uuid", + "Uuid" + ] }, - "hash": "2bd3251126625d8dd5143f58f4f9c4bbd0c3a17b7ea65767cf5e7512e5a6ea89" + "nullable": [ + false + ] + }, + "hash": "2bd3251126625d8dd5143f58f4f9c4bbd0c3a17b7ea65767cf5e7512e5a6ea89" } diff --git a/rust/.sqlx/query-2ca9ea5e8706bba21b14d9a349f3d0e39f01b19b243d724b09f3ce6617d03dc7.json b/rust/.sqlx/query-2ca9ea5e8706bba21b14d9a349f3d0e39f01b19b243d724b09f3ce6617d03dc7.json index e69786b54b2..affa061018e 100644 --- a/rust/.sqlx/query-2ca9ea5e8706bba21b14d9a349f3d0e39f01b19b243d724b09f3ce6617d03dc7.json +++ b/rust/.sqlx/query-2ca9ea5e8706bba21b14d9a349f3d0e39f01b19b243d724b09f3ce6617d03dc7.json @@ -1,12 +1,15 @@ { - "db_name": "PostgreSQL", - "query": "UPDATE cyclotron_jobs SET state = 'available', lock_id = NULL, queue_name = $1 WHERE id = $2", - "describe": { - "columns": [], - "parameters": { - "Left": ["Text", "Uuid"] - }, - "nullable": [] + "db_name": "PostgreSQL", + "query": "UPDATE cyclotron_jobs SET state = 'available', lock_id = NULL, queue_name = $1 WHERE id = $2", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Uuid" + ] }, - "hash": "2ca9ea5e8706bba21b14d9a349f3d0e39f01b19b243d724b09f3ce6617d03dc7" + "nullable": [] + }, + "hash": "2ca9ea5e8706bba21b14d9a349f3d0e39f01b19b243d724b09f3ce6617d03dc7" } diff --git a/rust/.sqlx/query-361eb26d51d253242a8af33e45d8686c4393f6420e61b6b141143974ff362213.json b/rust/.sqlx/query-361eb26d51d253242a8af33e45d8686c4393f6420e61b6b141143974ff362213.json index 7a0918905c0..8ea73c5d641 100644 --- a/rust/.sqlx/query-361eb26d51d253242a8af33e45d8686c4393f6420e61b6b141143974ff362213.json +++ b/rust/.sqlx/query-361eb26d51d253242a8af33e45d8686c4393f6420e61b6b141143974ff362213.json @@ -1,43 +1,53 @@ { - "db_name": "PostgreSQL", - "query": "SELECT id, team_id, ref as set_ref, storage_ptr, created_at, failure_reason\n FROM posthog_errortrackingsymbolset\n WHERE team_id = $1 AND ref = $2", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Uuid" - }, - { - "ordinal": 1, - "name": "team_id", - "type_info": "Int4" - }, - { - "ordinal": 2, - "name": "set_ref", - "type_info": "Text" - }, - { - "ordinal": 3, - "name": "storage_ptr", - "type_info": "Text" - }, - { - "ordinal": 4, - "name": "created_at", - "type_info": "Timestamptz" - }, - { - "ordinal": 5, - "name": "failure_reason", - "type_info": "Text" - } - ], - "parameters": { - "Left": ["Int4", "Text"] - }, - "nullable": [false, false, false, true, false, true] + "db_name": "PostgreSQL", + "query": "SELECT id, team_id, ref as set_ref, storage_ptr, created_at, failure_reason\n FROM posthog_errortrackingsymbolset\n WHERE team_id = $1 AND ref = $2", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + 
"type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "team_id", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "set_ref", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "storage_ptr", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 5, + "name": "failure_reason", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Int4", + "Text" + ] }, - "hash": "361eb26d51d253242a8af33e45d8686c4393f6420e61b6b141143974ff362213" + "nullable": [ + false, + false, + false, + true, + false, + true + ] + }, + "hash": "361eb26d51d253242a8af33e45d8686c4393f6420e61b6b141143974ff362213" } diff --git a/rust/.sqlx/query-385e94f4adab0f85174968f6eee873bf6d1d43884cd628df5b36978dd761b025.json b/rust/.sqlx/query-385e94f4adab0f85174968f6eee873bf6d1d43884cd628df5b36978dd761b025.json index 5c6b66d3f87..89a968bc010 100644 --- a/rust/.sqlx/query-385e94f4adab0f85174968f6eee873bf6d1d43884cd628df5b36978dd761b025.json +++ b/rust/.sqlx/query-385e94f4adab0f85174968f6eee873bf6d1d43884cd628df5b36978dd761b025.json @@ -1,18 +1,23 @@ { - "db_name": "PostgreSQL", - "query": "\nSELECT id FROM cyclotron_jobs WHERE state = 'running' AND COALESCE(last_heartbeat, $1) <= $1 AND janitor_touch_count >= $2\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Uuid" - } - ], - "parameters": { - "Left": ["Timestamptz", "Int2"] - }, - "nullable": [false] + "db_name": "PostgreSQL", + "query": "\nSELECT id FROM cyclotron_jobs WHERE state = 'running' AND COALESCE(last_heartbeat, $1) <= $1 AND janitor_touch_count >= $2\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + } + ], + "parameters": { + "Left": [ + "Timestamptz", + "Int2" + ] }, - "hash": "385e94f4adab0f85174968f6eee873bf6d1d43884cd628df5b36978dd761b025" + "nullable": [ + false + ] + }, + "hash": "385e94f4adab0f85174968f6eee873bf6d1d43884cd628df5b36978dd761b025" } diff --git a/rust/.sqlx/query-42e393046a686e6a69daa920dc2ab521aa6f393027c399a0c40139f5f8a0a45e.json b/rust/.sqlx/query-42e393046a686e6a69daa920dc2ab521aa6f393027c399a0c40139f5f8a0a45e.json index aa1ff7e7cf3..890675aa24d 100644 --- a/rust/.sqlx/query-42e393046a686e6a69daa920dc2ab521aa6f393027c399a0c40139f5f8a0a45e.json +++ b/rust/.sqlx/query-42e393046a686e6a69daa920dc2ab521aa6f393027c399a0c40139f5f8a0a45e.json @@ -1,12 +1,16 @@ { - "db_name": "PostgreSQL", - "query": "INSERT INTO posthog_eventproperty (event, property, team_id) VALUES ($1, $2, $3) ON CONFLICT DO NOTHING", - "describe": { - "columns": [], - "parameters": { - "Left": ["Varchar", "Varchar", "Int4"] - }, - "nullable": [] + "db_name": "PostgreSQL", + "query": "INSERT INTO posthog_eventproperty (event, property, team_id) VALUES ($1, $2, $3) ON CONFLICT DO NOTHING", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Varchar", + "Varchar", + "Int4" + ] }, - "hash": "42e393046a686e6a69daa920dc2ab521aa6f393027c399a0c40139f5f8a0a45e" + "nullable": [] + }, + "hash": "42e393046a686e6a69daa920dc2ab521aa6f393027c399a0c40139f5f8a0a45e" } diff --git a/rust/.sqlx/query-44eb698252059821770eaaf5b8213132c3e7bf297834cd42228b35bf3e424dd7.json b/rust/.sqlx/query-44eb698252059821770eaaf5b8213132c3e7bf297834cd42228b35bf3e424dd7.json deleted file mode 100644 index 8d5f49a0735..00000000000 --- a/rust/.sqlx/query-44eb698252059821770eaaf5b8213132c3e7bf297834cd42228b35bf3e424dd7.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO posthog_errortrackingissue (id, 
team_id, status, created_at)\n VALUES ($1, $2, $3, NOW())\n ON CONFLICT (id) DO NOTHING\n RETURNING (xmax = 0) AS was_inserted\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "was_inserted", - "type_info": "Bool" - } - ], - "parameters": { - "Left": ["Uuid", "Int4", "Text"] - }, - "nullable": [null] - }, - "hash": "44eb698252059821770eaaf5b8213132c3e7bf297834cd42228b35bf3e424dd7" -} diff --git a/rust/.sqlx/query-4b24f800767bc20852115e7406f8dc46c18f4950a7951d2c412dcba4d13fb56b.json b/rust/.sqlx/query-4b24f800767bc20852115e7406f8dc46c18f4950a7951d2c412dcba4d13fb56b.json index 1f374d8084c..515f058fad9 100644 --- a/rust/.sqlx/query-4b24f800767bc20852115e7406f8dc46c18f4950a7951d2c412dcba4d13fb56b.json +++ b/rust/.sqlx/query-4b24f800767bc20852115e7406f8dc46c18f4950a7951d2c412dcba4d13fb56b.json @@ -1,48 +1,59 @@ { - "db_name": "PostgreSQL", - "query": "\n SELECT raw_id, team_id, created_at, symbol_set_id, contents, resolved, context\n FROM posthog_errortrackingstackframe\n WHERE raw_id = $1 AND team_id = $2\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "raw_id", - "type_info": "Text" - }, - { - "ordinal": 1, - "name": "team_id", - "type_info": "Int4" - }, - { - "ordinal": 2, - "name": "created_at", - "type_info": "Timestamptz" - }, - { - "ordinal": 3, - "name": "symbol_set_id", - "type_info": "Uuid" - }, - { - "ordinal": 4, - "name": "contents", - "type_info": "Jsonb" - }, - { - "ordinal": 5, - "name": "resolved", - "type_info": "Bool" - }, - { - "ordinal": 6, - "name": "context", - "type_info": "Text" - } - ], - "parameters": { - "Left": ["Text", "Int4"] - }, - "nullable": [false, false, false, true, false, false, true] + "db_name": "PostgreSQL", + "query": "\n SELECT raw_id, team_id, created_at, symbol_set_id, contents, resolved, context\n FROM posthog_errortrackingstackframe\n WHERE raw_id = $1 AND team_id = $2\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "raw_id", + "type_info": "Text" + }, + { + "ordinal": 1, + "name": "team_id", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 3, + "name": "symbol_set_id", + "type_info": "Uuid" + }, + { + "ordinal": 4, + "name": "contents", + "type_info": "Jsonb" + }, + { + "ordinal": 5, + "name": "resolved", + "type_info": "Bool" + }, + { + "ordinal": 6, + "name": "context", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Text", + "Int4" + ] }, - "hash": "4b24f800767bc20852115e7406f8dc46c18f4950a7951d2c412dcba4d13fb56b" + "nullable": [ + false, + false, + false, + true, + false, + false, + true + ] + }, + "hash": "4b24f800767bc20852115e7406f8dc46c18f4950a7951d2c412dcba4d13fb56b" } diff --git a/rust/.sqlx/query-54d9afe6952f92b753fbce2c4e8554065b71152389f98d35532c6b332d5a4c9d.json b/rust/.sqlx/query-54d9afe6952f92b753fbce2c4e8554065b71152389f98d35532c6b332d5a4c9d.json index 2ff58c66714..6bbfb17e16a 100644 --- a/rust/.sqlx/query-54d9afe6952f92b753fbce2c4e8554065b71152389f98d35532c6b332d5a4c9d.json +++ b/rust/.sqlx/query-54d9afe6952f92b753fbce2c4e8554065b71152389f98d35532c6b332d5a4c9d.json @@ -1,12 +1,14 @@ { - "db_name": "PostgreSQL", - "query": "\nWITH stalled AS (\n SELECT id FROM cyclotron_jobs WHERE state = 'running' AND COALESCE(last_heartbeat, $1) <= $1 FOR UPDATE SKIP LOCKED\n)\nUPDATE cyclotron_jobs\nSET state = 'available', lock_id = NULL, last_heartbeat = NULL, janitor_touch_count = janitor_touch_count + 1\nFROM stalled\nWHERE cyclotron_jobs.id = stalled.id\n ", - "describe": { - "columns": [], - 
"parameters": { - "Left": ["Timestamptz"] - }, - "nullable": [] + "db_name": "PostgreSQL", + "query": "\nWITH stalled AS (\n SELECT id FROM cyclotron_jobs WHERE state = 'running' AND COALESCE(last_heartbeat, $1) <= $1 FOR UPDATE SKIP LOCKED\n)\nUPDATE cyclotron_jobs\nSET state = 'available', lock_id = NULL, last_heartbeat = NULL, janitor_touch_count = janitor_touch_count + 1\nFROM stalled\nWHERE cyclotron_jobs.id = stalled.id\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Timestamptz" + ] }, - "hash": "54d9afe6952f92b753fbce2c4e8554065b71152389f98d35532c6b332d5a4c9d" + "nullable": [] + }, + "hash": "54d9afe6952f92b753fbce2c4e8554065b71152389f98d35532c6b332d5a4c9d" } diff --git a/rust/.sqlx/query-78f54fcebc11e2411008448281e4711bdfb8cf78e362ccda8bc14e92324d51f8.json b/rust/.sqlx/query-78f54fcebc11e2411008448281e4711bdfb8cf78e362ccda8bc14e92324d51f8.json index d70d4c9d33a..44118c74052 100644 --- a/rust/.sqlx/query-78f54fcebc11e2411008448281e4711bdfb8cf78e362ccda8bc14e92324d51f8.json +++ b/rust/.sqlx/query-78f54fcebc11e2411008448281e4711bdfb8cf78e362ccda8bc14e92324d51f8.json @@ -1,18 +1,22 @@ { - "db_name": "PostgreSQL", - "query": "SELECT COUNT(*) FROM cyclotron_jobs WHERE queue_name = $1", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "count", - "type_info": "Int8" - } - ], - "parameters": { - "Left": ["Text"] - }, - "nullable": [null] + "db_name": "PostgreSQL", + "query": "SELECT COUNT(*) FROM cyclotron_jobs WHERE queue_name = $1", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Text" + ] }, - "hash": "78f54fcebc11e2411008448281e4711bdfb8cf78e362ccda8bc14e92324d51f8" + "nullable": [ + null + ] + }, + "hash": "78f54fcebc11e2411008448281e4711bdfb8cf78e362ccda8bc14e92324d51f8" } diff --git a/rust/.sqlx/query-917e3d14c15558a1e0bb1d7015ed687eb545ee9d4ccbb8b69c958a357d49f687.json b/rust/.sqlx/query-917e3d14c15558a1e0bb1d7015ed687eb545ee9d4ccbb8b69c958a357d49f687.json index 188cd0be389..6f3e42a0a8b 100644 --- a/rust/.sqlx/query-917e3d14c15558a1e0bb1d7015ed687eb545ee9d4ccbb8b69c958a357d49f687.json +++ b/rust/.sqlx/query-917e3d14c15558a1e0bb1d7015ed687eb545ee9d4ccbb8b69c958a357d49f687.json @@ -1,12 +1,17 @@ { - "db_name": "PostgreSQL", - "query": "\n INSERT INTO posthog_eventdefinition (id, name, volume_30_day, query_usage_30_day, team_id, last_seen_at, created_at)\n VALUES ($1, $2, NULL, NULL, $3, $4, NOW()) ON CONFLICT\n ON CONSTRAINT posthog_eventdefinition_team_id_name_80fa0b87_uniq\n DO UPDATE SET last_seen_at = $4\n ", - "describe": { - "columns": [], - "parameters": { - "Left": ["Uuid", "Varchar", "Int4", "Timestamptz"] - }, - "nullable": [] + "db_name": "PostgreSQL", + "query": "\n INSERT INTO posthog_eventdefinition (id, name, volume_30_day, query_usage_30_day, team_id, last_seen_at, created_at)\n VALUES ($1, $2, NULL, NULL, $3, $4, NOW()) ON CONFLICT\n ON CONSTRAINT posthog_eventdefinition_team_id_name_80fa0b87_uniq\n DO UPDATE SET last_seen_at = $4\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Varchar", + "Int4", + "Timestamptz" + ] }, - "hash": "917e3d14c15558a1e0bb1d7015ed687eb545ee9d4ccbb8b69c958a357d49f687" + "nullable": [] + }, + "hash": "917e3d14c15558a1e0bb1d7015ed687eb545ee9d4ccbb8b69c958a357d49f687" } diff --git a/rust/.sqlx/query-aa595eaf28c1f4b872c278be407b59cc00f3125413f4032ac3647a6b5ee1a632.json b/rust/.sqlx/query-aa595eaf28c1f4b872c278be407b59cc00f3125413f4032ac3647a6b5ee1a632.json index 51fb1b01812..244f029a6a4 
100644 --- a/rust/.sqlx/query-aa595eaf28c1f4b872c278be407b59cc00f3125413f4032ac3647a6b5ee1a632.json +++ b/rust/.sqlx/query-aa595eaf28c1f4b872c278be407b59cc00f3125413f4032ac3647a6b5ee1a632.json @@ -1,18 +1,23 @@ { - "db_name": "PostgreSQL", - "query": "SELECT vm_state FROM cyclotron_jobs WHERE id = $1 AND lock_id = $2", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "vm_state", - "type_info": "Bytea" - } - ], - "parameters": { - "Left": ["Uuid", "Uuid"] - }, - "nullable": [true] + "db_name": "PostgreSQL", + "query": "SELECT vm_state FROM cyclotron_jobs WHERE id = $1 AND lock_id = $2", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "vm_state", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [ + "Uuid", + "Uuid" + ] }, - "hash": "aa595eaf28c1f4b872c278be407b59cc00f3125413f4032ac3647a6b5ee1a632" + "nullable": [ + true + ] + }, + "hash": "aa595eaf28c1f4b872c278be407b59cc00f3125413f4032ac3647a6b5ee1a632" } diff --git a/rust/.sqlx/query-ad528f712bdaf75a82293018e3dec3544acd0228776b8535f22438db59002e1f.json b/rust/.sqlx/query-ad528f712bdaf75a82293018e3dec3544acd0228776b8535f22438db59002e1f.json deleted file mode 100644 index 1f2ff6b9069..00000000000 --- a/rust/.sqlx/query-ad528f712bdaf75a82293018e3dec3544acd0228776b8535f22438db59002e1f.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT id, team_id, status FROM posthog_errortrackingissue\n WHERE team_id = $1 AND id = $2\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Uuid" - }, - { - "ordinal": 1, - "name": "team_id", - "type_info": "Int4" - }, - { - "ordinal": 2, - "name": "status", - "type_info": "Text" - } - ], - "parameters": { - "Left": ["Int4", "Uuid"] - }, - "nullable": [false, false, false] - }, - "hash": "ad528f712bdaf75a82293018e3dec3544acd0228776b8535f22438db59002e1f" -} diff --git a/rust/.sqlx/query-b420ccc79fa7847f65246adf76a074995829d1372fe8ec1fd683f9524bcebb8b.json b/rust/.sqlx/query-b420ccc79fa7847f65246adf76a074995829d1372fe8ec1fd683f9524bcebb8b.json index ed79a7102f5..8a40f0f253f 100644 --- a/rust/.sqlx/query-b420ccc79fa7847f65246adf76a074995829d1372fe8ec1fd683f9524bcebb8b.json +++ b/rust/.sqlx/query-b420ccc79fa7847f65246adf76a074995829d1372fe8ec1fd683f9524bcebb8b.json @@ -1,23 +1,26 @@ { - "db_name": "PostgreSQL", - "query": "SELECT COUNT(*), queue_name FROM cyclotron_jobs WHERE state = 'available' AND scheduled <= NOW() GROUP BY queue_name", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "count", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "queue_name", - "type_info": "Text" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [null, false] + "db_name": "PostgreSQL", + "query": "SELECT COUNT(*), queue_name FROM cyclotron_jobs WHERE state = 'available' AND scheduled <= NOW() GROUP BY queue_name", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "queue_name", + "type_info": "Text" + } + ], + "parameters": { + "Left": [] }, - "hash": "b420ccc79fa7847f65246adf76a074995829d1372fe8ec1fd683f9524bcebb8b" + "nullable": [ + null, + false + ] + }, + "hash": "b420ccc79fa7847f65246adf76a074995829d1372fe8ec1fd683f9524bcebb8b" } diff --git a/rust/.sqlx/query-b8c1b723826d595dca0389d729fa76bd8a7d96d73983a0c408f32f17da5f483b.json b/rust/.sqlx/query-b8c1b723826d595dca0389d729fa76bd8a7d96d73983a0c408f32f17da5f483b.json index 8f201d80503..7733c346c4b 100644 --- 
a/rust/.sqlx/query-b8c1b723826d595dca0389d729fa76bd8a7d96d73983a0c408f32f17da5f483b.json +++ b/rust/.sqlx/query-b8c1b723826d595dca0389d729fa76bd8a7d96d73983a0c408f32f17da5f483b.json @@ -1,12 +1,16 @@ { - "db_name": "PostgreSQL", - "query": "INSERT INTO cyclotron_dead_letter_metadata (job_id, original_queue_name, reason, dlq_time) VALUES ($1, $2, $3, NOW())", - "describe": { - "columns": [], - "parameters": { - "Left": ["Uuid", "Text", "Text"] - }, - "nullable": [] + "db_name": "PostgreSQL", + "query": "INSERT INTO cyclotron_dead_letter_metadata (job_id, original_queue_name, reason, dlq_time) VALUES ($1, $2, $3, NOW())", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Text", + "Text" + ] }, - "hash": "b8c1b723826d595dca0389d729fa76bd8a7d96d73983a0c408f32f17da5f483b" + "nullable": [] + }, + "hash": "b8c1b723826d595dca0389d729fa76bd8a7d96d73983a0c408f32f17da5f483b" } diff --git a/rust/.sqlx/query-c6ff00fcbbc77c8f5c1b3fe2f3352ea79485e403b9e17b6c37259ea0612065ee.json b/rust/.sqlx/query-c6ff00fcbbc77c8f5c1b3fe2f3352ea79485e403b9e17b6c37259ea0612065ee.json index f6ad8e4e892..608008e09ad 100644 --- a/rust/.sqlx/query-c6ff00fcbbc77c8f5c1b3fe2f3352ea79485e403b9e17b6c37259ea0612065ee.json +++ b/rust/.sqlx/query-c6ff00fcbbc77c8f5c1b3fe2f3352ea79485e403b9e17b6c37259ea0612065ee.json @@ -1,18 +1,23 @@ { - "db_name": "PostgreSQL", - "query": "SELECT group_type_index FROM posthog_grouptypemapping WHERE group_type = $1 AND team_id = $2", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "group_type_index", - "type_info": "Int4" - } - ], - "parameters": { - "Left": ["Text", "Int4"] - }, - "nullable": [false] + "db_name": "PostgreSQL", + "query": "SELECT group_type_index FROM posthog_grouptypemapping WHERE group_type = $1 AND team_id = $2", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "group_type_index", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Text", + "Int4" + ] }, - "hash": "c6ff00fcbbc77c8f5c1b3fe2f3352ea79485e403b9e17b6c37259ea0612065ee" + "nullable": [ + false + ] + }, + "hash": "c6ff00fcbbc77c8f5c1b3fe2f3352ea79485e403b9e17b6c37259ea0612065ee" } diff --git a/rust/.sqlx/query-ce036f16a37a41b9dc5a164de0b52345454cd3323568c4bef5b8480380287068.json b/rust/.sqlx/query-ce036f16a37a41b9dc5a164de0b52345454cd3323568c4bef5b8480380287068.json index fe174820c3a..d996dae8bd4 100644 --- a/rust/.sqlx/query-ce036f16a37a41b9dc5a164de0b52345454cd3323568c4bef5b8480380287068.json +++ b/rust/.sqlx/query-ce036f16a37a41b9dc5a164de0b52345454cd3323568c4bef5b8480380287068.json @@ -1,123 +1,133 @@ { - "db_name": "PostgreSQL", - "query": "\nWITH available AS (\n SELECT\n id,\n state\n FROM cyclotron_jobs\n WHERE\n state = 'available'::JobState\n AND queue_name = $1\n AND scheduled <= NOW()\n ORDER BY\n priority ASC,\n scheduled ASC\n LIMIT $2\n FOR UPDATE SKIP LOCKED\n)\nUPDATE cyclotron_jobs\nSET\n state = 'running'::JobState,\n lock_id = $3,\n last_heartbeat = NOW(),\n last_transition = NOW(),\n transition_count = transition_count + 1\nFROM available\nWHERE\n cyclotron_jobs.id = available.id\nRETURNING\n cyclotron_jobs.id,\n team_id,\n available.state as \"state: JobState\",\n queue_name,\n priority,\n function_id,\n created,\n last_transition,\n scheduled,\n transition_count,\n vm_state,\n metadata,\n parameters,\n blob,\n lock_id,\n last_heartbeat,\n janitor_touch_count\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Uuid" - }, - { - "ordinal": 1, - "name": "team_id", - "type_info": "Int4" - }, - { - "ordinal": 
2, - "name": "state: JobState", - "type_info": { - "Custom": { - "name": "jobstate", - "kind": { - "Enum": ["available", "completed", "failed", "running", "paused"] - } - } - } - }, - { - "ordinal": 3, - "name": "queue_name", - "type_info": "Text" - }, - { - "ordinal": 4, - "name": "priority", - "type_info": "Int2" - }, - { - "ordinal": 5, - "name": "function_id", - "type_info": "Uuid" - }, - { - "ordinal": 6, - "name": "created", - "type_info": "Timestamptz" - }, - { - "ordinal": 7, - "name": "last_transition", - "type_info": "Timestamptz" - }, - { - "ordinal": 8, - "name": "scheduled", - "type_info": "Timestamptz" - }, - { - "ordinal": 9, - "name": "transition_count", - "type_info": "Int2" - }, - { - "ordinal": 10, - "name": "vm_state", - "type_info": "Bytea" - }, - { - "ordinal": 11, - "name": "metadata", - "type_info": "Bytea" - }, - { - "ordinal": 12, - "name": "parameters", - "type_info": "Bytea" - }, - { - "ordinal": 13, - "name": "blob", - "type_info": "Bytea" - }, - { - "ordinal": 14, - "name": "lock_id", - "type_info": "Uuid" - }, - { - "ordinal": 15, - "name": "last_heartbeat", - "type_info": "Timestamptz" - }, - { - "ordinal": 16, - "name": "janitor_touch_count", - "type_info": "Int2" + "db_name": "PostgreSQL", + "query": "\nWITH available AS (\n SELECT\n id,\n state\n FROM cyclotron_jobs\n WHERE\n state = 'available'::JobState\n AND queue_name = $1\n AND scheduled <= NOW()\n ORDER BY\n priority ASC,\n scheduled ASC\n LIMIT $2\n FOR UPDATE SKIP LOCKED\n)\nUPDATE cyclotron_jobs\nSET\n state = 'running'::JobState,\n lock_id = $3,\n last_heartbeat = NOW(),\n last_transition = NOW(),\n transition_count = transition_count + 1\nFROM available\nWHERE\n cyclotron_jobs.id = available.id\nRETURNING\n cyclotron_jobs.id,\n team_id,\n available.state as \"state: JobState\",\n queue_name,\n priority,\n function_id,\n created,\n last_transition,\n scheduled,\n transition_count,\n vm_state,\n metadata,\n parameters,\n blob,\n lock_id,\n last_heartbeat,\n janitor_touch_count\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "team_id", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "state: JobState", + "type_info": { + "Custom": { + "name": "jobstate", + "kind": { + "Enum": [ + "available", + "completed", + "failed", + "running", + "paused" + ] } - ], - "parameters": { - "Left": ["Text", "Int8", "Uuid"] - }, - "nullable": [ - false, - false, - false, - false, - false, - true, - false, - false, - false, - false, - true, - true, - true, - true, - true, - true, - false - ] + } + } + }, + { + "ordinal": 3, + "name": "queue_name", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "priority", + "type_info": "Int2" + }, + { + "ordinal": 5, + "name": "function_id", + "type_info": "Uuid" + }, + { + "ordinal": 6, + "name": "created", + "type_info": "Timestamptz" + }, + { + "ordinal": 7, + "name": "last_transition", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "scheduled", + "type_info": "Timestamptz" + }, + { + "ordinal": 9, + "name": "transition_count", + "type_info": "Int2" + }, + { + "ordinal": 10, + "name": "vm_state", + "type_info": "Bytea" + }, + { + "ordinal": 11, + "name": "metadata", + "type_info": "Bytea" + }, + { + "ordinal": 12, + "name": "parameters", + "type_info": "Bytea" + }, + { + "ordinal": 13, + "name": "blob", + "type_info": "Bytea" + }, + { + "ordinal": 14, + "name": "lock_id", + "type_info": "Uuid" + }, + { + "ordinal": 15, + "name": "last_heartbeat", + "type_info": 
"Timestamptz" + }, + { + "ordinal": 16, + "name": "janitor_touch_count", + "type_info": "Int2" + } + ], + "parameters": { + "Left": [ + "Text", + "Int8", + "Uuid" + ] }, - "hash": "ce036f16a37a41b9dc5a164de0b52345454cd3323568c4bef5b8480380287068" + "nullable": [ + false, + false, + false, + false, + false, + true, + false, + false, + false, + false, + true, + true, + true, + true, + true, + true, + false + ] + }, + "hash": "ce036f16a37a41b9dc5a164de0b52345454cd3323568c4bef5b8480380287068" } diff --git a/rust/.sqlx/query-e0c6790eccd2e7505d86ed570f093c27fdf4d88145cd7eaf4bbd39e49a7452f6.json b/rust/.sqlx/query-e0c6790eccd2e7505d86ed570f093c27fdf4d88145cd7eaf4bbd39e49a7452f6.json new file mode 100644 index 00000000000..a0513cd7b42 --- /dev/null +++ b/rust/.sqlx/query-e0c6790eccd2e7505d86ed570f093c27fdf4d88145cd7eaf4bbd39e49a7452f6.json @@ -0,0 +1,26 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO posthog_errortrackingissue (id, team_id, status, name, description, created_at)\n VALUES ($1, $2, $3, $4, $5, NOW())\n ON CONFLICT (id) DO NOTHING\n RETURNING (xmax = 0) AS was_inserted\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "was_inserted", + "type_info": "Bool" + } + ], + "parameters": { + "Left": [ + "Uuid", + "Int4", + "Text", + "Text", + "Text" + ] + }, + "nullable": [ + null + ] + }, + "hash": "e0c6790eccd2e7505d86ed570f093c27fdf4d88145cd7eaf4bbd39e49a7452f6" +} diff --git a/rust/.sqlx/query-e842f1ed33747bde4570c6d861d856c4cbd8beb519df8212212017dda9d06c51.json b/rust/.sqlx/query-e842f1ed33747bde4570c6d861d856c4cbd8beb519df8212212017dda9d06c51.json index 145fe9bf7bc..de7157b9b0a 100644 --- a/rust/.sqlx/query-e842f1ed33747bde4570c6d861d856c4cbd8beb519df8212212017dda9d06c51.json +++ b/rust/.sqlx/query-e842f1ed33747bde4570c6d861d856c4cbd8beb519df8212212017dda9d06c51.json @@ -1,38 +1,44 @@ { - "db_name": "PostgreSQL", - "query": "\nWITH to_delete AS (\n DELETE FROM cyclotron_jobs\n WHERE state IN ('failed', 'completed')\n RETURNING last_transition, team_id, function_id::text, state::text\n),\naggregated_data AS (\n SELECT\n date_trunc('hour', last_transition) AS hour,\n team_id,\n function_id,\n state,\n COUNT(*) AS count\n FROM to_delete\n GROUP BY hour, team_id, function_id, state\n)\nSELECT\n hour as \"hour!\",\n team_id as \"team_id!\",\n function_id,\n state as \"state!\",\n count as \"count!\"\nFROM aggregated_data", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "hour!", - "type_info": "Timestamptz" - }, - { - "ordinal": 1, - "name": "team_id!", - "type_info": "Int4" - }, - { - "ordinal": 2, - "name": "function_id", - "type_info": "Text" - }, - { - "ordinal": 3, - "name": "state!", - "type_info": "Text" - }, - { - "ordinal": 4, - "name": "count!", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [null, false, null, null, null] + "db_name": "PostgreSQL", + "query": "\nWITH to_delete AS (\n DELETE FROM cyclotron_jobs\n WHERE state IN ('failed', 'completed')\n RETURNING last_transition, team_id, function_id::text, state::text\n),\naggregated_data AS (\n SELECT\n date_trunc('hour', last_transition) AS hour,\n team_id,\n function_id,\n state,\n COUNT(*) AS count\n FROM to_delete\n GROUP BY hour, team_id, function_id, state\n)\nSELECT\n hour as \"hour!\",\n team_id as \"team_id!\",\n function_id,\n state as \"state!\",\n count as \"count!\"\nFROM aggregated_data", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "hour!", + "type_info": "Timestamptz" + }, + { + "ordinal": 1, + "name": "team_id!", + 
"type_info": "Int4" + }, + { + "ordinal": 2, + "name": "function_id", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "state!", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "count!", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] }, - "hash": "e842f1ed33747bde4570c6d861d856c4cbd8beb519df8212212017dda9d06c51" + "nullable": [ + null, + false, + null, + null, + null + ] + }, + "hash": "e842f1ed33747bde4570c6d861d856c4cbd8beb519df8212212017dda9d06c51" } diff --git a/rust/.sqlx/query-eecef0ce664dfe65dff4452d92a29c948a291ea8218bbbb4e25cd1ad36dbe9f4.json b/rust/.sqlx/query-eecef0ce664dfe65dff4452d92a29c948a291ea8218bbbb4e25cd1ad36dbe9f4.json index 78ca221cb2f..a54bb9565ea 100644 --- a/rust/.sqlx/query-eecef0ce664dfe65dff4452d92a29c948a291ea8218bbbb4e25cd1ad36dbe9f4.json +++ b/rust/.sqlx/query-eecef0ce664dfe65dff4452d92a29c948a291ea8218bbbb4e25cd1ad36dbe9f4.json @@ -1,12 +1,20 @@ { - "db_name": "PostgreSQL", - "query": "\n INSERT INTO posthog_propertydefinition (id, name, type, group_type_index, is_numerical, volume_30_day, query_usage_30_day, team_id, property_type)\n VALUES ($1, $2, $3, $4, $5, NULL, NULL, $6, $7)\n ON CONFLICT (team_id, name, type, coalesce(group_type_index, -1))\n DO UPDATE SET property_type=EXCLUDED.property_type WHERE posthog_propertydefinition.property_type IS NULL\n ", - "describe": { - "columns": [], - "parameters": { - "Left": ["Uuid", "Varchar", "Int2", "Int2", "Bool", "Int4", "Varchar"] - }, - "nullable": [] + "db_name": "PostgreSQL", + "query": "\n INSERT INTO posthog_propertydefinition (id, name, type, group_type_index, is_numerical, volume_30_day, query_usage_30_day, team_id, property_type)\n VALUES ($1, $2, $3, $4, $5, NULL, NULL, $6, $7)\n ON CONFLICT (team_id, name, type, coalesce(group_type_index, -1))\n DO UPDATE SET property_type=EXCLUDED.property_type WHERE posthog_propertydefinition.property_type IS NULL\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Varchar", + "Int2", + "Int2", + "Bool", + "Int4", + "Varchar" + ] }, - "hash": "eecef0ce664dfe65dff4452d92a29c948a291ea8218bbbb4e25cd1ad36dbe9f4" + "nullable": [] + }, + "hash": "eecef0ce664dfe65dff4452d92a29c948a291ea8218bbbb4e25cd1ad36dbe9f4" } diff --git a/rust/.sqlx/query-f074766d1fc32df17f92667f412af30c682288988fc6f102e8a063be97c3e51c.json b/rust/.sqlx/query-f074766d1fc32df17f92667f412af30c682288988fc6f102e8a063be97c3e51c.json index 6139be53026..4f91e01e690 100644 --- a/rust/.sqlx/query-f074766d1fc32df17f92667f412af30c682288988fc6f102e8a063be97c3e51c.json +++ b/rust/.sqlx/query-f074766d1fc32df17f92667f412af30c682288988fc6f102e8a063be97c3e51c.json @@ -1,31 +1,37 @@ { - "db_name": "PostgreSQL", - "query": "\nINSERT INTO cyclotron_jobs\n (\n id,\n team_id,\n function_id,\n created,\n lock_id,\n last_heartbeat,\n janitor_touch_count,\n transition_count,\n last_transition,\n queue_name,\n state,\n scheduled,\n priority,\n vm_state,\n metadata,\n parameters,\n blob\n )\nVALUES\n ($1, $2, $3, NOW(), NULL, NULL, 0, 0, NOW(), $4, $5, $6, $7, $8, $9, $10, $11)\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Uuid", - "Int4", - "Uuid", - "Text", - { - "Custom": { - "name": "jobstate", - "kind": { - "Enum": ["available", "completed", "failed", "running", "paused"] - } - } - }, - "Timestamptz", - "Int2", - "Bytea", - "Bytea", - "Bytea", - "Bytea" - ] + "db_name": "PostgreSQL", + "query": "\nINSERT INTO cyclotron_jobs\n (\n id,\n team_id,\n function_id,\n created,\n lock_id,\n last_heartbeat,\n janitor_touch_count,\n 
transition_count,\n last_transition,\n queue_name,\n state,\n scheduled,\n priority,\n vm_state,\n metadata,\n parameters,\n blob\n )\nVALUES\n ($1, $2, $3, NOW(), NULL, NULL, 0, 0, NOW(), $4, $5, $6, $7, $8, $9, $10, $11)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Int4", + "Uuid", + "Text", + { + "Custom": { + "name": "jobstate", + "kind": { + "Enum": [ + "available", + "completed", + "failed", + "running", + "paused" + ] + } + } }, - "nullable": [] + "Timestamptz", + "Int2", + "Bytea", + "Bytea", + "Bytea", + "Bytea" + ] }, - "hash": "f074766d1fc32df17f92667f412af30c682288988fc6f102e8a063be97c3e51c" + "nullable": [] + }, + "hash": "f074766d1fc32df17f92667f412af30c682288988fc6f102e8a063be97c3e51c" } diff --git a/rust/.sqlx/query-fd6745e4ed7575699286d9828c9f6c959ba804dab9c1e5ba39979d19782582ea.json b/rust/.sqlx/query-fd6745e4ed7575699286d9828c9f6c959ba804dab9c1e5ba39979d19782582ea.json index 698578f7287..791e4ca7135 100644 --- a/rust/.sqlx/query-fd6745e4ed7575699286d9828c9f6c959ba804dab9c1e5ba39979d19782582ea.json +++ b/rust/.sqlx/query-fd6745e4ed7575699286d9828c9f6c959ba804dab9c1e5ba39979d19782582ea.json @@ -1,38 +1,49 @@ { - "db_name": "PostgreSQL", - "query": "\n INSERT INTO posthog_errortrackingissuefingerprintv2 (id, team_id, issue_id, fingerprint, version, created_at)\n VALUES ($1, $2, $3, $4, 0, NOW())\n ON CONFLICT (team_id, fingerprint) DO NOTHING\n RETURNING id, team_id, issue_id, fingerprint, version\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Uuid" - }, - { - "ordinal": 1, - "name": "team_id", - "type_info": "Int4" - }, - { - "ordinal": 2, - "name": "issue_id", - "type_info": "Uuid" - }, - { - "ordinal": 3, - "name": "fingerprint", - "type_info": "Text" - }, - { - "ordinal": 4, - "name": "version", - "type_info": "Int8" - } - ], - "parameters": { - "Left": ["Uuid", "Int4", "Uuid", "Text"] - }, - "nullable": [false, false, false, false, false] + "db_name": "PostgreSQL", + "query": "\n INSERT INTO posthog_errortrackingissuefingerprintv2 (id, team_id, issue_id, fingerprint, version, created_at)\n VALUES ($1, $2, $3, $4, 0, NOW())\n ON CONFLICT (team_id, fingerprint) DO NOTHING\n RETURNING id, team_id, issue_id, fingerprint, version\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "team_id", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "issue_id", + "type_info": "Uuid" + }, + { + "ordinal": 3, + "name": "fingerprint", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "version", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Uuid", + "Int4", + "Uuid", + "Text" + ] }, - "hash": "fd6745e4ed7575699286d9828c9f6c959ba804dab9c1e5ba39979d19782582ea" + "nullable": [ + false, + false, + false, + false, + false + ] + }, + "hash": "fd6745e4ed7575699286d9828c9f6c959ba804dab9c1e5ba39979d19782582ea" } diff --git a/rust/.sqlx/query-fda1f4ef877d1a71dbb6345d71c21c0eae35356f7b92e969a12a839b41cd360a.json b/rust/.sqlx/query-fda1f4ef877d1a71dbb6345d71c21c0eae35356f7b92e969a12a839b41cd360a.json index 631e5d2d017..da2b7d92cb9 100644 --- a/rust/.sqlx/query-fda1f4ef877d1a71dbb6345d71c21c0eae35356f7b92e969a12a839b41cd360a.json +++ b/rust/.sqlx/query-fda1f4ef877d1a71dbb6345d71c21c0eae35356f7b92e969a12a839b41cd360a.json @@ -1,12 +1,19 @@ { - "db_name": "PostgreSQL", - "query": "INSERT INTO posthog_errortrackingsymbolset (id, team_id, ref, storage_ptr, failure_reason, created_at)\n VALUES ($1, $2, $3, $4, $5, $6)\n 
ON CONFLICT (team_id, ref) DO UPDATE SET storage_ptr = $4", - "describe": { - "columns": [], - "parameters": { - "Left": ["Uuid", "Int4", "Text", "Text", "Text", "Timestamptz"] - }, - "nullable": [] + "db_name": "PostgreSQL", + "query": "INSERT INTO posthog_errortrackingsymbolset (id, team_id, ref, storage_ptr, failure_reason, created_at)\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (team_id, ref) DO UPDATE SET storage_ptr = $4", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Int4", + "Text", + "Text", + "Text", + "Timestamptz" + ] }, - "hash": "fda1f4ef877d1a71dbb6345d71c21c0eae35356f7b92e969a12a839b41cd360a" + "nullable": [] + }, + "hash": "fda1f4ef877d1a71dbb6345d71c21c0eae35356f7b92e969a12a839b41cd360a" } diff --git a/rust/Cargo.lock b/rust/Cargo.lock index b99943cc4e5..9a263a87878 100644 --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -1808,7 +1808,9 @@ dependencies = [ "futures", "health", "maxminddb", + "moka", "once_cell", + "petgraph", "rand", "redis", "regex", @@ -3046,9 +3048,13 @@ version = "0.12.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32cf62eb4dd975d2dde76432fb1075c49e3ee2331cf36f1f8fd4b66550d32b6f" dependencies = [ + "async-lock 3.4.0", + "async-trait", "crossbeam-channel", "crossbeam-epoch", "crossbeam-utils", + "event-listener 5.3.1", + "futures-util", "once_cell", "parking_lot", "quanta 0.12.2", diff --git a/rust/cymbal/src/app_context.rs b/rust/cymbal/src/app_context.rs index dc0c513f332..ef96c84fa7a 100644 --- a/rust/cymbal/src/app_context.rs +++ b/rust/cymbal/src/app_context.rs @@ -16,6 +16,7 @@ use crate::{ frames::resolver::Resolver, symbol_store::{ caching::{Caching, SymbolSetCache}, + concurrency, saving::Saving, sourcemap::SourcemapProvider, Catalog, S3Client, @@ -80,13 +81,18 @@ impl AppContext { config.ss_prefix.clone(), ); let caching_smp = Caching::new(saving_smp, ss_cache); + // We want to fetch each sourcemap from the outside world + // exactly once, and if it isn't in the cache, load/parse + // it from s3 exactly once too. Limiting the per symbol set + // reference concurrency to 1 ensures this.
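+ // (Wrapping order matters here, as a rough sketch of the intent: AtMostOne sits outside the Caching layer, so a cache miss and the resulting fetch/parse happen while the per-ref lock is held, and concurrent lookups for the same ref wait for the first one to populate the cache instead of fetching again.)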
+ let limited_smp = concurrency::AtMostOne::new(caching_smp); info!( "AppContext initialized, subscribed to topic {}", config.consumer.kafka_consumer_topic ); - let catalog = Catalog::new(caching_smp); + let catalog = Catalog::new(limited_smp); let resolver = Resolver::new(config); Ok(Self { diff --git a/rust/cymbal/src/frames/resolver.rs b/rust/cymbal/src/frames/resolver.rs index b36d00fee6f..5e6cafce140 100644 --- a/rust/cymbal/src/frames/resolver.rs +++ b/rust/cymbal/src/frames/resolver.rs @@ -82,7 +82,7 @@ mod test { sourcemap::SourcemapProvider, Catalog, S3Client, }, - types::{ErrProps, Stacktrace}, + types::{RawErrProps, Stacktrace}, }; const CHUNK_PATH: &str = "/static/chunk-PGUQKT6S.js"; @@ -131,10 +131,10 @@ mod test { fn get_test_frame(server: &MockServer) -> RawFrame { let exception: ClickHouseEvent = serde_json::from_str(EXAMPLE_EXCEPTION).unwrap(); - let props: ErrProps = serde_json::from_str(&exception.properties.unwrap()).unwrap(); + let mut props: RawErrProps = serde_json::from_str(&exception.properties.unwrap()).unwrap(); let Stacktrace::Raw { frames: mut test_stack, - } = props.exception_list.unwrap().swap_remove(0).stack.unwrap() + } = props.exception_list.swap_remove(0).stack.unwrap() else { panic!("Expected a Raw stacktrace") }; diff --git a/rust/cymbal/src/issue_resolution.rs b/rust/cymbal/src/issue_resolution.rs index 3d7ab3ddd6a..6778ff10012 100644 --- a/rust/cymbal/src/issue_resolution.rs +++ b/rust/cymbal/src/issue_resolution.rs @@ -1,7 +1,10 @@ use sqlx::postgres::any::AnyConnectionBackend; use uuid::Uuid; -use crate::error::UnhandledError; +use crate::{ + error::UnhandledError, + types::{FingerprintedErrProps, OutputErrProps}, +}; pub struct IssueFingerprintOverride { pub id: Uuid, @@ -15,14 +18,18 @@ pub struct Issue { pub id: Uuid, pub team_id: i32, pub status: String, + pub name: Option<String>, + pub description: Option<String>, } impl Issue { - pub fn new(team_id: i32) -> Self { + pub fn new(team_id: i32, name: String, description: String) -> Self { Self { id: Uuid::new_v4(), team_id, status: "active".to_string(), // TODO - we should at some point use an enum here + name: Some(name), + description: Some(description), } } @@ -37,7 +44,7 @@ impl Issue { let res = sqlx::query_as!( Issue, r#" - SELECT id, team_id, status FROM posthog_errortrackingissue + SELECT id, team_id, status, name, description FROM posthog_errortrackingissue WHERE team_id = $1 AND id = $2 "#, team_id, @@ -55,14 +62,16 @@ { let did_insert = sqlx::query_scalar!( r#" - INSERT INTO posthog_errortrackingissue (id, team_id, status, created_at) - VALUES ($1, $2, $3, NOW()) + INSERT INTO posthog_errortrackingissue (id, team_id, status, name, description, created_at) + VALUES ($1, $2, $3, $4, $5, NOW()) ON CONFLICT (id) DO NOTHING RETURNING (xmax = 0) AS was_inserted "#, self.id, self.team_id, - self.status + self.status, + self.name, + self.description ) .fetch_one(executor) .await?; @@ -127,30 +136,40 @@ impl IssueFingerprintOverride { pub async fn resolve_issue<'c, A>( con: A, - fingerprint: &str, team_id: i32, -) -> Result<IssueFingerprintOverride, UnhandledError> + fingerprinted: FingerprintedErrProps, +) -> Result<OutputErrProps, UnhandledError> where A: sqlx::Acquire<'c, Database = sqlx::Postgres>, { let mut conn = con.acquire().await?; // If an override already exists, just fast-path, skipping the transaction if let Some(issue_override) = - IssueFingerprintOverride::load(&mut *conn, team_id, fingerprint).await? + IssueFingerprintOverride::load(&mut *conn, team_id, &fingerprinted.fingerprint).await?
{ - return Ok(issue_override); + return Ok(fingerprinted.to_output(issue_override.issue_id)); } + // UNWRAP: We never resolve an issue for an exception with no exception list + let first = fingerprinted.exception_list.first().unwrap(); + let new_name = first.exception_type.clone(); + let new_description = first.exception_message.clone(); + // Start a transaction, so we can roll it back on override insert failure conn.begin().await?; // Insert a new issue - let issue = Issue::new(team_id); + let issue = Issue::new(team_id, new_name, new_description); // We don't actually care if we insert the issue here or not - conflicts aren't possible at // this stage. issue.insert(&mut *conn).await?; // Insert the fingerprint override - let issue_override = - IssueFingerprintOverride::create_or_load(&mut *conn, team_id, fingerprint, &issue).await?; + let issue_override = IssueFingerprintOverride::create_or_load( + &mut *conn, + team_id, + &fingerprinted.fingerprint, + &issue, + ) + .await?; // If we actually inserted a new row for the issue override, commit the transaction, // saving both the issue and the override. Otherwise, rollback the transaction, and @@ -160,7 +179,8 @@ where conn.rollback().await?; } else { conn.commit().await?; + // TODO - emit new issue and override to kafka } - Ok(issue_override) + Ok(fingerprinted.to_output(issue_override.issue_id)) } diff --git a/rust/cymbal/src/lib.rs b/rust/cymbal/src/lib.rs index b88634c1828..09ea0a73f49 100644 --- a/rust/cymbal/src/lib.rs +++ b/rust/cymbal/src/lib.rs @@ -1,4 +1,4 @@ -use std::collections::HashMap; +use std::sync::Arc; use app_context::AppContext; use common_types::ClickHouseEvent; @@ -6,8 +6,7 @@ use error::{EventError, UnhandledError}; use fingerprinting::generate_fingerprint; use issue_resolution::resolve_issue; use tracing::warn; -use types::{ErrProps, Exception, Stacktrace}; -use uuid::Uuid; +use types::{Exception, RawErrProps, Stacktrace}; pub mod app_context; pub mod config; @@ -21,7 +20,7 @@ pub mod symbol_store; pub mod types; pub async fn handle_event( - context: &AppContext, + context: Arc<AppContext>, mut event: ClickHouseEvent, ) -> Result<Option<ClickHouseEvent>, UnhandledError> { let mut props = match get_props(&event) { @@ -33,39 +32,34 @@ } }; - let exceptions = match take_exception_list(event.uuid, &mut props) { - Ok(r) => r, - Err(e) => { - warn!("Failed to take exception list: {}", e); - // Add an error message, and patch the event properties back up. - props.add_error_message(format!("Failed to take exception list: {}", e)); - event.properties = Some(serde_json::to_string(&props).unwrap()); - return Ok(Some(event)); - } - }; + let exceptions = std::mem::take(&mut props.exception_list); + + if exceptions.is_empty() { + props.add_error_message("No exceptions found on exception event"); + event.properties = Some(serde_json::to_string(&props).unwrap()); + return Ok(Some(event)); + } let mut results = Vec::new(); for exception in exceptions.into_iter() { // If we get an unhandled error during exception processing, we return an error, which should // cause the caller to drop the offset without storing it - unhandled exceptions indicate // a dependency is down, or some bug, and we want to take lag in those situations.
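+ // ("Taking lag" here means the consumer stops committing offsets and falls behind, rather than dropping or dead-lettering the event, so the event is retried once the dependency recovers.)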
- results.push(process_exception(context, event.team_id, exception).await?); + results.push(process_exception(context.clone(), event.team_id, exception).await?); } let fingerprint = generate_fingerprint(&results); + props.exception_list = results; + let fingerprinted = props.to_fingerprinted(fingerprint.clone()); - let issue_override = resolve_issue(&context.pool, &fingerprint, event.team_id).await?; + let output = resolve_issue(&context.pool, event.team_id, fingerprinted).await?; - props.fingerprint = Some(fingerprint); - props.resolved_issue_id = Some(issue_override.issue_id); - props.exception_list = Some(results); - - event.properties = Some(serde_json::to_string(&props).unwrap()); + event.properties = Some(serde_json::to_string(&output).unwrap()); Ok(Some(event)) } -fn get_props(event: &ClickHouseEvent) -> Result<ErrProps, EventError> { +fn get_props(event: &ClickHouseEvent) -> Result<RawErrProps, EventError> { if event.event != "$exception" { return Err(EventError::WrongEventType(event.event.clone(), event.uuid)); } @@ -74,7 +68,7 @@ return Err(EventError::NoProperties(event.uuid)); }; - let properties: ErrProps = match serde_json::from_str(properties) { + let properties: RawErrProps = match serde_json::from_str(properties) { Ok(r) => r, Err(e) => { return Err(EventError::InvalidProperties(event.uuid, e.to_string())); @@ -84,20 +78,8 @@ Ok(properties) } -fn take_exception_list(event_id: Uuid, props: &mut ErrProps) -> Result<Vec<Exception>, EventError> { - let Some(exception_list) = props.exception_list.as_mut() else { - return Err(EventError::NoExceptionList(event_id)); - }; - - if exception_list.is_empty() { - return Err(EventError::EmptyExceptionList(event_id)); - } - - Ok(std::mem::take(exception_list)) -} - async fn process_exception( - context: &AppContext, + context: Arc<AppContext>, team_id: i32, mut e: Exception, ) -> Result<Exception, UnhandledError> { @@ -114,40 +96,43 @@ return Ok(e); } - let mut results = Vec::with_capacity(frames.len()); + let mut handles = Vec::with_capacity(frames.len()); + let mut resolved_frames = Vec::with_capacity(frames.len()); - // Cluster the frames by symbol set // TODO - we really want to cluster across exceptions (and even across events), // rather than just within a single exception - let mut groups = HashMap::new(); - for (i, frame) in frames.into_iter().enumerate() { - let group = groups - .entry(frame.symbol_set_ref()) - .or_insert_with(Vec::new); - group.push((i, frame)); - } - - for (_, frames) in groups.into_iter() { - for (i, frame) in frames { - let resolved_frame = context + for frame in frames.into_iter() { + let context = context.clone(); + // Spawn a concurrent task for resolving every frame - we're careful elsewhere to + // ensure this kind of concurrency is fine, although this "throw it at the wall" + // data flow structure is pretty questionable. Once we switch to handling more than + // 1 event at a time, we should re-group frames into associated groups and then + // process those groups in-order (but the individual frames in them can still be + // thrown at the wall), with some cross-group concurrency.
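+ // (Duplicate fetches of any given symbol set are still avoided under this fan-out, because the symbol set providers below are wrapped in concurrency::AtMostOne, which serialises lookups per symbol set ref.)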
+ handles.push(tokio::spawn(async move { + context .resolver .resolve(&frame, team_id, &context.pool, &context.catalog) - .await?; - results.push((i, resolved_frame)); - } + .await + })); } - results.sort_unstable_by_key(|(i, _)| *i); + // Collect the results + for handle in handles { + // JoinHandles wrap the returned type in a Result, because if the task panics, + // tokio catches it and returns an error. If any of our tasks panicked, we want + // to propagate that panic, so we unwrap the outer Result here. + let res = handle.await.unwrap()?; + resolved_frames.push(res) + } e.stack = Some(Stacktrace::Resolved { - frames: results.into_iter().map(|(_, frame)| frame).collect(), + frames: resolved_frames, }); Ok(e) } -// This is stupidly expensive, since it round-trips the event through JSON, lol. We should change ClickhouseEvent to only do serde at the -// edges +// This is expensive, since it round-trips the event through JSON. +// We could maybe change ClickHouseEvent to only do serde at the edges pub fn add_error_to_event( event: &mut ClickHouseEvent, e: impl ToString, diff --git a/rust/cymbal/src/main.rs b/rust/cymbal/src/main.rs index 8955a976761..ef96e8e490a 100644 --- a/rust/cymbal/src/main.rs +++ b/rust/cymbal/src/main.rs @@ -78,7 +78,7 @@ async fn main() { }; metrics::counter!(EVENT_RECEIVED).increment(1); - let _processed_event = match handle_event(&context, event).await { + let _processed_event = match handle_event(context.clone(), event).await { Ok(r) => { offset.store().unwrap(); r diff --git a/rust/cymbal/src/symbol_store/caching.rs b/rust/cymbal/src/symbol_store/caching.rs index 2fd24a2e9f3..5cab851fdac 100644 --- a/rust/cymbal/src/symbol_store/caching.rs +++ b/rust/cymbal/src/symbol_store/caching.rs @@ -12,6 +12,8 @@ use crate::{ use super::{saving::Saveable, Fetcher, Parser, Provider}; +// This is a type-specific symbol provider layer, designed to +// wrap some inner provider and provide a type-safe caching layer pub struct Caching<P> { inner: P, cache: Arc<Mutex<SymbolSetCache>>, @@ -42,16 +44,30 @@ where return Ok(set); } metrics::counter!(STORE_CACHE_MISSES).increment(1); + drop(cache); + + // Do the fetch, not holding the lock across it to allow + // concurrent fetches to occur (de-duping fetches is + // up to the caller of `lookup`, since relying on the + // cache to do it means assuming the caching layer is + // the outer layer, which is not something the interface + // guarantees) let found = self.inner.fetch(team_id, r).await?; let bytes = found.byte_count(); let parsed = self.inner.parse(found).await?; + let mut cache = self.cache.lock().await; // Re-acquire the cache-wide lock to insert, dropping the ref_lock + let parsed = Arc::new(parsed); cache.insert(cache_key, parsed.clone(), bytes); Ok(parsed) } } +// This is a cache shared across multiple symbol set providers, through the `Caching` above, +// such that two totally different "layers" can share an underlying "pool" of cache space. This +// is injected into the `Caching` layer at construct time, to allow this sharing across multiple +// provider layer "stacks" within the catalog. pub struct SymbolSetCache { // We expect this cache to consist of few, but large, items. // TODO - handle cases where two CachedSymbolSets have identical keys but different types @@ -113,16 +129,14 @@ impl SymbolSetCache { // remove them in a separate pass. let mut to_remove = vec![]; while self.held_bytes > self.max_bytes && !vals.is_empty() { - // We can unwrap here because we know we're not empty from the line above + // We can unwrap here because we know we're not empty from the line above (and + // really, even the !empty check could be skipped - if held_bytes is non-zero, we + // must have at least one element in vals) let (to_remove_key, to_remove_val) = vals.pop().unwrap(); self.held_bytes -= to_remove_val.bytes; to_remove.push(to_remove_key.clone()); } - for key in to_remove { - self.cached.remove(&key); - } - metrics::gauge!(STORE_CACHED_BYTES).set(self.held_bytes as f64); } } diff --git a/rust/cymbal/src/symbol_store/concurrency.rs b/rust/cymbal/src/symbol_store/concurrency.rs new file mode 100644 index 00000000000..466bfa8fce2 --- /dev/null +++ b/rust/cymbal/src/symbol_store/concurrency.rs @@ -0,0 +1,79 @@ +use std::{ + collections::HashMap, + sync::{Arc, Weak}, +}; + +use axum::async_trait; +use tokio::sync::{Mutex, OwnedMutexGuard}; + +use crate::error::Error; + +use super::Provider; + +// Limits the number of concurrent lookups +// for a given symbol set to 1. Note this places +// no concurrency limit /across/ different symbol +// sets, and places no limit on the number of users +// using the returned symbol set concurrently. Designed +// to wrap the caching/saving layers, allowing us to +// ensure we only fetch any given symbol set from the +// outside world exactly once +pub struct AtMostOne<P> { + pub inner: P, + limiters: Mutex<HashMap<String, Weak<Mutex<()>>>>, +} + +impl<P> AtMostOne<P> { + pub fn new(inner: P) -> Self { + Self { + inner, + limiters: Default::default(), + } + } + + // This needs to be async even though all it does is take a lock because + // the returned owned guard can be (and is) held across an await point, so + // if this was a sync mutex it'd block the executor. It so happens that the + // std library Mutex doesn't provide lock_owned anyway, so we'd have to pull + // in a new dependency if we wanted to write a sync version of this, but + // that's secondary to it actually needing to be async + pub async fn acquire(&self, key: impl ToString) -> OwnedMutexGuard<()> { + let key = key.to_string(); + let mut state = self.limiters.lock().await; + let limiter = state.entry(key).or_default(); + + if let Some(lock) = limiter.upgrade() { + // If there's already a mutex in our shared state for this particular + // source ref, drop the global lock, and wait for the underlying source + // ref to be freed up + drop(state); + lock.lock_owned().await + } else { + // If there's no mutex in our shared state for this particular source ref, + // create one, acquire it, put a Weak to it in the shared state, and return + // the owning mutex guard (and therefore the underlying Arc to the new mutex) + let new = Arc::new(Mutex::new(())); + *limiter = Arc::downgrade(&new); + let acquired = new.lock_owned().await; + drop(state); + acquired + } + } +} + +#[async_trait] +impl<P> Provider for AtMostOne<P> +where + P: Provider, + P::Ref: ToString + Send, +{ + type Ref = P::Ref; + type Set = P::Set; + + async fn lookup(&self, team_id: i32, r: Self::Ref) -> Result<Arc<Self::Set>, Error> { + let lock = self.acquire(r.to_string()).await; + let result = self.inner.lookup(team_id, r).await; + drop(lock); + result + } +} diff --git a/rust/cymbal/src/symbol_store/mod.rs b/rust/cymbal/src/symbol_store/mod.rs index 458a0c116da..618db75d232 100644 --- a/rust/cymbal/src/symbol_store/mod.rs +++ b/rust/cymbal/src/symbol_store/mod.rs @@ -8,6 +8,7 @@ use reqwest::Url; use crate::error::Error; pub mod caching; +pub mod concurrency; pub mod saving; pub mod sourcemap; diff --git a/rust/cymbal/src/symbol_store/saving.rs b/rust/cymbal/src/symbol_store/saving.rs index d231d2400ec..a9b3f0ed19b 100644 --- a/rust/cymbal/src/symbol_store/saving.rs +++ b/rust/cymbal/src/symbol_store/saving.rs @@ -147,6 +147,10 @@ where error!("Found a record with no data and no error: {:?}", record); panic!("Found a record with no data and no error"); } + // TODO - this can fail due to changes in how we serialise, or changes in + // the error type - and we should handle that by deleting the symbol record + // and re-fetching, I think (we don't need to clean up s3 since it's a failure + // case, there is no saved data). let error = serde_json::from_str(&record.failure_reason.unwrap()) .map_err(UnhandledError::from)?; return Err(Error::ResolutionError(error)); diff --git a/rust/cymbal/src/types/mod.rs b/rust/cymbal/src/types/mod.rs index 3084297df89..23981cc29f8 100644 --- a/rust/cymbal/src/types/mod.rs +++ b/rust/cymbal/src/types/mod.rs @@ -46,24 +46,33 @@ pub struct Exception { // of only a small subset. This struct is used to give us a strongly-typed // "view" of those event properties we care about. #[derive(Debug, Deserialize, Serialize, Clone)] -pub struct ErrProps { +pub struct RawErrProps { #[serde(rename = "$exception_list")] - pub exception_list: Option<Vec<Exception>>, // Required from exception producers - we will not process events without this.
Optional to support older clients, should eventually be removed - #[serde( - rename = "$exception_fingerprint", - skip_serializing_if = "Option::is_none" - )] - pub fingerprint: Option<String>, // We expect this not to exist when the event is received, and we populate it as part of processing - #[serde( - rename = "$exception_issue_id", - skip_serializing_if = "Option::is_none" - )] - pub resolved_issue_id: Option<Uuid>, // We populate the exception issue id as part of processing + pub exception_list: Vec<Exception>, #[serde(flatten)] // A catch-all for all the properties we don't "care" about, so when we send back to kafka we don't lose any info pub other: HashMap<String, Value>, } +pub struct FingerprintedErrProps { + pub exception_list: Vec<Exception>, + pub fingerprint: String, + pub other: HashMap<String, Value>, +} + +// We emit this +#[derive(Debug, Serialize, Clone)] +pub struct OutputErrProps { + #[serde(rename = "$exception_list")] + pub exception_list: Vec<Exception>, + #[serde(rename = "$exception_fingerprint")] + pub fingerprint: String, + #[serde(rename = "$exception_issue_id")] + pub issue_id: Uuid, + #[serde(flatten)] + pub other: HashMap<String, Value>, +} + impl Exception { pub fn include_in_fingerprint(&self, h: &mut Sha512) { h.update(self.exception_type.as_bytes()); @@ -92,7 +101,7 @@ } } -impl ErrProps { +impl RawErrProps { pub fn add_error_message(&mut self, msg: impl ToString) { let mut errors = match self.other.remove("$cymbal_errors") { Some(serde_json::Value::Array(errors)) => errors, @@ -106,6 +115,25 @@ serde_json::Value::Array(errors), ); } + + pub fn to_fingerprinted(self, fingerprint: String) -> FingerprintedErrProps { + FingerprintedErrProps { + exception_list: self.exception_list, + fingerprint, + other: self.other, + } + } +} + +impl FingerprintedErrProps { + pub fn to_output(self, issue_id: Uuid) -> OutputErrProps { + OutputErrProps { + exception_list: self.exception_list, + fingerprint: self.fingerprint, + issue_id, + other: self.other, + } + } } #[cfg(test)] @@ -115,7 +143,7 @@ mod test { use crate::{frames::RawFrame, types::Stacktrace}; - use super::ErrProps; + use super::RawErrProps; #[test] fn it_deserialises_error_props() { @@ -123,8 +151,8 @@ let raw: ClickHouseEvent = serde_json::from_str(raw).unwrap(); - let props: ErrProps = serde_json::from_str(&raw.properties.unwrap()).unwrap(); - let exception_list = &props.exception_list.unwrap(); + let props: RawErrProps = serde_json::from_str(&raw.properties.unwrap()).unwrap(); + let exception_list = &props.exception_list; assert_eq!(exception_list.len(), 1); assert_eq!( @@ -173,9 +201,9 @@ "$exception_list": [] }"#; - let props: Result<ErrProps, _> = serde_json::from_str(raw); + let props: Result<RawErrProps, _> = serde_json::from_str(raw); assert!(props.is_ok()); - assert_eq!(props.unwrap().exception_list.unwrap().len(), 0); + assert_eq!(props.unwrap().exception_list.len(), 0); let raw: &'static str = r#"{ "$exception_list": [{ }] }"#; - let props: Result<ErrProps, _> = serde_json::from_str(raw); + let props: Result<RawErrProps, _> = serde_json::from_str(raw); assert!(props.is_err()); assert_eq!( props.unwrap_err().to_string(), @@ -197,7 +225,7 @@ }] }"#; - let props: Result<ErrProps, _> = serde_json::from_str(raw); + let props: Result<RawErrProps, _> = serde_json::from_str(raw); assert!(props.is_err()); assert_eq!( props.unwrap_err().to_string(), diff --git a/rust/cymbal/tests/resolve.rs b/rust/cymbal/tests/resolve.rs index d92eac66061..31533c3f297 100644 --- a/rust/cymbal/tests/resolve.rs +++ b/rust/cymbal/tests/resolve.rs @@ -9,7 +9,7 @@ use cymbal::{ sourcemap::SourcemapProvider,
Catalog, }, - types::{ErrProps, Stacktrace}, + types::{RawErrProps, Stacktrace}, }; use httpmock::MockServer; use tokio::sync::Mutex; @@ -35,10 +35,10 @@ async fn end_to_end_resolver_test() { }); let exception: ClickHouseEvent = serde_json::from_str(EXAMPLE_EXCEPTION).unwrap(); - let props: ErrProps = serde_json::from_str(&exception.properties.unwrap()).unwrap(); + let mut props: RawErrProps = serde_json::from_str(&exception.properties.unwrap()).unwrap(); let Stacktrace::Raw { frames: mut test_stack, - } = props.exception_list.unwrap().swap_remove(0).stack.unwrap() + } = props.exception_list.swap_remove(0).stack.unwrap() else { panic!("Expected a Raw stacktrace") }; diff --git a/rust/cymbal/tests/types.rs b/rust/cymbal/tests/types.rs index 145a7606dcc..0404398a2b6 100644 --- a/rust/cymbal/tests/types.rs +++ b/rust/cymbal/tests/types.rs @@ -1,7 +1,7 @@ use std::str::FromStr; use common_types::ClickHouseEvent; -use cymbal::types::ErrProps; +use cymbal::types::RawErrProps; use serde_json::Value; #[test] @@ -11,7 +11,7 @@ fn serde_passthrough() { let raw: ClickHouseEvent = serde_json::from_str(raw).unwrap(); let before_properties: Value = serde_json::from_str(raw.properties.as_ref().unwrap()).unwrap(); - let properties_parsed: ErrProps = + let properties_parsed: RawErrProps = serde_json::from_str(raw.properties.as_ref().unwrap()).unwrap(); let properties_raw = serde_json::to_string(&properties_parsed).unwrap(); diff --git a/rust/feature-flags/Cargo.toml b/rust/feature-flags/Cargo.toml index 4cf4016767b..4099fd8ab06 100644 --- a/rust/feature-flags/Cargo.toml +++ b/rust/feature-flags/Cargo.toml @@ -39,6 +39,8 @@ health = { path = "../common/health" } common-metrics = { path = "../common/metrics" } tower = { workspace = true } derive_builder = "0.20.1" +petgraph = "0.6.5" +moka = { version = "0.12.8", features = ["future"] } [lints] workspace = true diff --git a/rust/feature-flags/src/api.rs b/rust/feature-flags/src/api.rs index 4430476d28a..be21c1c37f5 100644 --- a/rust/feature-flags/src/api.rs +++ b/rust/feature-flags/src/api.rs @@ -89,7 +89,7 @@ pub enum FlagError { #[error("Row not found in postgres")] RowNotFound, #[error("failed to parse redis cache data")] - DataParsingError, + RedisDataParsingError, #[error("failed to update redis cache")] CacheUpdateError, #[error("redis unavailable")] @@ -102,6 +102,14 @@ pub enum FlagError { TimeoutError, #[error("No group type mappings")] NoGroupTypeMappings, + #[error("Cohort not found")] + CohortNotFound(String), + #[error("Failed to parse cohort filters")] + CohortFiltersParsingError, + #[error("Cohort dependency cycle")] + CohortDependencyCycle(String), + #[error("Person not found")] + PersonNotFound, } impl IntoResponse for FlagError { @@ -138,7 +146,7 @@ impl IntoResponse for FlagError { FlagError::TokenValidationError => { (StatusCode::UNAUTHORIZED, "The provided API key is invalid or has expired. Please check your API key and try again.".to_string()) } - FlagError::DataParsingError => { + FlagError::RedisDataParsingError => { tracing::error!("Data parsing error: {:?}", self); ( StatusCode::SERVICE_UNAVAILABLE, @@ -194,6 +202,21 @@ impl IntoResponse for FlagError { "The requested row was not found in the database. 
Please try again later or contact support if the problem persists.".to_string(), ) } + FlagError::CohortNotFound(msg) => { + tracing::error!("Cohort not found: {}", msg); + (StatusCode::NOT_FOUND, msg) + } + FlagError::CohortFiltersParsingError => { + tracing::error!("Failed to parse cohort filters: {:?}", self); + (StatusCode::BAD_REQUEST, "Failed to parse cohort filters. Please try again later or contact support if the problem persists.".to_string()) + } + FlagError::CohortDependencyCycle(msg) => { + tracing::error!("Cohort dependency cycle: {}", msg); + (StatusCode::BAD_REQUEST, msg) + } + FlagError::PersonNotFound => { + (StatusCode::BAD_REQUEST, "Person not found. Please check your distinct_id and try again.".to_string()) + } } .into_response() } @@ -205,7 +228,7 @@ impl From<CustomRedisError> for FlagError { CustomRedisError::NotFound => FlagError::TokenValidationError, CustomRedisError::PickleError(e) => { tracing::error!("failed to fetch data: {}", e); - FlagError::DataParsingError + FlagError::RedisDataParsingError } CustomRedisError::Timeout(_) => FlagError::TimeoutError, CustomRedisError::Other(e) => { diff --git a/rust/feature-flags/src/cohort_cache.rs b/rust/feature-flags/src/cohort_cache.rs new file mode 100644 index 00000000000..68894c19f88 --- /dev/null +++ b/rust/feature-flags/src/cohort_cache.rs @@ -0,0 +1,221 @@ +use crate::api::FlagError; +use crate::cohort_models::Cohort; +use crate::flag_matching::{PostgresReader, TeamId}; +use moka::future::Cache; +use std::time::Duration; + +/// CohortCacheManager manages the in-memory cache of cohorts using `moka` for caching. +/// +/// Features: +/// - **TTL**: Each cache entry expires after 5 minutes. +/// - **Size-based eviction**: The cache evicts least recently used entries when the maximum capacity is reached. +/// +/// ```text +/// CohortCacheManager { +/// postgres_reader: PostgresReader, +/// per_team_cohorts: Cache<TeamId, Vec<Cohort>> { +/// // Example: +/// 2: [ +/// Cohort { id: 1, name: "Power Users", filters: {...} }, +/// Cohort { id: 2, name: "Churned", filters: {...} } +/// ], +/// 5: [ +/// Cohort { id: 3, name: "Beta Users", filters: {...} } +/// ] +/// } +/// } +/// ``` +/// +#[derive(Clone)] +pub struct CohortCacheManager { + postgres_reader: PostgresReader, + per_team_cohort_cache: Cache<TeamId, Vec<Cohort>>, +} + +impl CohortCacheManager { + pub fn new( + postgres_reader: PostgresReader, + max_capacity: Option<u64>, + ttl_seconds: Option<u64>, + ) -> Self { + // We use the size of the cohort list (i.e., the number of cohorts for a given team) as the weight of the entry + let weigher = + |_: &TeamId, value: &Vec<Cohort>| -> u32 { value.len().try_into().unwrap_or(u32::MAX) }; + + let cache = Cache::builder() + .time_to_live(Duration::from_secs(ttl_seconds.unwrap_or(300))) // Default to 5 minutes + .weigher(weigher) + .max_capacity(max_capacity.unwrap_or(10_000)) // Default to 10,000 cohorts + .build(); + + Self { + postgres_reader, + per_team_cohort_cache: cache, + } + } + + /// Retrieves cohorts for a given team. + /// + /// If the cohorts are not present in the cache or have expired, it fetches them from the database, + /// caches the result upon successful retrieval, and then returns it.
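+ /// + /// A rough usage sketch (assuming a PostgresReader and a TeamId are already in hand; error handling elided): + /// let cohort_cache = CohortCacheManager::new(postgres_reader.clone(), None, None); + /// let cohorts = cohort_cache.get_cohorts_for_team(team_id).await?; // cache miss: fetches from Postgres and caches + /// let cohorts_again = cohort_cache.get_cohorts_for_team(team_id).await?; // cache hit until TTL expiry or eviction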
+ pub async fn get_cohorts_for_team(&self, team_id: TeamId) -> Result<Vec<Cohort>, FlagError> { + if let Some(cached_cohorts) = self.per_team_cohort_cache.get(&team_id).await { + return Ok(cached_cohorts.clone()); + } + let fetched_cohorts = Cohort::list_from_pg(self.postgres_reader.clone(), team_id).await?; + self.per_team_cohort_cache + .insert(team_id, fetched_cohorts.clone()) + .await; + + Ok(fetched_cohorts) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::cohort_models::Cohort; + use crate::test_utils::{ + insert_cohort_for_team_in_pg, insert_new_team_in_pg, setup_pg_reader_client, + setup_pg_writer_client, + }; + use std::sync::Arc; + use tokio::time::{sleep, Duration}; + + /// Helper function to set up a new team for testing. + async fn setup_test_team( + writer_client: Arc<PgClient>, + ) -> Result<TeamId, anyhow::Error> { + let team = crate::test_utils::insert_new_team_in_pg(writer_client, None).await?; + Ok(team.id) + } + + /// Helper function to insert a cohort for a team. + async fn setup_test_cohort( + writer_client: Arc<PgClient>, + team_id: TeamId, + name: Option<String>, + ) -> Result<CohortId, anyhow::Error> { + let filters = serde_json::json!({"properties": {"type": "OR", "values": [{"type": "OR", "values": [{"key": "$active", "type": "person", "value": [true], "negation": false, "operator": "exact"}]}]}}); + insert_cohort_for_team_in_pg(writer_client, team_id, name, filters, false).await + } + + /// Tests that cache entries expire after the specified TTL. + #[tokio::test] + async fn test_cache_expiry() -> Result<(), anyhow::Error> { + let writer_client = setup_pg_writer_client(None).await; + let reader_client = setup_pg_reader_client(None).await; + + let team_id = setup_test_team(writer_client.clone()).await?; + let _cohort = setup_test_cohort(writer_client.clone(), team_id, None).await?; + + // Initialize CohortCacheManager with a short TTL for testing + let cohort_cache = CohortCacheManager::new( + reader_client.clone(), + Some(100), + Some(1), // 1-second TTL + ); + + let cohorts = cohort_cache.get_cohorts_for_team(team_id).await?; + assert_eq!(cohorts.len(), 1); + assert_eq!(cohorts[0].team_id, team_id); + + let cached_cohorts = cohort_cache.per_team_cohort_cache.get(&team_id).await; + assert!(cached_cohorts.is_some()); + + // Wait for TTL to expire + sleep(Duration::from_secs(2)).await; + + // Attempt to retrieve from cache again + let cached_cohorts = cohort_cache.per_team_cohort_cache.get(&team_id).await; + assert!(cached_cohorts.is_none(), "Cache entry should have expired"); + + Ok(()) + } + + /// Tests that the cache correctly evicts least recently used entries based on the weigher.
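+ /// Each entry's weight is its team's cohort count, so with one cohort per team and max_capacity = 3, adding a fourth team pushes the total weight past capacity and the least recently used entry is evicted.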
+    #[tokio::test]
+    async fn test_cache_weigher() -> Result<(), anyhow::Error> {
+        let writer_client = setup_pg_writer_client(None).await;
+        let reader_client = setup_pg_reader_client(None).await;
+
+        // Define a smaller max_capacity for testing
+        let max_capacity: u64 = 3;
+
+        let cohort_cache = CohortCacheManager::new(reader_client.clone(), Some(max_capacity), None);
+
+        let mut inserted_team_ids = Vec::new();
+
+        // Insert multiple teams and their cohorts
+        for _ in 0..max_capacity {
+            let team = insert_new_team_in_pg(writer_client.clone(), None).await?;
+            let team_id = team.id;
+            inserted_team_ids.push(team_id);
+            setup_test_cohort(writer_client.clone(), team_id, None).await?;
+            cohort_cache.get_cohorts_for_team(team_id).await?;
+        }
+
+        cohort_cache.per_team_cohort_cache.run_pending_tasks().await;
+        let cache_size = cohort_cache.per_team_cohort_cache.entry_count();
+        assert_eq!(
+            cache_size, max_capacity,
+            "Cache size should be equal to max_capacity"
+        );
+
+        let new_team = insert_new_team_in_pg(writer_client.clone(), None).await?;
+        let new_team_id = new_team.id;
+        setup_test_cohort(writer_client.clone(), new_team_id, None).await?;
+        cohort_cache.get_cohorts_for_team(new_team_id).await?;
+
+        cohort_cache.per_team_cohort_cache.run_pending_tasks().await;
+        let cache_size_after = cohort_cache.per_team_cohort_cache.entry_count();
+        assert_eq!(
+            cache_size_after, max_capacity,
+            "Cache size should remain equal to max_capacity after eviction"
+        );
+
+        let evicted_team_id = &inserted_team_ids[0];
+        let cached_cohorts = cohort_cache
+            .per_team_cohort_cache
+            .get(evicted_team_id)
+            .await;
+        assert!(
+            cached_cohorts.is_none(),
+            "Least recently used cache entry should have been evicted"
+        );
+
+        let cached_new_team = cohort_cache.per_team_cohort_cache.get(&new_team_id).await;
+        assert!(
+            cached_new_team.is_some(),
+            "Newly added cache entry should be present"
+        );
+
+        Ok(())
+    }
+
+    #[tokio::test]
+    async fn test_get_cohorts_for_team() -> Result<(), anyhow::Error> {
+        let writer_client = setup_pg_writer_client(None).await;
+        let reader_client = setup_pg_reader_client(None).await;
+        let team_id = setup_test_team(writer_client.clone()).await?;
+        let _cohort = setup_test_cohort(writer_client.clone(), team_id, None).await?;
+        let cohort_cache = CohortCacheManager::new(reader_client.clone(), None, None);
+
+        let cached_cohorts = cohort_cache.per_team_cohort_cache.get(&team_id).await;
+        assert!(cached_cohorts.is_none(), "Cache should initially be empty");
+
+        let cohorts = cohort_cache.get_cohorts_for_team(team_id).await?;
+        assert_eq!(cohorts.len(), 1);
+        assert_eq!(cohorts[0].team_id, team_id);
+
+        let cached_cohorts = cohort_cache
+            .per_team_cohort_cache
+            .get(&team_id)
+            .await
+            .unwrap();
+        assert_eq!(cached_cohorts.len(), 1);
+        assert_eq!(cached_cohorts[0].team_id, team_id);
+
+        Ok(())
+    }
+}
diff --git a/rust/feature-flags/src/cohort_models.rs b/rust/feature-flags/src/cohort_models.rs
new file mode 100644
index 00000000000..d1099839017
--- /dev/null
+++ b/rust/feature-flags/src/cohort_models.rs
@@ -0,0 +1,50 @@
+use crate::flag_definitions::PropertyFilter;
+use serde::{Deserialize, Serialize};
+use sqlx::FromRow;
+
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct Cohort {
+    pub id: i32,
+    pub name: String,
+    pub description: Option<String>,
+    pub team_id: i32,
+    pub deleted: bool,
+    pub filters: serde_json::Value,
+    pub query: Option<serde_json::Value>,
+    pub version: Option<i32>,
+    pub pending_version: Option<i32>,
+    pub count: Option<i32>,
+    pub is_calculating: bool,
+    pub is_static: bool,
+    pub errors_calculating: i32,
+    pub groups: serde_json::Value,
+    pub created_by_id: Option<i32>,
+}
+
+pub type CohortId = i32;
+
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
+#[serde(rename_all = "UPPERCASE")]
+pub enum CohortPropertyType {
+    AND,
+    OR,
+}
+
+#[derive(Debug, Clone, Deserialize, Serialize)]
+pub struct CohortProperty {
+    pub properties: InnerCohortProperty,
+}
+
+#[derive(Debug, Clone, Deserialize, Serialize)]
+pub struct InnerCohortProperty {
+    #[serde(rename = "type")]
+    pub prop_type: CohortPropertyType,
+    pub values: Vec<CohortValues>,
+}
+
+#[derive(Debug, Clone, Deserialize, Serialize)]
+pub struct CohortValues {
+    #[serde(rename = "type")]
+    pub prop_type: String,
+    pub values: Vec<PropertyFilter>,
+}
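To make the nesting in these models concrete, here is a self-contained sketch of how a cohort's `filters` JSON deserializes into them, assuming only serde and serde_json, with `PropertyFilter` pared down to three fields for brevity (the real struct lives in flag_definitions.rs):

```rust
use serde::Deserialize;

#[derive(Deserialize)]
struct PropertyFilter {
    key: String,
    value: serde_json::Value,
    #[serde(rename = "type")]
    prop_type: String,
}

#[derive(Deserialize)]
struct CohortValues {
    #[serde(rename = "type")]
    prop_type: String,
    values: Vec<PropertyFilter>,
}

#[derive(Deserialize)]
#[serde(rename_all = "UPPERCASE")]
enum CohortPropertyType { And, Or } // serialized as "AND" / "OR"

#[derive(Deserialize)]
struct InnerCohortProperty {
    #[serde(rename = "type")]
    prop_type: CohortPropertyType,
    values: Vec<CohortValues>,
}

#[derive(Deserialize)]
struct CohortProperty {
    properties: InnerCohortProperty,
}

fn main() {
    // Shape mirrors the `filters` column: an outer group of inner groups of filters.
    let filters = serde_json::json!({
        "properties": {
            "type": "OR",
            "values": [{
                "type": "OR",
                "values": [{"key": "$active", "type": "person", "value": [true]}]
            }]
        }
    });
    let parsed: CohortProperty = serde_json::from_value(filters).unwrap();
    assert!(matches!(parsed.properties.prop_type, CohortPropertyType::Or));
    assert_eq!(parsed.properties.values[0].values[0].key, "$active");
}
```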
diff --git a/rust/feature-flags/src/cohort_operations.rs b/rust/feature-flags/src/cohort_operations.rs
new file mode 100644
index 00000000000..ea4214ccdc0
--- /dev/null
+++ b/rust/feature-flags/src/cohort_operations.rs
@@ -0,0 +1,369 @@
+use std::collections::HashSet;
+use std::sync::Arc;
+use tracing::instrument;
+
+use crate::cohort_models::{Cohort, CohortId, CohortProperty, InnerCohortProperty};
+use crate::{api::FlagError, database::Client as DatabaseClient, flag_definitions::PropertyFilter};
+
+impl Cohort {
+    /// Returns a cohort from postgres given a cohort_id and team_id
+    #[instrument(skip_all)]
+    pub async fn from_pg(
+        client: Arc<dyn DatabaseClient + Send + Sync>,
+        cohort_id: i32,
+        team_id: i32,
+    ) -> Result<Cohort, FlagError> {
+        let mut conn = client.get_connection().await.map_err(|e| {
+            tracing::error!("Failed to get database connection: {}", e);
+            // TODO should I model my errors more generally? Like, yes, everything behind this API is technically a FlagError,
+            // but I'm not sure if accessing Cohort definitions should be a FlagError (vs idk, a CohortError? A more general API error?)
+            FlagError::DatabaseUnavailable
+        })?;
+
+        let query = "SELECT id, name, description, team_id, deleted, filters, query, version, pending_version, count, is_calculating, is_static, errors_calculating, groups, created_by_id FROM posthog_cohort WHERE id = $1 AND team_id = $2";
+        let cohort = sqlx::query_as::<_, Cohort>(query)
+            .bind(cohort_id)
+            .bind(team_id)
+            .fetch_optional(&mut *conn)
+            .await
+            .map_err(|e| {
+                tracing::error!("Failed to fetch cohort from database: {}", e);
+                FlagError::Internal(format!("Database query error: {}", e))
+            })?;
+
+        cohort.ok_or_else(|| {
+            FlagError::CohortNotFound(format!(
+                "Cohort with id {} not found for team {}",
+                cohort_id, team_id
+            ))
+        })
+    }
+
+    /// Returns all cohorts for a given team
+    #[instrument(skip_all)]
+    pub async fn list_from_pg(
+        client: Arc<dyn DatabaseClient + Send + Sync>,
+        team_id: i32,
+    ) -> Result<Vec<Cohort>, FlagError> {
+        let mut conn = client.get_connection().await.map_err(|e| {
+            tracing::error!("Failed to get database connection: {}", e);
+            FlagError::DatabaseUnavailable
+        })?;
+
+        let query = "SELECT id, name, description, team_id, deleted, filters, query, version, pending_version, count, is_calculating, is_static, errors_calculating, groups, created_by_id FROM posthog_cohort WHERE team_id = $1";
+        let cohorts = sqlx::query_as::<_, Cohort>(query)
+            .bind(team_id)
+            .fetch_all(&mut *conn)
+            .await
+            .map_err(|e| {
+                tracing::error!("Failed to fetch cohorts from database: {}", e);
+                FlagError::Internal(format!("Database query error: {}", e))
+            })?;
+
+        Ok(cohorts)
+    }
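For readers less familiar with sqlx: `query_as` maps each row onto any `FromRow` type by column name, which is what lets these queries land directly in the `Cohort` struct. A stripped-down, hypothetical version of the same pattern against a plain `PgPool` (tuples implement `FromRow`, so no derive is needed; assumes sqlx with the `postgres` feature and a reachable database):

```rust
use sqlx::PgPool;

// Returns (id, name) pairs instead of the full Cohort struct, for brevity.
async fn list_cohort_names(
    pool: &PgPool,
    team_id: i32,
) -> Result<Vec<(i32, String)>, sqlx::Error> {
    sqlx::query_as::<_, (i32, String)>(
        "SELECT id, name FROM posthog_cohort WHERE team_id = $1",
    )
    .bind(team_id)     // $1
    .fetch_all(pool)   // one row per cohort
    .await
}
```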
+
+    /// Parses the filters JSON into a CohortProperty structure
+    // TODO: this doesn't handle the deprecated "groups" field, see
+    // https://github.com/PostHog/posthog/blob/feat/dynamic-cohorts-rust/posthog/models/cohort/cohort.py#L114-L169
+    // I'll handle that in a separate PR.
+    pub fn parse_filters(&self) -> Result<Vec<PropertyFilter>, FlagError> {
+        let cohort_property: CohortProperty = serde_json::from_value(self.filters.clone())
+            .map_err(|e| {
+                tracing::error!("Failed to parse filters for cohort {}: {}", self.id, e);
+                FlagError::CohortFiltersParsingError
+            })?;
+        Ok(cohort_property
+            .properties
+            .to_property_filters()
+            .into_iter()
+            .filter(|f| !(f.key == "id" && f.prop_type == "cohort"))
+            .collect())
+    }
+
+    /// Extracts dependent CohortIds from the cohort's filters
+    pub fn extract_dependencies(&self) -> Result<HashSet<CohortId>, FlagError> {
+        let cohort_property: CohortProperty = serde_json::from_value(self.filters.clone())
+            .map_err(|e| {
+                tracing::error!("Failed to parse filters for cohort {}: {}", self.id, e);
+                FlagError::CohortFiltersParsingError
+            })?;
+
+        let mut dependencies = HashSet::new();
+        Self::traverse_filters(&cohort_property.properties, &mut dependencies)?;
+        Ok(dependencies)
+    }
+
+    /// Recursively traverses the filter tree to find cohort dependencies
+    ///
+    /// Example filter tree structure:
+    /// ```json
+    /// {
+    ///   "properties": {
+    ///     "type": "OR",
+    ///     "values": [
+    ///       {
+    ///         "type": "OR",
+    ///         "values": [
+    ///           {
+    ///             "key": "id",
+    ///             "value": 123,
+    ///             "type": "cohort",
+    ///             "operator": "exact"
+    ///           },
+    ///           {
+    ///             "key": "email",
+    ///             "value": "@posthog.com",
+    ///             "type": "person",
+    ///             "operator": "icontains"
+    ///           }
+    ///         ]
+    ///       }
+    ///     ]
+    ///   }
+    /// }
+    /// ```
+    fn traverse_filters(
+        inner: &InnerCohortProperty,
+        dependencies: &mut HashSet<CohortId>,
+    ) -> Result<(), FlagError> {
+        for cohort_values in &inner.values {
+            for filter in &cohort_values.values {
+                if filter.is_cohort() {
+                    // Assuming the value is a single integer CohortId
+                    if let Some(cohort_id) = filter.value.as_i64() {
+                        dependencies.insert(cohort_id as CohortId);
+                    } else {
+                        return Err(FlagError::CohortFiltersParsingError);
+                    }
+                }
+                // NB: we don't support nested cohort properties, so we don't need to traverse further
+            }
+        }
+        Ok(())
+    }
+}
+
+impl InnerCohortProperty {
+    /// Flattens the nested cohort property structure into a list of property filters.
+    ///
+    /// The cohort property structure in Postgres looks like:
+    /// ```json
+    /// {
+    ///   "type": "OR",
+    ///   "values": [
+    ///     {
+    ///       "type": "OR",
+    ///       "values": [
+    ///         {
+    ///           "key": "email",
+    ///           "value": "@posthog.com",
+    ///           "type": "person",
+    ///           "operator": "icontains"
+    ///         },
+    ///         {
+    ///           "key": "age",
+    ///           "value": 25,
+    ///           "type": "person",
+    ///           "operator": "gt"
+    ///         }
+    ///       ]
+    ///     }
+    ///   ]
+    /// }
+    /// ```
+    pub fn to_property_filters(&self) -> Vec<PropertyFilter> {
+        self.values
+            .iter()
+            .flat_map(|value| &value.values)
+            .cloned()
+            .collect()
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::{
+        cohort_models::{CohortPropertyType, CohortValues},
+        test_utils::{
+            insert_cohort_for_team_in_pg, insert_new_team_in_pg, setup_pg_reader_client,
+            setup_pg_writer_client,
+        },
+    };
+    use serde_json::json;
+
+    #[tokio::test]
+    async fn test_cohort_from_pg() {
+        let postgres_reader = setup_pg_reader_client(None).await;
+        let postgres_writer = setup_pg_writer_client(None).await;
+
+        let team = insert_new_team_in_pg(postgres_reader.clone(), None)
+            .await
+            .expect("Failed to insert team");
+
+        let cohort = insert_cohort_for_team_in_pg(
+            postgres_writer.clone(),
+            team.id,
+            None,
+            json!({"properties": {"type": "OR", "values": [{"type": "OR", "values": [{"key": "$initial_browser_version", "type": "person", "value": ["125"], "negation": false, "operator": "exact"}]}]}}),
+            false,
+        )
+        .await
+        .expect("Failed to insert cohort");
+
+        let fetched_cohort = Cohort::from_pg(postgres_reader, cohort.id, team.id)
+            .await
+            .expect("Failed to fetch cohort");
+
+        assert_eq!(fetched_cohort.id, cohort.id);
+        assert_eq!(fetched_cohort.name, "Test Cohort");
+        assert_eq!(fetched_cohort.team_id, team.id);
+    }
+
+    #[tokio::test]
+    async fn test_list_from_pg() {
+        let postgres_reader = setup_pg_reader_client(None).await;
+        let postgres_writer = setup_pg_writer_client(None).await;
+
+        let team = insert_new_team_in_pg(postgres_reader.clone(), None)
+            .await
+            .expect("Failed to insert team");
+
+        // Insert multiple cohorts for the team
+        insert_cohort_for_team_in_pg(
+            postgres_writer.clone(),
+            team.id,
+            Some("Cohort 1".to_string()),
+            json!({"properties": {"type": "AND", "values": [{"type": "property", "values": [{"key": "age", "type": "person", "value": [30], "negation": false, "operator": "gt"}]}]}}),
+            false,
+        )
+        .await
+        .expect("Failed to insert cohort1");
+
+        insert_cohort_for_team_in_pg(
+            postgres_writer.clone(),
+            team.id,
+            Some("Cohort 2".to_string()),
+            json!({"properties": {"type": "OR", "values": [{"type": "property", "values": [{"key": "country", "type": "person", "value": ["USA"], "negation": false, "operator": "exact"}]}]}}),
+            false,
+        )
+        .await
+        .expect("Failed to insert cohort2");
+
+        let cohorts = Cohort::list_from_pg(postgres_reader, team.id)
+            .await
+            .expect("Failed to list cohorts");
+
+        assert_eq!(cohorts.len(), 2);
+        let names: HashSet<String> = cohorts.into_iter().map(|c| c.name).collect();
+        assert!(names.contains("Cohort 1"));
+        assert!(names.contains("Cohort 2"));
+    }
+
+    #[test]
+    fn test_cohort_parse_filters() {
+        let cohort = Cohort {
+            id: 1,
+            name: "Test Cohort".to_string(),
+            description: None,
+            team_id: 1,
+            deleted: false,
+            filters: json!({"properties": {"type": "OR", "values": [{"type": "OR", "values": [{"key": "$initial_browser_version", "type": "person", "value": ["125"], "negation": false, "operator": "exact"}]}]}}),
+            query: None,
+            version: None,
+            pending_version: None,
+            count: None,
+            is_calculating: false,
+            is_static: false,
+            errors_calculating: 0,
+            groups: json!({}),
+            created_by_id: None,
+        };
+
+        let result = cohort.parse_filters().unwrap();
+        assert_eq!(result.len(), 1);
+        assert_eq!(result[0].key, "$initial_browser_version");
+        assert_eq!(result[0].value, json!(["125"]));
+        assert_eq!(result[0].prop_type, "person");
+    }
+
+    #[test]
+    fn test_cohort_property_to_property_filters() {
+        let cohort_property = InnerCohortProperty {
+            prop_type: CohortPropertyType::AND,
+            values: vec![CohortValues {
+                prop_type: "property".to_string(),
+                values: vec![
+                    PropertyFilter {
+                        key: "email".to_string(),
+                        value: json!("test@example.com"),
+                        operator: None,
+                        prop_type: "person".to_string(),
+                        group_type_index: None,
+                        negation: None,
+                    },
+                    PropertyFilter {
+                        key: "age".to_string(),
+                        value: json!(25),
+                        operator: None,
+                        prop_type: "person".to_string(),
+                        group_type_index: None,
+                        negation: None,
+                    },
+                ],
+            }],
+        };
+
+        let result = cohort_property.to_property_filters();
+        assert_eq!(result.len(), 2);
+        assert_eq!(result[0].key, "email");
+        assert_eq!(result[0].value, json!("test@example.com"));
+        assert_eq!(result[1].key, "age");
+        assert_eq!(result[1].value, json!(25));
+    }
+
+    #[tokio::test]
+    async fn test_extract_dependencies() {
+        let postgres_reader = setup_pg_reader_client(None).await;
+        let postgres_writer = setup_pg_writer_client(None).await;
+
+        let team = insert_new_team_in_pg(postgres_reader.clone(), None)
+            .await
+            .expect("Failed to insert team");
+
+        // Insert a single cohort that is dependent on another cohort
+        let dependent_cohort = insert_cohort_for_team_in_pg(
+            postgres_writer.clone(),
+            team.id,
+            Some("Dependent Cohort".to_string()),
+            json!({"properties": {"type": "OR", "values": [{"type": "OR", "values": [{"key": "$browser", "type": "person", "value": ["Safari"], "negation": false, "operator": "exact"}]}]}}),
+            false,
+        )
+        .await
+        .expect("Failed to insert dependent_cohort");
+
+        // Insert main cohort with a single dependency
+        let main_cohort = insert_cohort_for_team_in_pg(
+            postgres_writer.clone(),
+            team.id,
+            Some("Main Cohort".to_string()),
+            json!({"properties": {"type": "OR", "values": [{"type": "OR", "values": [{"key": "id", "type": "cohort", "value": dependent_cohort.id, "negation": false}]}]}}),
+            false,
+        )
+        .await
+        .expect("Failed to insert main_cohort");
+
+        let fetched_main_cohort = Cohort::from_pg(postgres_reader.clone(), main_cohort.id, team.id)
+            .await
+            .expect("Failed to fetch main cohort");
+
+        println!("fetched_main_cohort: {:?}", fetched_main_cohort);
+
+        let dependencies = fetched_main_cohort.extract_dependencies().unwrap();
+        let expected_dependencies: HashSet<CohortId> =
+            [dependent_cohort.id].iter().cloned().collect();
+
+        assert_eq!(dependencies, expected_dependencies);
+    }
+}
diff --git a/rust/feature-flags/src/flag_definitions.rs b/rust/feature-flags/src/flag_definitions.rs
index baebaa04da3..d62ecc9e0e0 100644
--- a/rust/feature-flags/src/flag_definitions.rs
+++ b/rust/feature-flags/src/flag_definitions.rs
@@ -1,4 +1,7 @@
-use crate::{api::FlagError, database::Client as DatabaseClient, redis::Client as RedisClient};
+use crate::{
+    api::FlagError, cohort_models::CohortId, database::Client as DatabaseClient,
+    redis::Client as RedisClient,
+};
 use serde::{Deserialize, Serialize};
 use std::sync::Arc;
 use tracing::instrument;
@@ -7,7 +10,7 @@
 // TODO: Add integration tests across repos to ensure this doesn't happen.
 pub const TEAM_FLAGS_CACHE_PREFIX: &str = "posthog:1:team_feature_flags_";
 
-#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize)]
 #[serde(rename_all = "snake_case")]
 pub enum OperatorType {
     Exact,
@@ -25,6 +28,8 @@ pub enum OperatorType {
     IsDateExact,
     IsDateAfter,
     IsDateBefore,
+    In,
+    NotIn,
 }
 
 #[derive(Debug, Clone, Deserialize, Serialize)]
@@ -36,10 +41,28 @@ pub struct PropertyFilter {
     pub value: serde_json::Value,
     pub operator: Option<OperatorType>,
     #[serde(rename = "type")]
+    // TODO: worth making an enum here to differentiate between cohort and person filters?
     pub prop_type: String,
+    pub negation: Option<bool>,
     pub group_type_index: Option<i32>,
 }
 
+impl PropertyFilter {
+    /// Checks if the filter is a cohort filter
+    pub fn is_cohort(&self) -> bool {
+        self.key == "id" && self.prop_type == "cohort"
+    }
+
+    /// Returns the cohort id if the filter is a cohort filter, or None if it's not a cohort filter
+    /// or if the value cannot be parsed as a cohort id
+    pub fn get_cohort_id(&self) -> Option<CohortId> {
+        if !self.is_cohort() {
+            return None;
+        }
+        self.value.as_i64().map(|id| id as CohortId)
+    }
+}
+
 #[derive(Debug, Clone, Deserialize, Serialize)]
 pub struct FlagGroupType {
     pub properties: Option<Vec<PropertyFilter>>,
@@ -68,6 +91,9 @@ pub struct FlagFilters {
     pub super_groups: Option<Vec<FlagGroupType>>,
 }
 
+// TODO: see if you can combine these two structs, like we do with cohort models.
+// This will require not deserializing on read and instead doing it lazily, on-demand
+// (which, tbh, is probably a better idea).
 #[derive(Debug, Clone, Deserialize, Serialize)]
 pub struct FeatureFlag {
     pub id: i32,
@@ -142,7 +168,7 @@ impl FeatureFlagList {
                 tracing::error!("failed to parse data to flags list: {}", e);
                 println!("failed to parse data: {}", e);
 
-                FlagError::DataParsingError
+                FlagError::RedisDataParsingError
             })?;
 
         Ok(FeatureFlagList { flags: flags_list })
@@ -174,7 +200,7 @@
             .map(|row| {
                 let filters = serde_json::from_value(row.filters).map_err(|e| {
                     tracing::error!("Failed to deserialize filters for flag {}: {}", row.key, e);
-                    FlagError::DataParsingError
+                    FlagError::RedisDataParsingError
                 })?;
 
                 Ok(FeatureFlag {
@@ -200,7 +226,7 @@
     ) -> Result<(), FlagError> {
         let payload = serde_json::to_string(&flags.flags).map_err(|e| {
             tracing::error!("Failed to serialize flags: {}", e);
-            FlagError::DataParsingError
+            FlagError::RedisDataParsingError
        })?;
 
         client
@@ -1095,7 +1121,7 @@ mod tests {
             .expect("Failed to set malformed JSON in Redis");
 
         let result = FeatureFlagList::from_redis(redis_client, team.id).await;
-        assert!(matches!(result, Err(FlagError::DataParsingError)));
+        assert!(matches!(result, Err(FlagError::RedisDataParsingError)));
 
         // Test database query error (using a non-existent table)
         let result = sqlx::query("SELECT * FROM non_existent_table")
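To illustrate the convention the new helpers encode (a cohort filter is just a property filter whose key is "id" and whose type is "cohort", with the cohort id as the value), here is a self-contained sketch with the struct pared down to the three relevant fields; only serde_json is assumed:

```rust
use serde_json::json;

struct PropertyFilter {
    key: String,
    value: serde_json::Value,
    prop_type: String,
}

impl PropertyFilter {
    fn is_cohort(&self) -> bool {
        self.key == "id" && self.prop_type == "cohort"
    }
    fn get_cohort_id(&self) -> Option<i32> {
        if !self.is_cohort() {
            return None;
        }
        self.value.as_i64().map(|id| id as i32)
    }
}

fn main() {
    // {"key": "id", "type": "cohort", "value": 42} refers to cohort 42.
    let cohort_filter = PropertyFilter {
        key: "id".into(),
        value: json!(42),
        prop_type: "cohort".into(),
    };
    assert_eq!(cohort_filter.get_cohort_id(), Some(42));

    // A plain person-property filter is not a cohort filter.
    let person_filter = PropertyFilter {
        key: "email".into(),
        value: json!("@posthog.com"),
        prop_type: "person".into(),
    };
    assert_eq!(person_filter.get_cohort_id(), None);
}
```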
diff --git a/rust/feature-flags/src/flag_matching.rs b/rust/feature-flags/src/flag_matching.rs
index bdcd542f098..d9332fce4e4 100644
--- a/rust/feature-flags/src/flag_matching.rs
+++ b/rust/feature-flags/src/flag_matching.rs
@@ -1,30 +1,35 @@
 use crate::{
     api::{FlagError, FlagValue, FlagsResponse},
+    cohort_cache::CohortCacheManager,
+    cohort_models::{Cohort, CohortId},
     database::Client as DatabaseClient,
     feature_flag_match_reason::FeatureFlagMatchReason,
-    flag_definitions::{FeatureFlag, FeatureFlagList, FlagGroupType, PropertyFilter},
+    flag_definitions::{FeatureFlag, FeatureFlagList, FlagGroupType, OperatorType, PropertyFilter},
     metrics_consts::{FLAG_EVALUATION_ERROR_COUNTER, FLAG_HASH_KEY_WRITES_COUNTER},
+    metrics_utils::parse_exception_for_prometheus_label,
     property_matching::match_property,
-    utils::parse_exception_for_prometheus_label,
 };
 use anyhow::Result;
 use common_metrics::inc;
+use petgraph::algo::{is_cyclic_directed, toposort};
+use petgraph::graph::DiGraph;
 use serde_json::Value;
 use sha1::{Digest, Sha1};
-use sqlx::{postgres::PgQueryResult, Acquire, FromRow};
+use sqlx::{postgres::PgQueryResult, Acquire, FromRow, Row};
 use std::fmt::Write;
 use std::sync::Arc;
 use std::{
-    collections::{HashMap, HashSet},
+    collections::{HashMap, HashSet, VecDeque},
     time::Duration,
 };
 use tokio::time::{sleep, timeout};
 use tracing::{error, info};
 
-type TeamId = i32;
-type GroupTypeIndex = i32;
-type PostgresReader = Arc<dyn DatabaseClient + Send + Sync>;
-type PostgresWriter = Arc<dyn DatabaseClient + Send + Sync>;
+pub type TeamId = i32;
+pub type PersonId = i32;
+pub type GroupTypeIndex = i32;
+pub type PostgresReader = Arc<dyn DatabaseClient + Send + Sync>;
+pub type PostgresWriter = Arc<dyn DatabaseClient + Send + Sync>;
 
 #[derive(Debug)]
 struct SuperConditionEvaluation {
@@ -172,6 +177,7 @@ impl GroupTypeMappingCache {
 /// to fetch the properties from the DB each time.
 #[derive(Clone, Default, Debug)]
 pub struct PropertiesCache {
+    person_id: Option<PersonId>,
     person_properties: Option<HashMap<String, Value>>,
     group_properties: HashMap<GroupTypeIndex, HashMap<String, Value>>,
 }
@@ -182,6 +188,7 @@ pub struct FeatureFlagMatcher {
     pub team_id: TeamId,
     pub postgres_reader: PostgresReader,
     pub postgres_writer: PostgresWriter,
+    pub cohort_cache: Arc<CohortCacheManager>,
     group_type_mapping_cache: GroupTypeMappingCache,
     properties_cache: PropertiesCache,
     groups: HashMap<String, Value>,
@@ -195,8 +202,8 @@ impl FeatureFlagMatcher {
         team_id: TeamId,
         postgres_reader: PostgresReader,
         postgres_writer: PostgresWriter,
+        cohort_cache: Arc<CohortCacheManager>,
         group_type_mapping_cache: Option<GroupTypeMappingCache>,
-        properties_cache: Option<PropertiesCache>,
         groups: Option<HashMap<String, Value>>,
     ) -> Self {
         FeatureFlagMatcher {
@@ -204,16 +211,26 @@
             team_id,
             postgres_reader: postgres_reader.clone(),
             postgres_writer: postgres_writer.clone(),
+            cohort_cache,
             group_type_mapping_cache: group_type_mapping_cache
                 .unwrap_or_else(|| GroupTypeMappingCache::new(team_id, postgres_reader.clone())),
-            properties_cache: properties_cache.unwrap_or_default(),
             groups: groups.unwrap_or_default(),
+            properties_cache: PropertiesCache::default(),
         }
     }
 
-    /// Evaluate feature flags for a given distinct_id
-    /// - Returns a map of feature flag keys to their values
-    /// - If an error occurs while evaluating a flag, it will be logged and the flag will be omitted from the result
+    /// Evaluates all feature flags for the current matcher context.
+    ///
+    /// ## Arguments
+    ///
+    /// * `feature_flags` - The list of feature flags to evaluate.
+    /// * `person_property_overrides` - Any overrides for person properties.
+    /// * `group_property_overrides` - Any overrides for group properties.
+    /// * `hash_key_override` - Optional hash key overrides for experience continuity.
+    ///
+    /// ## Returns
+    ///
+    /// * `FlagsResponse` - The result containing flag evaluations and any errors.
     pub async fn evaluate_all_feature_flags(
         &mut self,
         feature_flags: FeatureFlagList,
@@ -732,14 +749,38 @@
                 .await;
         }
 
-        // NB: we can only evaluate group or person properties, not both
-        let properties_to_check = self
-            .get_properties_to_check(feature_flag, property_overrides, flag_property_filters)
+        // Separate cohort and non-cohort filters
+        let (cohort_filters, non_cohort_filters): (Vec<PropertyFilter>, Vec<PropertyFilter>) =
+            flag_property_filters
+                .iter()
+                .cloned()
+                .partition(|prop| prop.is_cohort());
+
+        // Get the properties we need to check for in this condition match from the flag + any overrides
+        let person_or_group_properties = self
+            .get_properties_to_check(feature_flag, property_overrides, &non_cohort_filters)
             .await?;
 
-        if !all_properties_match(flag_property_filters, &properties_to_check) {
+        // Evaluate non-cohort filters first, since they're cheaper to evaluate and we can return early if they don't match
+        if !all_properties_match(&non_cohort_filters, &person_or_group_properties) {
             return Ok((false, FeatureFlagMatchReason::NoConditionMatch));
         }
+
+        // Evaluate cohort filters, if any.
+        if !cohort_filters.is_empty() {
+            // Get the person ID for the current distinct ID – this value should be cached at this point, but as a fallback we fetch from the database
+            let person_id = self.get_person_id().await?;
+            if !self
+                .evaluate_cohort_filters(
+                    &cohort_filters,
+                    &person_or_group_properties,
+                    person_id,
+                )
+                .await?
+            {
+                return Ok((false, FeatureFlagMatchReason::NoConditionMatch));
+            }
+        }
     }
 
     self.check_rollout(feature_flag, rollout_percentage, hash_key_overrides)
@@ -786,6 +827,31 @@
         }
     }
 
+    /// Retrieves the `PersonId` from the properties cache.
+    /// If the cache does not contain a `PersonId`, it fetches it from the database
+    /// and updates the cache accordingly.
+    async fn get_person_id(&mut self) -> Result<PersonId, FlagError> {
+        match self.properties_cache.person_id {
+            Some(id) => Ok(id),
+            None => {
+                let id = self.get_person_id_from_db().await?;
+                self.properties_cache.person_id = Some(id);
+                Ok(id)
+            }
+        }
+    }
+
+    /// Fetches the `PersonId` from the database based on the current `distinct_id` and `team_id`.
+    /// This method is called when the `PersonId` is not present in the properties cache.
+    async fn get_person_id_from_db(&mut self) -> Result<PersonId, FlagError> {
+        let postgres_reader = self.postgres_reader.clone();
+        let distinct_id = self.distinct_id.clone();
+        let team_id = self.team_id;
+        fetch_person_properties_from_db(postgres_reader, distinct_id, team_id)
+            .await
+            .map(|(_, person_id)| person_id)
+    }
+
     /// Get person properties from cache or database.
     ///
     /// This function attempts to retrieve person properties either from a cache or directly from the database.
@@ -805,6 +871,56 @@
         }
     }
 
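The split-then-early-exit step above is plain `Iterator::partition`; a tiny self-contained sketch of the same cheap-first ordering, with a hypothetical simplified `Filter` type standing in for `PropertyFilter`:

```rust
#[derive(Clone, Debug)]
struct Filter {
    key: String,
    prop_type: String,
}

fn is_cohort(f: &Filter) -> bool {
    f.key == "id" && f.prop_type == "cohort"
}

fn main() {
    let filters = vec![
        Filter { key: "email".into(), prop_type: "person".into() },
        Filter { key: "id".into(), prop_type: "cohort".into() },
    ];

    // Split once, then evaluate the cheap (non-cohort) side first and return
    // early if it already fails; only then do the potentially DB-backed cohort work.
    let (cohort_filters, plain_filters): (Vec<Filter>, Vec<Filter>) =
        filters.into_iter().partition(is_cohort);

    assert_eq!(plain_filters.len(), 1);
    assert_eq!(cohort_filters.len(), 1);
}
```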
+    /// Evaluates dynamic cohort property filters
+    ///
+    /// NB: This method first caches all of the cohorts associated with the team, which allows us to avoid
+    /// hitting the database for each cohort filter.
+    pub async fn evaluate_cohort_filters(
+        &self,
+        cohort_property_filters: &[PropertyFilter],
+        target_properties: &HashMap<String, Value>,
+        person_id: PersonId,
+    ) -> Result<bool, FlagError> {
+        // At the start of the request, fetch all of the cohorts for the team from the cache.
+        // The CohortCacheManager keeps each team's cohorts in memory (subject to its TTL and
+        // weighted capacity), so we only go back to the database once a team's entry has
+        // expired or been evicted. See the CohortCacheManager for more details.
+        let cohorts = self.cohort_cache.get_cohorts_for_team(self.team_id).await?;
+
+        // Split the cohorts into static and dynamic, since the dynamic ones have property filters
+        // and we need to evaluate them based on the target properties, whereas the static ones are
+        // purely based on person properties and are membership-based.
+        let (static_cohorts, dynamic_cohorts): (Vec<_>, Vec<_>) =
+            cohorts.iter().partition(|c| c.is_static);
+
+        // Store all cohort match results in a HashMap to avoid re-evaluating the same cohort multiple times,
+        // since the same cohort could appear in multiple property filters.
+        let mut cohort_matches = HashMap::new();
+
+        if !static_cohorts.is_empty() {
+            let results = evaluate_static_cohorts(
+                self.postgres_reader.clone(),
+                person_id,
+                static_cohorts.iter().map(|c| c.id).collect(),
+            )
+            .await?;
+            cohort_matches.extend(results);
+        }
+
+        if !dynamic_cohorts.is_empty() {
+            for filter in cohort_property_filters {
+                let cohort_id = filter
+                    .get_cohort_id()
+                    .ok_or(FlagError::CohortFiltersParsingError)?;
+                let match_result =
+                    evaluate_dynamic_cohorts(cohort_id, target_properties, cohorts.clone())?;
+                cohort_matches.insert(cohort_id, match_result);
+            }
+        }
+
+        // Apply cohort membership logic (IN|NOT_IN) to the cohort match results
+        apply_cohort_membership_logic(cohort_property_filters, &cohort_matches)
+    }
+
     /// Check if a super condition matches for a feature flag.
     ///
     /// This function evaluates the super conditions of a feature flag to determine if any of them should be enabled.
@@ -917,11 +1033,12 @@
         let postgres_reader = self.postgres_reader.clone();
         let distinct_id = self.distinct_id.clone();
         let team_id = self.team_id;
-        let db_properties =
+        let (db_properties, person_id) =
             fetch_person_properties_from_db(postgres_reader, distinct_id, team_id).await?;
 
-        // once the properties are fetched, cache them so we don't need to fetch again in a given request
+        // once the properties and person ID are fetched, cache them so we don't need to fetch again in a given request
         self.properties_cache.person_properties = Some(db_properties.clone());
+        self.properties_cache.person_id = Some(person_id);
 
         Ok(db_properties)
     }
@@ -1048,6 +1165,221 @@
     }
 }
 
+/// Evaluate static cohort filters by checking if the person is in each cohort.
+async fn evaluate_static_cohorts(
+    postgres_reader: PostgresReader,
+    person_id: i32, // NB: static membership is keyed on person_id, not distinct_id
+    cohort_ids: Vec<CohortId>,
+) -> Result<Vec<(CohortId, bool)>, FlagError> {
+    let mut conn = postgres_reader.get_connection().await?;
+
+    let query = r#"
+           WITH cohort_membership AS (
+               SELECT c.cohort_id,
+                      CASE WHEN pc.cohort_id IS NOT NULL THEN true ELSE false END AS is_member
+               FROM unnest($1::integer[]) AS c(cohort_id)
+               LEFT JOIN posthog_cohortpeople AS pc
+                 ON pc.person_id = $2
+                 AND pc.cohort_id = c.cohort_id
+           )
+           SELECT cohort_id, is_member
+           FROM cohort_membership
+       "#;
+
+    let rows = sqlx::query(query)
+        .bind(&cohort_ids)
+        .bind(person_id) // Bind person_id directly
+        .fetch_all(&mut *conn)
+        .await?;
+
+    let result = rows
+        .into_iter()
+        .map(|row| {
+            let cohort_id: CohortId = row.get("cohort_id");
+            let is_member: bool = row.get("is_member");
+            (cohort_id, is_member)
+        })
+        .collect();
+
+    Ok(result)
+}
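The dynamic-cohort evaluation that follows relies on a reverse topological sort, which is easy to get backwards. A standalone sketch of the ordering behaviour, assuming only the petgraph crate: edges point from a cohort to its dependencies, so `toposort` yields dependents first and the evaluation loop walks the result in reverse.

```rust
use petgraph::algo::toposort;
use petgraph::graph::DiGraph;

fn main() {
    // Cohort 1 depends on cohort 2, so the edge points 1 -> 2.
    let mut graph = DiGraph::<i32, ()>::new();
    let one = graph.add_node(1);
    let two = graph.add_node(2);
    graph.add_edge(one, two, ());

    // toposort puts edge sources (dependents) before targets (dependencies)...
    let order = toposort(&graph, None).expect("graph has no cycles");

    // ...so reversing gives a dependencies-first evaluation order: [2, 1].
    let ids: Vec<i32> = order.into_iter().rev().map(|n| graph[n]).collect();
    assert_eq!(ids, vec![2, 1]);
}
```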
+
+/// Evaluates a dynamic cohort and its dependencies.
+/// This uses a topological sort to evaluate dependencies first, which is necessary
+/// because a cohort can depend on another cohort, and we need to respect the dependency order.
+fn evaluate_dynamic_cohorts(
+    initial_cohort_id: CohortId,
+    target_properties: &HashMap<String, Value>,
+    cohorts: Vec<Cohort>,
+) -> Result<bool, FlagError> {
+    let cohort_dependency_graph =
+        build_cohort_dependency_graph(initial_cohort_id, cohorts.clone())?;
+
+    // We need to sort cohorts topologically to ensure we evaluate dependencies before the cohorts that depend on them.
+    // For example, if cohort A depends on cohort B, we need to evaluate B first to know if A matches.
+    // This also helps detect cycles - if cohort A depends on B which depends on A, toposort will fail.
+    let sorted_cohort_ids_as_graph_nodes =
+        toposort(&cohort_dependency_graph, None).map_err(|e| {
+            FlagError::CohortDependencyCycle(format!("Cyclic dependency detected: {:?}", e))
+        })?;
+
+    // Store evaluation results for each cohort in a map, so we can look up whether a cohort matched
+    // when evaluating cohorts that depend on it, and also return the final result for the initial cohort
+    let mut evaluation_results = HashMap::new();
+
+    // Iterate through the sorted nodes in reverse order (so that we can evaluate dependencies first)
+    for node in sorted_cohort_ids_as_graph_nodes.into_iter().rev() {
+        let cohort_id = cohort_dependency_graph[node];
+        let cohort = cohorts
+            .iter()
+            .find(|c| c.id == cohort_id)
+            .ok_or(FlagError::CohortNotFound(cohort_id.to_string()))?;
+        let property_filters = cohort.parse_filters()?;
+        let dependencies = cohort.extract_dependencies()?;
+
+        // Check if all dependencies have been met (i.e., previous cohorts matched)
+        let dependencies_met = dependencies
+            .iter()
+            .all(|dep_id| evaluation_results.get(dep_id).copied().unwrap_or(false));
+
+        // If dependencies are not met, mark the current cohort as not matched and continue
+        // NB: We don't want to _exit_ here, since the non-matching cohort could be wrapped in a `not_in` operator
+        // and we want to evaluate all cohorts to determine if the initial cohort matches.
+        if !dependencies_met {
+            evaluation_results.insert(cohort_id, false);
+            continue;
+        }
+
+        // Evaluate all property filters for the current cohort
+        let all_filters_match = property_filters
+            .iter()
+            .all(|filter| match_property(filter, target_properties, false).unwrap_or(false));
+
+        // Store the evaluation result for the current cohort
+        evaluation_results.insert(cohort_id, all_filters_match);
+    }
+
+    // Retrieve and return the evaluation result for the initial cohort
+    evaluation_results
+        .get(&initial_cohort_id)
+        .copied()
+        .ok_or_else(|| FlagError::CohortNotFound(initial_cohort_id.to_string()))
+}
+
+/// Apply cohort membership logic (i.e., IN|NOT_IN)
+fn apply_cohort_membership_logic(
+    cohort_filters: &[PropertyFilter],
+    cohort_matches: &HashMap<CohortId, bool>,
+) -> Result<bool, FlagError> {
+    for filter in cohort_filters {
+        let cohort_id = filter
+            .get_cohort_id()
+            .ok_or(FlagError::CohortFiltersParsingError)?;
+        let matches = cohort_matches.get(&cohort_id).copied().unwrap_or(false);
+        let operator = filter.operator.unwrap_or(OperatorType::In);
+
+        // Combine the operator logic directly within this method
+        let membership_match = match operator {
+            OperatorType::In => matches,
+            OperatorType::NotIn => !matches,
+            // Currently supported operators are IN and NOT IN
+            // Any other operator defaults to false
+            _ => false,
+        };
+
+        // If any filter does not match, return false early
+        if !membership_match {
+            return Ok(false);
+        }
+    }
+    // All filters matched
+    Ok(true)
+}
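The membership semantics above are all-of: every cohort filter must pass, a cohort missing from the match map counts as a non-match, and NOT_IN simply inverts. The same logic in miniature, with plain types and a hypothetical `membership` helper (std only):

```rust
use std::collections::HashMap;

#[derive(Clone, Copy)]
enum Operator {
    In,
    NotIn,
}

// Mirrors apply_cohort_membership_logic: all filters must hold, and a
// cohort absent from the match map is treated as "did not match".
fn membership(filters: &[(i32, Operator)], matches: &HashMap<i32, bool>) -> bool {
    filters.iter().all(|(cohort_id, op)| {
        let matched = matches.get(cohort_id).copied().unwrap_or(false);
        match op {
            Operator::In => matched,
            Operator::NotIn => !matched,
        }
    })
}

fn main() {
    let matches = HashMap::from([(1, true), (2, false)]);
    assert!(membership(&[(1, Operator::In), (2, Operator::NotIn)], &matches));
    assert!(!membership(&[(2, Operator::In)], &matches));
    assert!(!membership(&[(3, Operator::In)], &matches)); // unknown cohort -> false
}
```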
+
+/// Constructs a dependency graph for cohorts.
+///
+/// Example dependency graph:
+/// ```text
+///   A    B
+///   |   /|
+///   |  / |
+///   | /  |
+///   C    D
+///    \  /
+///     \/
+///     E
+/// ```
+/// In this example:
+/// - Cohorts A and B are root nodes (no dependencies)
+/// - C depends on A and B
+/// - D depends on B
+/// - E depends on C and D
+///
+/// The graph is acyclic, which is required for valid cohort dependencies.
+fn build_cohort_dependency_graph(
+    initial_cohort_id: CohortId,
+    cohorts: Vec<Cohort>,
+) -> Result<DiGraph<CohortId, ()>, FlagError> {
+    let mut graph = DiGraph::new();
+    let mut node_map = HashMap::new();
+    let mut queue = VecDeque::new();
+
+    let initial_cohort = cohorts
+        .iter()
+        .find(|c| c.id == initial_cohort_id)
+        .ok_or(FlagError::CohortNotFound(initial_cohort_id.to_string()))?;
+
+    if initial_cohort.is_static {
+        return Ok(graph);
+    }
+
+    // This implements a breadth-first search (BFS) traversal to build a directed graph of cohort dependencies.
+    // Starting from the initial cohort, we:
+    // 1. Add each cohort as a node in the graph
+    // 2. Track visited nodes in a map to avoid duplicates
+    // 3. For each cohort, get its dependencies and add directed edges from the cohort to its dependencies
+    // 4. Queue up any unvisited dependencies to process their dependencies later
+    // This builds up the full dependency graph level by level, which we can later check for cycles
+    queue.push_back(initial_cohort_id);
+    node_map.insert(initial_cohort_id, graph.add_node(initial_cohort_id));
+
+    while let Some(cohort_id) = queue.pop_front() {
+        let cohort = cohorts
+            .iter()
+            .find(|c| c.id == cohort_id)
+            .ok_or(FlagError::CohortNotFound(cohort_id.to_string()))?;
+        let dependencies = cohort.extract_dependencies()?;
+        for dep_id in dependencies {
+            // Retrieve the current node **before** mutable borrowing
+            // This is safe because we're not mutating the node map,
+            // and it keeps the borrow checker happy
+            let current_node = node_map[&cohort_id];
+            // Record whether this dependency is new *before* the `entry` call below inserts it;
+            // checking afterwards would always see it as present and transitive dependencies
+            // would never be queued.
+            let is_new_dep = !node_map.contains_key(&dep_id);
+            // Add dependency node if we haven't seen this cohort ID before in our traversal.
+            // This happens when we discover a new dependency that wasn't previously
+            // encountered while processing other cohorts in the graph.
+            let dep_node = node_map
+                .entry(dep_id)
+                .or_insert_with(|| graph.add_node(dep_id));
+
+            graph.add_edge(current_node, *dep_node, ());
+
+            if is_new_dep {
+                queue.push_back(dep_id);
+            }
+        }
+    }
+
+    // Check for cycles: cohort dependencies must form a directed acyclic graph,
+    // so reject the graph if is_cyclic_directed finds a cycle
+    if is_cyclic_directed(&graph) {
+        return Err(FlagError::CohortDependencyCycle(format!(
+            "Cyclic dependency detected starting at cohort {}",
+            initial_cohort_id
+        )));
+    }
+
+    Ok(graph)
+}
+
 /// Fetch and locally cache all properties for a given distinct ID and team ID.
 ///
 /// This function fetches both person and group properties for a specified distinct ID and team ID.
@@ -1063,32 +1395,52 @@ async fn fetch_and_locally_cache_all_properties(
     let query = r#"
         SELECT
-            (SELECT "posthog_person"."properties"
-             FROM "posthog_person"
-             INNER JOIN "posthog_persondistinctid"
-             ON ("posthog_person"."id" = "posthog_persondistinctid"."person_id")
-             WHERE ("posthog_persondistinctid"."distinct_id" = $1
-                    AND "posthog_persondistinctid"."team_id" = $2
-                    AND "posthog_person"."team_id" = $2)
-             LIMIT 1) as person_properties,
-
-            (SELECT json_object_agg("posthog_group"."group_type_index", "posthog_group"."group_properties")
-             FROM "posthog_group"
-             WHERE ("posthog_group"."team_id" = $2
-                    AND "posthog_group"."group_type_index" = ANY($3))) as group_properties
+            person.person_id,
+            person.person_properties,
+            group_properties.group_properties
+        FROM (
+            SELECT
+                "posthog_person"."id" AS person_id,
+                "posthog_person"."properties" AS person_properties
+            FROM "posthog_person"
+            INNER JOIN "posthog_persondistinctid"
+                ON "posthog_person"."id" = "posthog_persondistinctid"."person_id"
+            WHERE
+                "posthog_persondistinctid"."distinct_id" = $1
+                AND "posthog_persondistinctid"."team_id" = $2
+                AND "posthog_person"."team_id" = $2
+            LIMIT 1
+        ) AS person,
+        (
+            SELECT
+                json_object_agg(
+                    "posthog_group"."group_type_index",
+                    "posthog_group"."group_properties"
+                ) AS group_properties
+            FROM "posthog_group"
+            WHERE
+                "posthog_group"."team_id" = $2
+                AND "posthog_group"."group_type_index" = ANY($3)
+        ) AS group_properties
     "#;
 
     let group_type_indexes_vec: Vec<GroupTypeIndex> = group_type_indexes.iter().cloned().collect();
 
-    let row: (Option<Value>, Option<Value>) = sqlx::query_as(query)
+    let row: (Option<i32>, Option<Value>, Option<Value>) = sqlx::query_as(query)
         .bind(&distinct_id)
         .bind(team_id)
         .bind(&group_type_indexes_vec)
         .fetch_optional(&mut *conn)
         .await?
-        .unwrap_or((None, None));
+        .unwrap_or((None, None, None));
 
-    if let Some(person_props) = row.0 {
+    let (person_id, person_props, group_props) = row;
+
+    if let Some(person_id) = person_id {
+        properties_cache.person_id = Some(person_id);
+    }
+
+    if let Some(person_props) = person_props {
         properties_cache.person_properties = Some(
             person_props
                 .as_object()
@@ -1099,7 +1451,7 @@
         );
     }
 
-    if let Some(group_props) = row.1 {
+    if let Some(group_props) = group_props {
         let group_props_map: HashMap<GroupTypeIndex, HashMap<String, Value>> = group_props
             .as_object()
             .unwrap_or(&serde_json::Map::new())
@@ -1122,7 +1474,7 @@
     Ok(())
 }
 
-/// Fetch person properties from the database for a given distinct ID and team ID.
+/// Fetch person properties and person ID from the database for a given distinct ID and team ID.
 ///
 /// This function constructs and executes a SQL query to fetch the person properties for a specified distinct ID and team ID.
 /// It returns the fetched properties as a HashMap.
@@ -1130,31 +1482,37 @@ async fn fetch_person_properties_from_db(
     postgres_reader: PostgresReader,
     distinct_id: String,
     team_id: TeamId,
-) -> Result<HashMap<String, Value>, FlagError> {
+) -> Result<(HashMap<String, Value>, i32), FlagError> {
     let mut conn = postgres_reader.as_ref().get_connection().await?;
 
     let query = r#"
-        SELECT "posthog_person"."properties" as person_properties
-        FROM "posthog_person"
-        INNER JOIN "posthog_persondistinctid" ON ("posthog_person"."id" = "posthog_persondistinctid"."person_id")
-        WHERE ("posthog_persondistinctid"."distinct_id" = $1
-                AND "posthog_persondistinctid"."team_id" = $2
-                AND "posthog_person"."team_id" = $2)
-        LIMIT 1
-    "#;
+        SELECT "posthog_person"."id" as person_id, "posthog_person"."properties" as person_properties
+        FROM "posthog_person"
+        INNER JOIN "posthog_persondistinctid" ON ("posthog_person"."id" = "posthog_persondistinctid"."person_id")
+        WHERE ("posthog_persondistinctid"."distinct_id" = $1
+                AND "posthog_persondistinctid"."team_id" = $2
+                AND "posthog_person"."team_id" = $2)
+        LIMIT 1
+    "#;
 
-    let row: Option<Value> = sqlx::query_scalar(query)
+    let row: Option<(i32, Value)> = sqlx::query_as(query)
         .bind(&distinct_id)
         .bind(team_id)
         .fetch_optional(&mut *conn)
         .await?;
 
-    Ok(row
-        .and_then(|v| v.as_object().cloned())
-        .unwrap_or_default()
-        .into_iter()
-        .map(|(k, v)| (k, v.clone()))
-        .collect())
+    match row {
+        Some((person_id, person_props)) => {
+            let properties_map = person_props
+                .as_object()
+                .unwrap_or(&serde_json::Map::new())
+                .iter()
+                .map(|(k, v)| (k.clone(), v.clone()))
+                .collect();
+            Ok((properties_map, person_id))
+        }
+        None => Err(FlagError::PersonNotFound),
+    }
 }
 
 /// Fetch group properties from the database for a given team ID and group type index.
@@ -1216,11 +1574,11 @@ fn locally_computable_property_overrides(
 /// Check if all properties match the given filters
 fn all_properties_match(
     flag_condition_properties: &[PropertyFilter],
-    target_properties: &HashMap<String, Value>,
+    matching_property_values: &HashMap<String, Value>,
 ) -> bool {
     flag_condition_properties
         .iter()
-        .all(|property| match_property(property, target_properties, false).unwrap_or(false))
+        .all(|property| match_property(property, matching_property_values, false).unwrap_or(false))
 }
 
 async fn get_feature_flag_hash_key_overrides(
@@ -1443,6 +1801,7 @@ mod tests {
             OperatorType,
         },
         test_utils::{
+            add_person_to_cohort, get_person_id_by_distinct_id, insert_cohort_for_team_in_pg,
             insert_flag_for_team_in_pg, insert_new_team_in_pg, insert_person_for_team_in_pg,
             setup_pg_reader_client, setup_pg_writer_client,
         },
@@ -1485,6 +1844,7 @@
     async fn test_fetch_properties_from_pg_to_match() {
         let postgres_reader = setup_pg_reader_client(None).await;
         let postgres_writer = setup_pg_writer_client(None).await;
+        let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None));
 
         let team = insert_new_team_in_pg(postgres_reader.clone(), None)
             .await
@@ -1529,12 +1889,13 @@
         ))
         .unwrap();
 
+        // Matcher for a matching distinct_id
         let mut matcher = FeatureFlagMatcher::new(
             distinct_id.clone(),
             team.id,
             postgres_reader.clone(),
             postgres_writer.clone(),
-            None,
+            cohort_cache.clone(),
             None,
             None,
         );
@@ -1542,12 +1903,13 @@
        assert!(match_result.matches);
        assert_eq!(match_result.variant, None);
 
+        // Matcher for a non-matching distinct_id
         let mut matcher = FeatureFlagMatcher::new(
             not_matching_distinct_id.clone(),
             team.id,
             postgres_reader.clone(),
             postgres_writer.clone(),
-            None,
+            cohort_cache.clone(),
             None,
             None,
         );
@@ -1555,24 +1917,27 @@
        assert!(!match_result.matches);
assert_eq!(match_result.variant, None); + // Matcher for a distinct_id that does not exist let mut matcher = FeatureFlagMatcher::new( "other_distinct_id".to_string(), team.id, postgres_reader.clone(), postgres_writer.clone(), - None, + cohort_cache.clone(), None, None, ); - let match_result = matcher.get_match(&flag, None, None).await.unwrap(); - assert!(!match_result.matches); - assert_eq!(match_result.variant, None); + let match_result = matcher.get_match(&flag, None, None).await; + + // Expecting an error for non-existent distinct_id + assert!(match_result.is_err()); } #[tokio::test] async fn test_person_property_overrides() { let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let team = insert_new_team_in_pg(postgres_reader.clone(), None) .await .unwrap(); @@ -1590,6 +1955,7 @@ mod tests { operator: None, prop_type: "person".to_string(), group_type_index: None, + negation: None, }]), rollout_percentage: Some(100.0), variant: None, @@ -1611,7 +1977,7 @@ mod tests { team.id, postgres_reader, postgres_writer, - None, + cohort_cache, None, None, ); @@ -1633,6 +1999,7 @@ mod tests { async fn test_group_property_overrides() { let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let team = insert_new_team_in_pg(postgres_reader.clone(), None) .await .unwrap(); @@ -1650,6 +2017,7 @@ mod tests { operator: None, prop_type: "group".to_string(), group_type_index: Some(1), + negation: None, }]), rollout_percentage: Some(100.0), variant: None, @@ -1664,10 +2032,12 @@ mod tests { None, ); - let mut cache = GroupTypeMappingCache::new(team.id, postgres_reader.clone()); + let mut group_type_mapping_cache = + GroupTypeMappingCache::new(team.id, postgres_reader.clone()); let group_types_to_indexes = [("organization".to_string(), 1)].into_iter().collect(); - cache.group_types_to_indexes = group_types_to_indexes; - cache.group_indexes_to_types = [(1, "organization".to_string())].into_iter().collect(); + group_type_mapping_cache.group_types_to_indexes = group_types_to_indexes; + group_type_mapping_cache.group_indexes_to_types = + [(1, "organization".to_string())].into_iter().collect(); let groups = HashMap::from([("organization".to_string(), json!("org_123"))]); @@ -1684,8 +2054,8 @@ mod tests { team.id, postgres_reader.clone(), postgres_writer.clone(), - Some(cache), - None, + cohort_cache.clone(), + Some(group_type_mapping_cache), Some(groups), ); @@ -1708,14 +2078,14 @@ mod tests { let flag = create_test_flag_with_variants(1); let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; - - let mut cache = GroupTypeMappingCache::new(1, postgres_reader.clone()); + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); + let mut group_type_mapping_cache = GroupTypeMappingCache::new(1, postgres_reader.clone()); let group_types_to_indexes = [("group_type_1".to_string(), 1)].into_iter().collect(); let group_type_index_to_name = [(1, "group_type_1".to_string())].into_iter().collect(); - cache.group_types_to_indexes = group_types_to_indexes; - cache.group_indexes_to_types = group_type_index_to_name; + group_type_mapping_cache.group_types_to_indexes = group_types_to_indexes; + 
group_type_mapping_cache.group_indexes_to_types = group_type_index_to_name; let groups = HashMap::from([("group_type_1".to_string(), json!("group_key_1"))]); @@ -1724,8 +2094,8 @@ mod tests { 1, postgres_reader.clone(), postgres_writer.clone(), - Some(cache), - None, + cohort_cache.clone(), + Some(group_type_mapping_cache), Some(groups), ); let variant = matcher.get_matching_variant(&flag, None).await.unwrap(); @@ -1740,6 +2110,7 @@ mod tests { async fn test_get_matching_variant_with_db() { let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let team = insert_new_team_in_pg(postgres_reader.clone(), None) .await .unwrap(); @@ -1751,7 +2122,7 @@ mod tests { team.id, postgres_reader.clone(), postgres_writer.clone(), - None, + cohort_cache.clone(), None, None, ); @@ -1765,6 +2136,7 @@ mod tests { async fn test_is_condition_match_empty_properties() { let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let flag = create_test_flag( Some(1), None, @@ -1797,7 +2169,7 @@ mod tests { 1, postgres_reader, postgres_writer, - None, + cohort_cache, None, None, ); @@ -1854,6 +2226,7 @@ mod tests { async fn test_overrides_avoid_db_lookups() { let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let team = insert_new_team_in_pg(postgres_reader.clone(), None) .await .unwrap(); @@ -1871,6 +2244,7 @@ mod tests { operator: Some(OperatorType::Exact), prop_type: "person".to_string(), group_type_index: None, + negation: None, }]), rollout_percentage: Some(100.0), variant: None, @@ -1893,7 +2267,7 @@ mod tests { team.id, postgres_reader.clone(), postgres_writer.clone(), - None, + cohort_cache.clone(), None, None, ); @@ -1923,6 +2297,7 @@ mod tests { async fn test_fallback_to_db_when_overrides_insufficient() { let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let team = insert_new_team_in_pg(postgres_reader.clone(), None) .await .unwrap(); @@ -1941,6 +2316,7 @@ mod tests { operator: Some(OperatorType::Exact), prop_type: "person".to_string(), group_type_index: None, + negation: None, }, PropertyFilter { key: "age".to_string(), @@ -1948,6 +2324,7 @@ mod tests { operator: Some(OperatorType::Gte), prop_type: "person".to_string(), group_type_index: None, + negation: None, }, ]), rollout_percentage: Some(100.0), @@ -1982,7 +2359,7 @@ mod tests { team.id, postgres_reader.clone(), postgres_writer.clone(), - None, + cohort_cache.clone(), None, None, ); @@ -2006,6 +2383,7 @@ mod tests { async fn test_property_fetching_and_caching() { let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let team = insert_new_team_in_pg(postgres_reader.clone(), None) .await .unwrap(); @@ -2025,7 +2403,7 @@ mod tests { team.id, postgres_reader.clone(), postgres_writer.clone(), - None, + cohort_cache.clone(), None, None, ); @@ -2050,6 +2428,7 @@ mod tests { async fn 
test_property_caching() { let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let team = insert_new_team_in_pg(postgres_reader.clone(), None) .await .unwrap(); @@ -2069,7 +2448,7 @@ mod tests { team.id, postgres_reader.clone(), postgres_writer.clone(), - None, + cohort_cache.clone(), None, None, ); @@ -2102,7 +2481,7 @@ mod tests { team.id, postgres_reader.clone(), postgres_writer.clone(), - None, + cohort_cache.clone(), None, None, ); @@ -2150,6 +2529,7 @@ mod tests { operator: None, prop_type: "person".to_string(), group_type_index: None, + negation: None, }, PropertyFilter { key: "age".to_string(), @@ -2157,6 +2537,7 @@ mod tests { operator: Some(OperatorType::Gte), prop_type: "person".to_string(), group_type_index: None, + negation: None, }, ]; @@ -2170,6 +2551,7 @@ mod tests { operator: None, prop_type: "person".to_string(), group_type_index: None, + negation: None, }, PropertyFilter { key: "cohort".to_string(), @@ -2177,6 +2559,7 @@ mod tests { operator: None, prop_type: "cohort".to_string(), group_type_index: None, + negation: None, }, ]; @@ -2189,6 +2572,7 @@ mod tests { async fn test_concurrent_flag_evaluation() { let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let team = insert_new_team_in_pg(postgres_reader.clone(), None) .await .unwrap(); @@ -2218,13 +2602,14 @@ mod tests { let flag_clone = flag.clone(); let postgres_reader_clone = postgres_reader.clone(); let postgres_writer_clone = postgres_writer.clone(); + let cohort_cache_clone = cohort_cache.clone(); handles.push(tokio::spawn(async move { let mut matcher = FeatureFlagMatcher::new( format!("test_user_{}", i), team.id, postgres_reader_clone, postgres_writer_clone, - None, + cohort_cache_clone, None, None, ); @@ -2246,6 +2631,7 @@ mod tests { async fn test_property_operators() { let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let team = insert_new_team_in_pg(postgres_reader.clone(), None) .await .unwrap(); @@ -2264,6 +2650,7 @@ mod tests { operator: Some(OperatorType::Gte), prop_type: "person".to_string(), group_type_index: None, + negation: None, }, PropertyFilter { key: "email".to_string(), @@ -2271,6 +2658,7 @@ mod tests { operator: Some(OperatorType::Icontains), prop_type: "person".to_string(), group_type_index: None, + negation: None, }, ]), rollout_percentage: Some(100.0), @@ -2300,7 +2688,7 @@ mod tests { team.id, postgres_reader.clone(), postgres_writer.clone(), - None, + cohort_cache.clone(), None, None, ); @@ -2314,7 +2702,7 @@ mod tests { async fn test_empty_hashed_identifier() { let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; - + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let flag = create_test_flag( Some(1), None, @@ -2341,7 +2729,7 @@ mod tests { 1, postgres_reader, postgres_writer, - None, + cohort_cache, None, None, ); @@ -2355,6 +2743,7 @@ mod tests { async fn test_rollout_percentage() { let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = 
Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let mut flag = create_test_flag( Some(1), None, @@ -2381,7 +2770,7 @@ mod tests { 1, postgres_reader, postgres_writer, - None, + cohort_cache, None, None, ); @@ -2402,7 +2791,7 @@ mod tests { async fn test_uneven_variant_distribution() { let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; - + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let mut flag = create_test_flag_with_variants(1); // Adjust variant rollout percentages to be uneven @@ -2432,7 +2821,7 @@ mod tests { 1, postgres_reader, postgres_writer, - None, + cohort_cache, None, None, ); @@ -2464,6 +2853,7 @@ mod tests { async fn test_missing_properties_in_db() { let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let team = insert_new_team_in_pg(postgres_reader.clone(), None) .await .unwrap(); @@ -2491,6 +2881,7 @@ mod tests { operator: None, prop_type: "person".to_string(), group_type_index: None, + negation: None, }]), rollout_percentage: Some(100.0), variant: None, @@ -2510,7 +2901,7 @@ mod tests { team.id, postgres_reader.clone(), postgres_writer.clone(), - None, + cohort_cache, None, None, ); @@ -2524,6 +2915,7 @@ mod tests { async fn test_malformed_property_data() { let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let team = insert_new_team_in_pg(postgres_reader.clone(), None) .await .unwrap(); @@ -2551,6 +2943,7 @@ mod tests { operator: Some(OperatorType::Gte), prop_type: "person".to_string(), group_type_index: None, + negation: None, }]), rollout_percentage: Some(100.0), variant: None, @@ -2570,7 +2963,7 @@ mod tests { team.id, postgres_reader.clone(), postgres_writer.clone(), - None, + cohort_cache, None, None, ); @@ -2585,6 +2978,7 @@ mod tests { async fn test_get_match_with_insufficient_overrides() { let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let team = insert_new_team_in_pg(postgres_reader.clone(), None) .await .unwrap(); @@ -2603,6 +2997,7 @@ mod tests { operator: None, prop_type: "person".to_string(), group_type_index: None, + negation: None, }, PropertyFilter { key: "age".to_string(), @@ -2610,6 +3005,7 @@ mod tests { operator: Some(OperatorType::Gte), prop_type: "person".to_string(), group_type_index: None, + negation: None, }, ]), rollout_percentage: Some(100.0), @@ -2644,7 +3040,7 @@ mod tests { team.id, postgres_reader.clone(), postgres_writer.clone(), - None, + cohort_cache, None, None, ); @@ -2661,6 +3057,7 @@ mod tests { async fn test_evaluation_reasons() { let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let flag = create_test_flag( Some(1), None, @@ -2687,7 +3084,7 @@ mod tests { 1, postgres_reader.clone(), postgres_writer.clone(), - None, + cohort_cache, None, None, ); @@ -2705,6 +3102,7 @@ mod tests { async fn test_complex_conditions() { let postgres_reader = setup_pg_reader_client(None).await; let 
postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let team = insert_new_team_in_pg(postgres_reader.clone(), None) .await .unwrap(); @@ -2723,6 +3121,7 @@ mod tests { operator: None, prop_type: "person".to_string(), group_type_index: None, + negation: None, }]), rollout_percentage: Some(100.0), variant: None, @@ -2734,6 +3133,7 @@ mod tests { operator: Some(OperatorType::Gte), prop_type: "person".to_string(), group_type_index: None, + negation: None, }]), rollout_percentage: Some(100.0), variant: None, @@ -2763,7 +3163,7 @@ mod tests { team.id, postgres_reader.clone(), postgres_writer.clone(), - None, + cohort_cache, None, None, ); @@ -2777,6 +3177,7 @@ mod tests { async fn test_super_condition_matches_boolean() { let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let team = insert_new_team_in_pg(postgres_reader.clone(), None) .await .unwrap(); @@ -2795,6 +3196,7 @@ mod tests { operator: Some(OperatorType::Exact), prop_type: "person".to_string(), group_type_index: None, + negation: None, }]), rollout_percentage: Some(0.0), variant: None, @@ -2806,6 +3208,7 @@ mod tests { operator: Some(OperatorType::Exact), prop_type: "person".to_string(), group_type_index: None, + negation: None, }]), rollout_percentage: Some(100.0), variant: None, @@ -2826,6 +3229,7 @@ mod tests { operator: Some(OperatorType::Exact), prop_type: "person".to_string(), group_type_index: None, + negation: None, }]), rollout_percentage: Some(100.0), variant: None, @@ -2845,12 +3249,25 @@ mod tests { .await .unwrap(); + insert_person_for_team_in_pg(postgres_reader.clone(), team.id, "lil_id".to_string(), None) + .await + .unwrap(); + + insert_person_for_team_in_pg( + postgres_reader.clone(), + team.id, + "another_id".to_string(), + None, + ) + .await + .unwrap(); + let mut matcher_test_id = FeatureFlagMatcher::new( "test_id".to_string(), team.id, postgres_reader.clone(), postgres_writer.clone(), - None, + cohort_cache.clone(), None, None, ); @@ -2860,7 +3277,7 @@ mod tests { team.id, postgres_reader.clone(), postgres_writer.clone(), - None, + cohort_cache.clone(), None, None, ); @@ -2870,7 +3287,7 @@ mod tests { team.id, postgres_reader.clone(), postgres_writer.clone(), - None, + cohort_cache.clone(), None, None, ); @@ -2897,6 +3314,7 @@ mod tests { async fn test_super_condition_matches_string() { let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let team = insert_new_team_in_pg(postgres_reader.clone(), None) .await .unwrap(); @@ -2924,6 +3342,7 @@ mod tests { operator: Some(OperatorType::Exact), prop_type: "person".to_string(), group_type_index: None, + negation: None, }]), rollout_percentage: Some(0.0), variant: None, @@ -2935,6 +3354,7 @@ mod tests { operator: Some(OperatorType::Exact), prop_type: "person".to_string(), group_type_index: None, + negation: None, }]), rollout_percentage: Some(100.0), variant: None, @@ -2955,6 +3375,7 @@ mod tests { operator: Some(OperatorType::Exact), prop_type: "person".to_string(), group_type_index: None, + negation: None, }]), rollout_percentage: Some(100.0), variant: None, @@ -2970,7 +3391,7 @@ mod tests { team.id, postgres_reader.clone(), postgres_writer.clone(), - None, + 
cohort_cache.clone(), None, None, ); @@ -2986,6 +3407,7 @@ mod tests { async fn test_super_condition_matches_and_false() { let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let team = insert_new_team_in_pg(postgres_reader.clone(), None) .await .unwrap(); @@ -2999,6 +3421,19 @@ mod tests { .await .unwrap(); + insert_person_for_team_in_pg( + postgres_reader.clone(), + team.id, + "another_id".to_string(), + None, + ) + .await + .unwrap(); + + insert_person_for_team_in_pg(postgres_reader.clone(), team.id, "lil_id".to_string(), None) + .await + .unwrap(); + let flag = create_test_flag( Some(1), Some(team.id), @@ -3013,6 +3448,7 @@ mod tests { operator: Some(OperatorType::Exact), prop_type: "person".to_string(), group_type_index: None, + negation: None, }]), rollout_percentage: Some(0.0), variant: None, @@ -3024,6 +3460,7 @@ mod tests { operator: Some(OperatorType::Exact), prop_type: "person".to_string(), group_type_index: None, + negation: None, }]), rollout_percentage: Some(100.0), variant: None, @@ -3044,6 +3481,7 @@ mod tests { operator: Some(OperatorType::Exact), prop_type: "person".to_string(), group_type_index: None, + negation: None, }]), rollout_percentage: Some(100.0), variant: None, @@ -3059,7 +3497,7 @@ mod tests { team.id, postgres_reader.clone(), postgres_writer.clone(), - None, + cohort_cache.clone(), None, None, ); @@ -3069,7 +3507,7 @@ mod tests { team.id, postgres_reader.clone(), postgres_writer.clone(), - None, + cohort_cache.clone(), None, None, ); @@ -3079,7 +3517,7 @@ mod tests { team.id, postgres_reader.clone(), postgres_writer.clone(), - None, + cohort_cache.clone(), None, None, ); @@ -3116,6 +3554,811 @@ mod tests { assert_eq!(result_another_id.condition_index, Some(2)); } + #[tokio::test] + async fn test_basic_cohort_matching() { + let postgres_reader = setup_pg_reader_client(None).await; + let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); + let team = insert_new_team_in_pg(postgres_reader.clone(), None) + .await + .unwrap(); + + // Insert a cohort with the condition that matches the test user's properties + let cohort_row = insert_cohort_for_team_in_pg( + postgres_reader.clone(), + team.id, + None, + json!({ + "properties": { + "type": "OR", + "values": [{ + "type": "OR", + "values": [{ + "key": "$browser_version", + "type": "person", + "value": "125", + "negation": false, + "operator": "gt" + }] + }] + } + }), + false, + ) + .await + .unwrap(); + + // Insert a person with properties that match the cohort condition + insert_person_for_team_in_pg( + postgres_reader.clone(), + team.id, + "test_user".to_string(), + Some(json!({"$browser_version": 126})), + ) + .await + .unwrap(); + + // Define a flag with a cohort filter + let flag = create_test_flag( + None, + Some(team.id), + None, + None, + Some(FlagFilters { + groups: vec![FlagGroupType { + properties: Some(vec![PropertyFilter { + key: "id".to_string(), + value: json!(cohort_row.id), + operator: Some(OperatorType::In), + prop_type: "cohort".to_string(), + group_type_index: None, + negation: Some(false), + }]), + rollout_percentage: Some(100.0), + variant: None, + }], + multivariate: None, + aggregation_group_type_index: None, + payloads: None, + super_groups: None, + }), + None, + None, + None, + ); + + let mut matcher = FeatureFlagMatcher::new( + 
"test_user".to_string(), + team.id, + postgres_reader.clone(), + postgres_writer.clone(), + cohort_cache.clone(), + None, + None, + ); + + let result = matcher.get_match(&flag, None, None).await.unwrap(); + + assert!(result.matches); + } + + #[tokio::test] + async fn test_not_in_cohort_matching() { + let postgres_reader = setup_pg_reader_client(None).await; + let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); + let team = insert_new_team_in_pg(postgres_reader.clone(), None) + .await + .unwrap(); + + // Insert a cohort with a condition that does not match the test user's properties + let cohort_row = insert_cohort_for_team_in_pg( + postgres_reader.clone(), + team.id, + None, + json!({ + "properties": { + "type": "OR", + "values": [{ + "type": "OR", + "values": [{ + "key": "$browser_version", + "type": "person", + "value": "130", + "negation": false, + "operator": "gt" + }] + }] + } + }), + false, + ) + .await + .unwrap(); + + // Insert a person with properties that do not match the cohort condition + insert_person_for_team_in_pg( + postgres_reader.clone(), + team.id, + "test_user".to_string(), + Some(json!({"$browser_version": 126})), + ) + .await + .unwrap(); + + // Define a flag with a NotIn cohort filter + let flag = create_test_flag( + None, + Some(team.id), + None, + None, + Some(FlagFilters { + groups: vec![FlagGroupType { + properties: Some(vec![PropertyFilter { + key: "id".to_string(), + value: json!(cohort_row.id), + operator: Some(OperatorType::NotIn), + prop_type: "cohort".to_string(), + group_type_index: None, + negation: Some(false), + }]), + rollout_percentage: Some(100.0), + variant: None, + }], + multivariate: None, + aggregation_group_type_index: None, + payloads: None, + super_groups: None, + }), + None, + None, + None, + ); + + let mut matcher = FeatureFlagMatcher::new( + "test_user".to_string(), + team.id, + postgres_reader.clone(), + postgres_writer.clone(), + cohort_cache.clone(), + None, + None, + ); + + let result = matcher.get_match(&flag, None, None).await.unwrap(); + + assert!(result.matches); + } + + #[tokio::test] + async fn test_not_in_cohort_matching_user_in_cohort() { + let postgres_reader = setup_pg_reader_client(None).await; + let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); + let team = insert_new_team_in_pg(postgres_reader.clone(), None) + .await + .unwrap(); + + // Insert a cohort with a condition that matches the test user's properties + let cohort_row = insert_cohort_for_team_in_pg( + postgres_reader.clone(), + team.id, + None, + json!({ + "properties": { + "type": "OR", + "values": [{ + "type": "OR", + "values": [{ + "key": "$browser_version", + "type": "person", + "value": "125", + "negation": false, + "operator": "gt" + }] + }] + } + }), + false, + ) + .await + .unwrap(); + + // Insert a person with properties that match the cohort condition + insert_person_for_team_in_pg( + postgres_reader.clone(), + team.id, + "test_user".to_string(), + Some(json!({"$browser_version": 126})), + ) + .await + .unwrap(); + + // Define a flag with a NotIn cohort filter + let flag = create_test_flag( + None, + Some(team.id), + None, + None, + Some(FlagFilters { + groups: vec![FlagGroupType { + properties: Some(vec![PropertyFilter { + key: "id".to_string(), + value: json!(cohort_row.id), + operator: Some(OperatorType::NotIn), + prop_type: "cohort".to_string(), + 
group_type_index: None, + negation: Some(false), + }]), + rollout_percentage: Some(100.0), + variant: None, + }], + multivariate: None, + aggregation_group_type_index: None, + payloads: None, + super_groups: None, + }), + None, + None, + None, + ); + + let mut matcher = FeatureFlagMatcher::new( + "test_user".to_string(), + team.id, + postgres_reader.clone(), + postgres_writer.clone(), + cohort_cache.clone(), + None, + None, + ); + + let result = matcher.get_match(&flag, None, None).await.unwrap(); + + // The user matches the cohort, but the flag is set to NotIn, so it should evaluate to false + assert!(!result.matches); + } + + #[tokio::test] + async fn test_cohort_dependent_on_another_cohort() { + let postgres_reader = setup_pg_reader_client(None).await; + let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); + let team = insert_new_team_in_pg(postgres_reader.clone(), None) + .await + .unwrap(); + + // Insert a base cohort + let base_cohort_row = insert_cohort_for_team_in_pg( + postgres_reader.clone(), + team.id, + None, + json!({ + "properties": { + "type": "OR", + "values": [{ + "type": "OR", + "values": [{ + "key": "$browser_version", + "type": "person", + "value": "125", + "negation": false, + "operator": "gt" + }] + }] + } + }), + false, + ) + .await + .unwrap(); + + // Insert a dependent cohort that includes the base cohort + let dependent_cohort_row = insert_cohort_for_team_in_pg( + postgres_reader.clone(), + team.id, + None, + json!({ + "properties": { + "type": "OR", + "values": [{ + "type": "OR", + "values": [{ + "key": "id", + "type": "cohort", + "value": base_cohort_row.id, + "negation": false, + "operator": "in" + }] + }] + } + }), + false, + ) + .await + .unwrap(); + + // Insert a person with properties that match the base cohort condition + insert_person_for_team_in_pg( + postgres_reader.clone(), + team.id, + "test_user".to_string(), + Some(json!({"$browser_version": 126})), + ) + .await + .unwrap(); + + // Define a flag with a cohort filter that depends on another cohort + let flag = create_test_flag( + None, + Some(team.id), + None, + None, + Some(FlagFilters { + groups: vec![FlagGroupType { + properties: Some(vec![PropertyFilter { + key: "id".to_string(), + value: json!(dependent_cohort_row.id), + operator: Some(OperatorType::In), + prop_type: "cohort".to_string(), + group_type_index: None, + negation: Some(false), + }]), + rollout_percentage: Some(100.0), + variant: None, + }], + multivariate: None, + aggregation_group_type_index: None, + payloads: None, + super_groups: None, + }), + None, + None, + None, + ); + + let mut matcher = FeatureFlagMatcher::new( + "test_user".to_string(), + team.id, + postgres_reader.clone(), + postgres_writer.clone(), + cohort_cache.clone(), + None, + None, + ); + + let result = matcher.get_match(&flag, None, None).await.unwrap(); + + assert!(result.matches); + } + + #[tokio::test] + async fn test_in_cohort_matching_user_not_in_cohort() { + let postgres_reader = setup_pg_reader_client(None).await; + let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); + let team = insert_new_team_in_pg(postgres_reader.clone(), None) + .await + .unwrap(); + + // Insert a cohort with a condition that does not match the test user's properties + let cohort_row = insert_cohort_for_team_in_pg( + postgres_reader.clone(), + team.id, + None, + json!({ + "properties": { + 
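// --------------------------------------------------------------------------
// Illustrative sketch (annotation, not part of the patch):
// test_cohort_dependent_on_another_cohort above covers cohorts that reference
// other cohorts by id, so evaluation must resolve the dependency chain before
// the referencing cohort can be decided. A deliberately simplified model
// (AND-only semantics, no cycle handling; the real filters support nested
// OR/AND groups):
use std::collections::HashMap;

struct CohortNode {
    depends_on: Vec<i32>, // ids referenced through "type": "cohort" filters
    leaf_matches: bool,   // result of the cohort's own person-property filters
}

fn matches_cohort(id: i32, graph: &HashMap<i32, CohortNode>) -> bool {
    let node = &graph[&id]; // panics on unknown ids; fine for a sketch
    node.leaf_matches && node.depends_on.iter().all(|dep| matches_cohort(*dep, graph))
}
// --------------------------------------------------------------------------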
"type": "OR", + "values": [{ + "type": "OR", + "values": [{ + "key": "$browser_version", + "type": "person", + "value": "130", + "negation": false, + "operator": "gt" + }] + }] + } + }), + false, + ) + .await + .unwrap(); + + // Insert a person with properties that do not match the cohort condition + insert_person_for_team_in_pg( + postgres_reader.clone(), + team.id, + "test_user".to_string(), + Some(json!({"$browser_version": 125})), + ) + .await + .unwrap(); + + // Define a flag with an In cohort filter + let flag = create_test_flag( + None, + Some(team.id), + None, + None, + Some(FlagFilters { + groups: vec![FlagGroupType { + properties: Some(vec![PropertyFilter { + key: "id".to_string(), + value: json!(cohort_row.id), + operator: Some(OperatorType::In), + prop_type: "cohort".to_string(), + group_type_index: None, + negation: Some(false), + }]), + rollout_percentage: Some(100.0), + variant: None, + }], + multivariate: None, + aggregation_group_type_index: None, + payloads: None, + super_groups: None, + }), + None, + None, + None, + ); + + let mut matcher = FeatureFlagMatcher::new( + "test_user".to_string(), + team.id, + postgres_reader.clone(), + postgres_writer.clone(), + cohort_cache.clone(), + None, + None, + ); + + let result = matcher.get_match(&flag, None, None).await.unwrap(); + + // The user does not match the cohort, and the flag is set to In, so it should evaluate to false + assert!(!result.matches); + } + + #[tokio::test] + async fn test_static_cohort_matching_user_in_cohort() { + let postgres_reader = setup_pg_reader_client(None).await; + let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); + let team = insert_new_team_in_pg(postgres_reader.clone(), None) + .await + .unwrap(); + + // Insert a static cohort + let cohort = insert_cohort_for_team_in_pg( + postgres_reader.clone(), + team.id, + Some("Static Cohort".to_string()), + json!({}), // Static cohorts don't have property filters + true, // is_static = true + ) + .await + .unwrap(); + + // Insert a person + let distinct_id = "static_user".to_string(); + insert_person_for_team_in_pg( + postgres_reader.clone(), + team.id, + distinct_id.clone(), + Some(json!({"email": "static@user.com"})), + ) + .await + .unwrap(); + + // Retrieve the person's ID + let person_id = + get_person_id_by_distinct_id(postgres_reader.clone(), team.id, &distinct_id) + .await + .unwrap(); + + // Associate the person with the static cohort + add_person_to_cohort(postgres_reader.clone(), person_id, cohort.id) + .await + .unwrap(); + + // Define a flag with an 'In' cohort filter + let flag = create_test_flag( + None, + Some(team.id), + None, + None, + Some(FlagFilters { + groups: vec![FlagGroupType { + properties: Some(vec![PropertyFilter { + key: "id".to_string(), + value: json!(cohort.id), + operator: Some(OperatorType::In), + prop_type: "cohort".to_string(), + group_type_index: None, + negation: Some(false), + }]), + rollout_percentage: Some(100.0), + variant: None, + }], + multivariate: None, + aggregation_group_type_index: None, + payloads: None, + super_groups: None, + }), + None, + None, + None, + ); + + let mut matcher = FeatureFlagMatcher::new( + distinct_id.clone(), + team.id, + postgres_reader.clone(), + postgres_writer.clone(), + cohort_cache.clone(), + None, + None, + ); + + let result = matcher.get_match(&flag, None, None).await.unwrap(); + + assert!( + result.matches, + "User should match the static cohort and flag" + ); + } + + #[tokio::test] 
+ async fn test_static_cohort_matching_user_not_in_cohort() { + let postgres_reader = setup_pg_reader_client(None).await; + let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); + let team = insert_new_team_in_pg(postgres_reader.clone(), None) + .await + .unwrap(); + + // Insert a static cohort + let cohort = insert_cohort_for_team_in_pg( + postgres_reader.clone(), + team.id, + Some("Another Static Cohort".to_string()), + json!({}), // Static cohorts don't have property filters + true, + ) + .await + .unwrap(); + + // Insert a person + let distinct_id = "non_static_user".to_string(); + insert_person_for_team_in_pg( + postgres_reader.clone(), + team.id, + distinct_id.clone(), + Some(json!({"email": "nonstatic@user.com"})), + ) + .await + .unwrap(); + + // Note: Do NOT associate the person with the static cohort + + // Define a flag with an 'In' cohort filter + let flag = create_test_flag( + None, + Some(team.id), + None, + None, + Some(FlagFilters { + groups: vec![FlagGroupType { + properties: Some(vec![PropertyFilter { + key: "id".to_string(), + value: json!(cohort.id), + operator: Some(OperatorType::In), + prop_type: "cohort".to_string(), + group_type_index: None, + negation: Some(false), + }]), + rollout_percentage: Some(100.0), + variant: None, + }], + multivariate: None, + aggregation_group_type_index: None, + payloads: None, + super_groups: None, + }), + None, + None, + None, + ); + + let mut matcher = FeatureFlagMatcher::new( + distinct_id.clone(), + team.id, + postgres_reader.clone(), + postgres_writer.clone(), + cohort_cache.clone(), + None, + None, + ); + + let result = matcher.get_match(&flag, None, None).await.unwrap(); + + assert!( + !result.matches, + "User should not match the static cohort and flag" + ); + } + + #[tokio::test] + async fn test_static_cohort_not_in_matching_user_not_in_cohort() { + let postgres_reader = setup_pg_reader_client(None).await; + let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); + let team = insert_new_team_in_pg(postgres_reader.clone(), None) + .await + .unwrap(); + + // Insert a static cohort + let cohort = insert_cohort_for_team_in_pg( + postgres_reader.clone(), + team.id, + Some("Static Cohort NotIn".to_string()), + json!({}), // Static cohorts don't have property filters + true, // is_static = true + ) + .await + .unwrap(); + + // Insert a person + let distinct_id = "not_in_static_user".to_string(); + insert_person_for_team_in_pg( + postgres_reader.clone(), + team.id, + distinct_id.clone(), + Some(json!({"email": "notinstatic@user.com"})), + ) + .await + .unwrap(); + + // No association with the static cohort + + // Define a flag with a 'NotIn' cohort filter + let flag = create_test_flag( + None, + Some(team.id), + None, + None, + Some(FlagFilters { + groups: vec![FlagGroupType { + properties: Some(vec![PropertyFilter { + key: "id".to_string(), + value: json!(cohort.id), + operator: Some(OperatorType::NotIn), + prop_type: "cohort".to_string(), + group_type_index: None, + negation: Some(false), + }]), + rollout_percentage: Some(100.0), + variant: None, + }], + multivariate: None, + aggregation_group_type_index: None, + payloads: None, + super_groups: None, + }), + None, + None, + None, + ); + + let mut matcher = FeatureFlagMatcher::new( + distinct_id.clone(), + team.id, + postgres_reader.clone(), + postgres_writer.clone(), + cohort_cache.clone(), + 
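// --------------------------------------------------------------------------
// Illustrative sketch (annotation, not part of the patch): the static-cohort
// tests above never evaluate property filters; membership is a plain row
// lookup in posthog_cohortpeople, the same table the new add_person_to_cohort
// helper writes to. A sketch of such a lookup with sqlx (the matcher's actual
// query may differ):
async fn is_in_static_cohort(
    conn: &mut sqlx::PgConnection,
    cohort_id: i32,
    person_id: i32,
) -> Result<bool, sqlx::Error> {
    let row: (i64,) = sqlx::query_as(
        "SELECT count(*) FROM posthog_cohortpeople WHERE cohort_id = $1 AND person_id = $2",
    )
    .bind(cohort_id)
    .bind(person_id)
    .fetch_one(conn)
    .await?;
    Ok(row.0 > 0)
}
// --------------------------------------------------------------------------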
None, + None, + ); + + let result = matcher.get_match(&flag, None, None).await.unwrap(); + + assert!( + result.matches, + "User not in the static cohort should match the 'NotIn' flag" + ); + } + + #[tokio::test] + async fn test_static_cohort_not_in_matching_user_in_cohort() { + let postgres_reader = setup_pg_reader_client(None).await; + let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); + let team = insert_new_team_in_pg(postgres_reader.clone(), None) + .await + .unwrap(); + + // Insert a static cohort + let cohort = insert_cohort_for_team_in_pg( + postgres_reader.clone(), + team.id, + Some("Static Cohort NotIn User In".to_string()), + json!({}), // Static cohorts don't have property filters + true, // is_static = true + ) + .await + .unwrap(); + + // Insert a person + let distinct_id = "in_not_in_static_user".to_string(); + insert_person_for_team_in_pg( + postgres_reader.clone(), + team.id, + distinct_id.clone(), + Some(json!({"email": "innotinstatic@user.com"})), + ) + .await + .unwrap(); + + // Retrieve the person's ID + let person_id = + get_person_id_by_distinct_id(postgres_reader.clone(), team.id, &distinct_id) + .await + .unwrap(); + + // Associate the person with the static cohort + add_person_to_cohort(postgres_reader.clone(), person_id, cohort.id) + .await + .unwrap(); + + // Define a flag with a 'NotIn' cohort filter + let flag = create_test_flag( + None, + Some(team.id), + None, + None, + Some(FlagFilters { + groups: vec![FlagGroupType { + properties: Some(vec![PropertyFilter { + key: "id".to_string(), + value: json!(cohort.id), + operator: Some(OperatorType::NotIn), + prop_type: "cohort".to_string(), + group_type_index: None, + negation: Some(false), + }]), + rollout_percentage: Some(100.0), + variant: None, + }], + multivariate: None, + aggregation_group_type_index: None, + payloads: None, + super_groups: None, + }), + None, + None, + None, + ); + + let mut matcher = FeatureFlagMatcher::new( + distinct_id.clone(), + team.id, + postgres_reader.clone(), + postgres_writer.clone(), + cohort_cache.clone(), + None, + None, + ); + + let result = matcher.get_match(&flag, None, None).await.unwrap(); + + assert!( + !result.matches, + "User in the static cohort should not match the 'NotIn' flag" + ); + } + #[tokio::test] async fn test_set_feature_flag_hash_key_overrides_success() { let postgres_reader = setup_pg_reader_client(None).await; @@ -3123,7 +4366,7 @@ mod tests { let team = insert_new_team_in_pg(postgres_reader.clone(), None) .await .unwrap(); - let distinct_id = "user1".to_string(); + let distinct_id = "user2".to_string(); // Insert person insert_person_for_team_in_pg(postgres_reader.clone(), team.id, distinct_id.clone(), None) @@ -3148,7 +4391,7 @@ mod tests { Some(true), // ensure_experience_continuity ); - // need to convert flag to FeatureFlagRow + // Convert flag to FeatureFlagRow let flag_row = FeatureFlagRow { id: flag.id, team_id: flag.team_id, @@ -3165,8 +4408,8 @@ mod tests { .await .unwrap(); - // Attempt to set hash key override - let result = set_feature_flag_hash_key_overrides( + // Set hash key override + set_feature_flag_hash_key_overrides( postgres_writer.clone(), team.id, vec![distinct_id.clone()], @@ -3175,9 +4418,7 @@ mod tests { .await .unwrap(); - assert!(result, "Hash key override should be set successfully"); - - // Retrieve the hash key overrides + // Retrieve hash key overrides let overrides = get_feature_flag_hash_key_overrides( 
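// --------------------------------------------------------------------------
// Illustrative sketch (annotation, not part of the patch): the hash-key
// override tests above and below all rest on one rule. For a flag with
// ensure_experience_continuity, the rollout hash is computed from the stored
// override key when one exists, and from the distinct_id otherwise, so a user
// keeps their bucket across identify calls. Sketch of the identifier
// selection only (the hashing itself lives in flag_matching):
fn hashed_identifier(distinct_id: &str, hash_key_override: Option<&str>) -> String {
    match hash_key_override {
        Some(key) => key.to_string(), // e.g. "hash_key_2" in the test above
        None => distinct_id.to_string(),
    }
}
// --------------------------------------------------------------------------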
postgres_reader.clone(), team.id, @@ -3186,14 +4427,10 @@ mod tests { .await .unwrap(); - assert!( - !overrides.is_empty(), - "At least one hash key override should be set" - ); assert_eq!( overrides.get("test_flag"), Some(&"hash_key_2".to_string()), - "Hash key override for 'test_flag' should match the set value" + "Hash key override should match the set value" ); } @@ -3271,10 +4508,12 @@ mod tests { "Hash key override should match the set value" ); } + #[tokio::test] async fn test_evaluate_feature_flags_with_experience_continuity() { let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let team = insert_new_team_in_pg(postgres_reader.clone(), None) .await .unwrap(); @@ -3304,6 +4543,7 @@ mod tests { operator: None, prop_type: "person".to_string(), group_type_index: None, + negation: None, }]), rollout_percentage: Some(100.0), variant: None, @@ -3337,7 +4577,7 @@ mod tests { team.id, postgres_reader.clone(), postgres_writer.clone(), - None, + cohort_cache.clone(), None, None, ) @@ -3356,12 +4596,12 @@ mod tests { async fn test_evaluate_feature_flags_with_continuity_missing_override() { let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let team = insert_new_team_in_pg(postgres_reader.clone(), None) .await .unwrap(); let distinct_id = "user4".to_string(); - // Insert person insert_person_for_team_in_pg( postgres_reader.clone(), team.id, @@ -3385,6 +4625,7 @@ mod tests { operator: None, prop_type: "person".to_string(), group_type_index: None, + negation: None, }]), rollout_percentage: Some(100.0), variant: None, @@ -3408,7 +4649,7 @@ mod tests { team.id, postgres_reader.clone(), postgres_writer.clone(), - None, + cohort_cache.clone(), None, None, ) @@ -3427,12 +4668,12 @@ mod tests { async fn test_evaluate_all_feature_flags_mixed_continuity() { let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let team = insert_new_team_in_pg(postgres_reader.clone(), None) .await .unwrap(); let distinct_id = "user5".to_string(); - // Insert person insert_person_for_team_in_pg( postgres_reader.clone(), team.id, @@ -3456,6 +4697,7 @@ mod tests { operator: None, prop_type: "person".to_string(), group_type_index: None, + negation: None, }]), rollout_percentage: Some(100.0), variant: None, @@ -3484,6 +4726,7 @@ mod tests { operator: Some(OperatorType::Gt), prop_type: "person".to_string(), group_type_index: None, + negation: None, }]), rollout_percentage: Some(100.0), variant: None, @@ -3517,7 +4760,7 @@ mod tests { team.id, postgres_reader.clone(), postgres_writer.clone(), - None, + cohort_cache.clone(), None, None, ) diff --git a/rust/feature-flags/src/flag_request.rs b/rust/feature-flags/src/flag_request.rs index 771c216834c..1cf64eb879a 100644 --- a/rust/feature-flags/src/flag_request.rs +++ b/rust/feature-flags/src/flag_request.rs @@ -158,8 +158,8 @@ impl FlagRequest { pub async fn get_flags_from_cache_or_pg( &self, team_id: i32, - redis_client: Arc, - pg_client: Arc, + redis_client: &Arc, + pg_client: &Arc, ) -> Result { let mut cache_hit = false; let flags = match FeatureFlagList::from_redis(redis_client.clone(), team_id).await { @@ -167,10 
+167,14 @@ impl FlagRequest { cache_hit = true; Ok(flags) } - Err(_) => match FeatureFlagList::from_pg(pg_client, team_id).await { + Err(_) => match FeatureFlagList::from_pg(pg_client.clone(), team_id).await { Ok(flags) => { - if let Err(e) = - FeatureFlagList::update_flags_in_redis(redis_client, team_id, &flags).await + if let Err(e) = FeatureFlagList::update_flags_in_redis( + redis_client.clone(), + team_id, + &flags, + ) + .await { tracing::warn!("Failed to update Redis cache: {}", e); // TODO add new metric category for this @@ -206,7 +210,6 @@ mod tests { TEAM_FLAGS_CACHE_PREFIX, }; use crate::flag_request::FlagRequest; - use crate::redis::Client as RedisClient; use crate::team::Team; use crate::test_utils::{insert_new_team_in_redis, setup_pg_reader_client, setup_redis_client}; use bytes::Bytes; @@ -360,6 +363,7 @@ mod tests { operator: Some(OperatorType::Exact), prop_type: "person".to_string(), group_type_index: None, + negation: None, }]), rollout_percentage: Some(50.0), variant: None, @@ -402,6 +406,7 @@ mod tests { operator: Some(OperatorType::Exact), prop_type: "person".to_string(), group_type_index: None, + negation: None, }]), rollout_percentage: Some(100.0), variant: None, @@ -426,7 +431,7 @@ mod tests { // Test fetching from Redis let result = flag_request - .get_flags_from_cache_or_pg(team.id, redis_client.clone(), pg_client.clone()) + .get_flags_from_cache_or_pg(team.id, &redis_client, &pg_client) .await; assert!(result.is_ok()); let fetched_flags = result.unwrap(); @@ -483,7 +488,7 @@ mod tests { .expect("Failed to remove flags from Redis"); let result = flag_request - .get_flags_from_cache_or_pg(team.id, redis_client.clone(), pg_client.clone()) + .get_flags_from_cache_or_pg(team.id, &redis_client, &pg_client) .await; assert!(result.is_ok()); // Verify that the flags were re-added to Redis diff --git a/rust/feature-flags/src/lib.rs b/rust/feature-flags/src/lib.rs index 051b3e27697..67659bfcf9d 100644 --- a/rust/feature-flags/src/lib.rs +++ b/rust/feature-flags/src/lib.rs @@ -1,4 +1,7 @@ pub mod api; +pub mod cohort_cache; +pub mod cohort_models; +pub mod cohort_operations; pub mod config; pub mod database; pub mod feature_flag_match_reason; @@ -8,13 +11,13 @@ pub mod flag_matching; pub mod flag_request; pub mod geoip; pub mod metrics_consts; +pub mod metrics_utils; pub mod property_matching; pub mod redis; pub mod request_handler; pub mod router; pub mod server; pub mod team; -pub mod utils; pub mod v0_endpoint; // Test modules don't need to be compiled with main binary diff --git a/rust/feature-flags/src/utils.rs b/rust/feature-flags/src/metrics_utils.rs similarity index 100% rename from rust/feature-flags/src/utils.rs rename to rust/feature-flags/src/metrics_utils.rs diff --git a/rust/feature-flags/src/property_matching.rs b/rust/feature-flags/src/property_matching.rs index 8d12fe6ab5e..84479f13161 100644 --- a/rust/feature-flags/src/property_matching.rs +++ b/rust/feature-flags/src/property_matching.rs @@ -44,7 +44,7 @@ pub fn match_property( } let key = &property.key; - let operator = property.operator.clone().unwrap_or(OperatorType::Exact); + let operator = property.operator.unwrap_or(OperatorType::Exact); let value = &property.value; let match_value = matching_property_values.get(key); @@ -193,6 +193,12 @@ pub fn match_property( // Ok(false) // } } + OperatorType::In | OperatorType::NotIn => { + // TODO: we handle these in cohort matching, so we can just return false here + // because by the time we match properties, we've already decomposed the cohort + // filter 
into multiple property filters + Ok(false) + } } } @@ -260,6 +266,7 @@ mod test_match_properties { operator: None, prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(match_property( @@ -313,6 +320,7 @@ mod test_match_properties { operator: Some(OperatorType::Exact), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(match_property( @@ -335,6 +343,7 @@ mod test_match_properties { operator: Some(OperatorType::Exact), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(match_property( @@ -379,6 +388,7 @@ mod test_match_properties { operator: Some(OperatorType::IsNot), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(match_property( @@ -416,6 +426,7 @@ mod test_match_properties { operator: Some(OperatorType::IsNot), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(match_property( @@ -490,6 +501,7 @@ mod test_match_properties { operator: Some(OperatorType::IsSet), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(match_property( @@ -538,6 +550,7 @@ mod test_match_properties { operator: Some(OperatorType::Icontains), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(match_property( @@ -595,6 +608,7 @@ mod test_match_properties { operator: Some(OperatorType::Icontains), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(match_property( @@ -634,6 +648,7 @@ mod test_match_properties { operator: Some(OperatorType::Regex), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(match_property( @@ -674,6 +689,7 @@ mod test_match_properties { operator: Some(OperatorType::Regex), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(match_property( &property_b, @@ -708,6 +724,7 @@ mod test_match_properties { operator: Some(OperatorType::Regex), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(!match_property( @@ -730,6 +747,7 @@ mod test_match_properties { operator: Some(OperatorType::Regex), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(match_property( &property_d, @@ -760,6 +778,7 @@ mod test_match_properties { operator: Some(OperatorType::Gt), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(match_property( @@ -802,6 +821,7 @@ mod test_match_properties { operator: Some(OperatorType::Lt), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(match_property( @@ -848,6 +868,7 @@ mod test_match_properties { operator: Some(OperatorType::Gte), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(match_property( @@ -889,6 +910,7 @@ mod test_match_properties { operator: Some(OperatorType::Lt), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(match_property( @@ -935,6 +957,7 @@ mod test_match_properties { operator: Some(OperatorType::Lt), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(match_property( @@ -1013,6 +1036,7 @@ mod test_match_properties { operator: Some(OperatorType::IsNot), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(!match_property( @@ -1034,6 +1058,7 @@ mod test_match_properties { operator: Some(OperatorType::IsSet), prop_type: "person".to_string(), 
group_type_index: None, + negation: None, }; assert!(match_property( @@ -1049,6 +1074,7 @@ mod test_match_properties { operator: Some(OperatorType::Icontains), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(match_property( @@ -1070,6 +1096,7 @@ mod test_match_properties { operator: Some(OperatorType::Regex), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(!match_property( @@ -1085,6 +1112,7 @@ mod test_match_properties { operator: Some(OperatorType::Regex), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(!match_property( @@ -1118,6 +1146,7 @@ mod test_match_properties { operator: None, prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(!match_property( @@ -1137,6 +1166,7 @@ mod test_match_properties { operator: Some(OperatorType::Exact), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(!match_property( @@ -1152,6 +1182,7 @@ mod test_match_properties { operator: Some(OperatorType::IsSet), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(!match_property( @@ -1167,6 +1198,7 @@ mod test_match_properties { operator: Some(OperatorType::IsNotSet), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(match_property( @@ -1203,6 +1235,7 @@ mod test_match_properties { operator: Some(OperatorType::Icontains), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(!match_property( @@ -1218,6 +1251,7 @@ mod test_match_properties { operator: Some(OperatorType::NotIcontains), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(!match_property( @@ -1233,6 +1267,7 @@ mod test_match_properties { operator: Some(OperatorType::Regex), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(!match_property( @@ -1248,6 +1283,7 @@ mod test_match_properties { operator: Some(OperatorType::NotRegex), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(!match_property( @@ -1263,6 +1299,7 @@ mod test_match_properties { operator: Some(OperatorType::Gt), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(!match_property( @@ -1278,6 +1315,7 @@ mod test_match_properties { operator: Some(OperatorType::Gte), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(!match_property( @@ -1293,6 +1331,7 @@ mod test_match_properties { operator: Some(OperatorType::Lt), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(!match_property( @@ -1308,6 +1347,7 @@ mod test_match_properties { operator: Some(OperatorType::Lte), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(!match_property( @@ -1324,6 +1364,7 @@ mod test_match_properties { operator: Some(OperatorType::IsDateBefore), prop_type: "person".to_string(), group_type_index: None, + negation: None, }; assert!(!match_property( diff --git a/rust/feature-flags/src/request_handler.rs b/rust/feature-flags/src/request_handler.rs index 5e0be8faacc..5ef43896e64 100644 --- a/rust/feature-flags/src/request_handler.rs +++ b/rust/feature-flags/src/request_handler.rs @@ -1,5 +1,6 @@ use crate::{ api::{FlagError, FlagsResponse}, + cohort_cache::CohortCacheManager, database::Client, flag_definitions::FeatureFlagList, flag_matching::{FeatureFlagMatcher, GroupTypeMappingCache}, @@ 
-69,6 +70,7 @@ pub struct FeatureFlagEvaluationContext { feature_flags: FeatureFlagList, postgres_reader: Arc, postgres_writer: Arc, + cohort_cache: Arc, #[builder(default)] person_property_overrides: Option>, #[builder(default)] @@ -95,6 +97,7 @@ pub async fn process_request(context: RequestContext) -> Result Result = state.postgres_reader.clone(); - let postgres_writer_dyn: Arc = state.postgres_writer.clone(); - let evaluation_context = FeatureFlagEvaluationContextBuilder::default() .team_id(team_id) .distinct_id(distinct_id) .feature_flags(feature_flags_from_cache_or_pg) - .postgres_reader(postgres_reader_dyn) - .postgres_writer(postgres_writer_dyn) + .postgres_reader(state.postgres_reader.clone()) + .postgres_writer(state.postgres_writer.clone()) + .cohort_cache(state.cohort_cache.clone()) .person_property_overrides(person_property_overrides) .group_property_overrides(group_property_overrides) .groups(groups) @@ -224,8 +225,8 @@ pub async fn evaluate_feature_flags(context: FeatureFlagEvaluationContext) -> Fl context.team_id, context.postgres_reader, context.postgres_writer, + context.cohort_cache, Some(group_type_mapping_cache), - None, // TODO maybe remove this from the matcher struct, since it's used internally but not passed around context.groups, ); feature_flag_matcher @@ -359,6 +360,7 @@ mod tests { async fn test_evaluate_feature_flags() { let postgres_reader: Arc = setup_pg_reader_client(None).await; let postgres_writer: Arc = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let flag = FeatureFlag { name: Some("Test Flag".to_string()), id: 1, @@ -374,6 +376,7 @@ mod tests { operator: Some(OperatorType::Exact), prop_type: "person".to_string(), group_type_index: None, + negation: None, }]), rollout_percentage: Some(100.0), // Set to 100% to ensure it's always on variant: None, @@ -397,6 +400,7 @@ mod tests { .feature_flags(feature_flag_list) .postgres_reader(postgres_reader) .postgres_writer(postgres_writer) + .cohort_cache(cohort_cache) .person_property_overrides(Some(person_properties)) .build() .expect("Failed to build FeatureFlagEvaluationContext"); @@ -505,6 +509,7 @@ mod tests { async fn test_evaluate_feature_flags_multiple_flags() { let postgres_reader: Arc = setup_pg_reader_client(None).await; let postgres_writer: Arc = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let flags = vec![ FeatureFlag { name: Some("Flag 1".to_string()), @@ -556,6 +561,7 @@ mod tests { .feature_flags(feature_flag_list) .postgres_reader(postgres_reader) .postgres_writer(postgres_writer) + .cohort_cache(cohort_cache) .build() .expect("Failed to build FeatureFlagEvaluationContext"); @@ -608,6 +614,7 @@ mod tests { async fn test_evaluate_feature_flags_with_overrides() { let postgres_reader: Arc = setup_pg_reader_client(None).await; let postgres_writer: Arc = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let team = insert_new_team_in_pg(postgres_reader.clone(), None) .await .unwrap(); @@ -627,6 +634,7 @@ mod tests { operator: Some(OperatorType::Exact), prop_type: "group".to_string(), group_type_index: Some(0), + negation: None, }]), rollout_percentage: Some(100.0), variant: None, @@ -655,6 +663,7 @@ mod tests { .feature_flags(feature_flag_list) .postgres_reader(postgres_reader) .postgres_writer(postgres_writer) + .cohort_cache(cohort_cache) 
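// --------------------------------------------------------------------------
// Illustrative sketch (annotation, not part of the patch): cohort_cache is
// declared on FeatureFlagEvaluationContext without #[builder(default)], so,
// assuming the Builder derive comes from the derive_builder crate, it is a
// required field: every build() call site had to gain .cohort_cache(...) or
// build() would return an Err. In miniature, with stand-in types:
use derive_builder::Builder;

#[derive(Builder)]
struct EvalContext {
    cohort_cache: u32, // required: no #[builder(default)]
    #[builder(default)]
    groups: Option<String>, // optional, like the override/group fields
}

fn demo() {
    // Omitting the required field fails at build() instead of panicking later.
    assert!(EvalContextBuilder::default().build().is_err());
    assert!(EvalContextBuilder::default().cohort_cache(1).build().is_ok());
}
// --------------------------------------------------------------------------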
.group_property_overrides(Some(group_property_overrides)) .groups(Some(groups)) .build() @@ -688,6 +697,7 @@ mod tests { let long_id = "a".repeat(1000); let postgres_reader: Arc = setup_pg_reader_client(None).await; let postgres_writer: Arc = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let flag = FeatureFlag { name: Some("Test Flag".to_string()), id: 1, @@ -717,6 +727,7 @@ mod tests { .feature_flags(feature_flag_list) .postgres_reader(postgres_reader) .postgres_writer(postgres_writer) + .cohort_cache(cohort_cache) .build() .expect("Failed to build FeatureFlagEvaluationContext"); diff --git a/rust/feature-flags/src/router.rs b/rust/feature-flags/src/router.rs index 505f18adfb0..e34ea31a3c6 100644 --- a/rust/feature-flags/src/router.rs +++ b/rust/feature-flags/src/router.rs @@ -9,11 +9,12 @@ use health::HealthRegistry; use tower::limit::ConcurrencyLimitLayer; use crate::{ + cohort_cache::CohortCacheManager, config::{Config, TeamIdsToTrack}, database::Client as DatabaseClient, geoip::GeoIpClient, + metrics_utils::team_id_label_filter, redis::Client as RedisClient, - utils::team_id_label_filter, v0_endpoint, }; @@ -22,6 +23,7 @@ pub struct State { pub redis: Arc, pub postgres_reader: Arc, pub postgres_writer: Arc, + pub cohort_cache: Arc, // TODO does this need a better name than just `cohort_cache`? pub geoip: Arc, pub team_ids_to_track: TeamIdsToTrack, } @@ -30,6 +32,7 @@ pub fn router( redis: Arc, postgres_reader: Arc, postgres_writer: Arc, + cohort_cache: Arc, geoip: Arc, liveness: HealthRegistry, config: Config, @@ -42,6 +45,7 @@ where redis, postgres_reader, postgres_writer, + cohort_cache, geoip, team_ids_to_track: config.team_ids_to_track.clone(), }; diff --git a/rust/feature-flags/src/server.rs b/rust/feature-flags/src/server.rs index c9e238fa8fd..69ff759ddfc 100644 --- a/rust/feature-flags/src/server.rs +++ b/rust/feature-flags/src/server.rs @@ -6,6 +6,7 @@ use std::time::Duration; use health::{HealthHandle, HealthRegistry}; use tokio::net::TcpListener; +use crate::cohort_cache::CohortCacheManager; use crate::config::Config; use crate::database::get_pool; use crate::geoip::GeoIpClient; @@ -54,6 +55,8 @@ where } }; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); + let health = HealthRegistry::new("liveness"); // TODO - we don't have a more complex health check yet, but we should add e.g. 
some around DB operations @@ -67,6 +70,7 @@ where redis_client, postgres_reader, postgres_writer, + cohort_cache, geoip_service, health, config, diff --git a/rust/feature-flags/src/team.rs b/rust/feature-flags/src/team.rs index 0fa75f0bd3d..f13cf29094b 100644 --- a/rust/feature-flags/src/team.rs +++ b/rust/feature-flags/src/team.rs @@ -42,7 +42,7 @@ impl Team { // TODO: Consider an LRU cache for teams as well, with small TTL to skip redis/pg lookups let team: Team = serde_json::from_str(&serialized_team).map_err(|e| { tracing::error!("failed to parse data to team: {}", e); - FlagError::DataParsingError + FlagError::RedisDataParsingError })?; Ok(team) @@ -55,7 +55,7 @@ impl Team { ) -> Result<(), FlagError> { let serialized_team = serde_json::to_string(&team).map_err(|e| { tracing::error!("Failed to serialize team: {}", e); - FlagError::DataParsingError + FlagError::RedisDataParsingError })?; client @@ -173,7 +173,7 @@ mod tests { let client = setup_redis_client(None); match Team::from_redis(client.clone(), team.api_token.clone()).await { - Err(FlagError::DataParsingError) => (), + Err(FlagError::RedisDataParsingError) => (), - Err(other) => panic!("Expected DataParsingError, got {:?}", other), - Ok(_) => panic!("Expected DataParsingError"), + Err(other) => panic!("Expected RedisDataParsingError, got {:?}", other), + Ok(_) => panic!("Expected RedisDataParsingError"), }; diff --git a/rust/feature-flags/src/test_utils.rs b/rust/feature-flags/src/test_utils.rs index 32a2016bf75..346ed106ea6 100644 --- a/rust/feature-flags/src/test_utils.rs +++ b/rust/feature-flags/src/test_utils.rs @@ -1,11 +1,12 @@ use anyhow::Error; use axum::async_trait; use serde_json::{json, Value}; -use sqlx::{pool::PoolConnection, postgres::PgRow, Error as SqlxError, PgPool, Postgres}; +use sqlx::{pool::PoolConnection, postgres::PgRow, Error as SqlxError, Postgres, Row}; use std::sync::Arc; use uuid::Uuid; use crate::{ + cohort_models::Cohort, config::{Config, DEFAULT_TEST_CONFIG}, database::{get_pool, Client, CustomDatabaseError}, flag_definitions::{self, FeatureFlag, FeatureFlagRow}, @@ -23,7 +24,9 @@ pub fn random_string(prefix: &str, length: usize) -> String { format!("{}{}", prefix, suffix) } -pub async fn insert_new_team_in_redis(client: Arc) -> Result { +pub async fn insert_new_team_in_redis( + client: Arc, +) -> Result { let id = rand::thread_rng().gen_range(0..10_000_000); let token = random_string("phc_", 12); let team = Team { @@ -48,7 +51,7 @@ pub async fn insert_new_team_in_redis(client: Arc) -> Result, + client: Arc, team_id: i32, json_value: Option, ) -> Result<(), Error> { @@ -88,7 +91,7 @@ pub async fn insert_flags_for_team_in_redis( Ok(()) } -pub fn setup_redis_client(url: Option) -> Arc { +pub fn setup_redis_client(url: Option) -> Arc { let redis_url = match url { Some(value) => value, None => "redis://localhost:6379/".to_string(), @@ -130,7 +133,7 @@ pub fn create_flag_from_json(json_value: Option) -> Vec { flags } -pub async fn setup_pg_reader_client(config: Option<&Config>) -> Arc { +pub async fn setup_pg_reader_client(config: Option<&Config>) -> Arc { let config = config.unwrap_or(&DEFAULT_TEST_CONFIG); Arc::new( get_pool(&config.read_database_url, config.max_pg_connections) @@ -139,7 +142,7 @@ pub async fn setup_pg_writer_client(config: Option<&Config>) -> Arc { ) } -pub async fn setup_pg_writer_client(config: Option<&Config>) -> Arc { +pub async fn setup_pg_writer_client(config: Option<&Config>) -> Arc { let config = config.unwrap_or(&DEFAULT_TEST_CONFIG); Arc::new( get_pool(&config.write_database_url, config.max_pg_connections) @@ -261,7 +264,7 @@ pub async fn insert_new_team_in_pg( } pub async fn
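// --------------------------------------------------------------------------
// Illustrative sketch (annotation, not part of the patch): note how the
// server and router changes above construct exactly one CohortCacheManager
// and hand it out as an Arc through router State, rather than building a
// cache per request. The sharing pattern in miniature, with stand-in types:
use std::sync::Arc;

struct CacheStub; // stand-in for CohortCacheManager

#[derive(Clone)]
struct AppState {
    cohort_cache: Arc<CacheStub>,
}

fn handle_request(state: &AppState) {
    // Cloning the Arc only bumps a refcount; all requests share one cache,
    // so anything it memoizes survives across requests.
    let _per_request_handle: Arc<CacheStub> = state.cohort_cache.clone();
}
// --------------------------------------------------------------------------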
insert_flag_for_team_in_pg( - client: Arc, + client: Arc, team_id: i32, flag: Option, ) -> Result { @@ -310,11 +313,12 @@ pub async fn insert_flag_for_team_in_pg( } pub async fn insert_person_for_team_in_pg( - client: Arc, + client: Arc, team_id: i32, distinct_id: String, properties: Option, -) -> Result<(), Error> { +) -> Result { + // Returns the new person's id so tests can link the person to cohorts let payload = match properties { Some(value) => value, None => json!({ @@ -326,7 +330,7 @@ pub async fn insert_person_for_team_in_pg( let uuid = Uuid::now_v7(); let mut conn = client.get_connection().await?; - let res = sqlx::query( + let row = sqlx::query( r#" WITH inserted_person AS ( INSERT INTO posthog_person ( @@ -334,10 +338,11 @@ pub async fn insert_person_for_team_in_pg( properties_last_operation, team_id, is_user_id, is_identified, uuid, version ) VALUES ('2023-04-05', $1, '{}', '{}', $2, NULL, true, $3, 0) - RETURNING * + RETURNING id ) INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version) VALUES ($4, (SELECT id FROM inserted_person), $5, 0) + RETURNING person_id "#, ) .bind(&payload) @@ -345,10 +350,109 @@ pub async fn insert_person_for_team_in_pg( .bind(uuid) .bind(&distinct_id) .bind(team_id) + .fetch_one(&mut *conn) + .await?; + + let person_id: i32 = row.get::<i32, _>("person_id"); + Ok(person_id) +} + +pub async fn insert_cohort_for_team_in_pg( + client: Arc, + team_id: i32, + name: Option, + filters: serde_json::Value, + is_static: bool, +) -> Result { + let cohort = Cohort { + id: 0, // Placeholder, will be updated after insertion + name: name.unwrap_or("Test Cohort".to_string()), + description: Some("Description for cohort".to_string()), + team_id, + deleted: false, + filters, + query: None, + version: Some(1), + pending_version: None, + count: None, + is_calculating: false, + is_static, + errors_calculating: 0, + groups: serde_json::json!([]), + created_by_id: None, + }; + + let mut conn = client.get_connection().await?; + let row: (i32,) = sqlx::query_as( + r#"INSERT INTO posthog_cohort + (name, description, team_id, deleted, filters, query, version, pending_version, count, is_calculating, is_static, errors_calculating, groups, created_by_id) VALUES + ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14) + RETURNING id"#, + ) + .bind(&cohort.name) + .bind(&cohort.description) + .bind(cohort.team_id) + .bind(cohort.deleted) + .bind(&cohort.filters) + .bind(&cohort.query) + .bind(cohort.version) + .bind(cohort.pending_version) + .bind(cohort.count) + .bind(cohort.is_calculating) + .bind(cohort.is_static) + .bind(cohort.errors_calculating) + .bind(&cohort.groups) + .bind(cohort.created_by_id) + .fetch_one(&mut *conn) + .await?; + + // Return the cohort with the id Postgres generated for it + let id = row.0; + + Ok(Cohort { id, ..cohort }) +} + +pub async fn get_person_id_by_distinct_id( + client: Arc, + team_id: i32, + distinct_id: &str, +) -> Result { + let mut conn = client.get_connection().await?; + let row: (i32,) = sqlx::query_as( + r#"SELECT id FROM posthog_person + WHERE team_id = $1 AND id = ( + SELECT person_id FROM posthog_persondistinctid + WHERE team_id = $1 AND distinct_id = $2 + LIMIT 1 + ) + LIMIT 1"#, + ) + .bind(team_id) + .bind(distinct_id) + .fetch_one(&mut *conn) + .await + .map_err(|_| anyhow::anyhow!("Person not found"))?; + + Ok(row.0) +} + +pub async fn add_person_to_cohort( + client: Arc, + person_id: i32, + cohort_id: i32, +) -> Result<(), Error> { + let mut conn = client.get_connection().await?; + let res = sqlx::query( + r#"INSERT INTO
posthog_cohortpeople (cohort_id, person_id) + VALUES ($1, $2) + ON CONFLICT DO NOTHING"#, + ) + .bind(cohort_id) + .bind(person_id) .execute(&mut *conn) .await?; - assert_eq!(res.rows_affected(), 1); + assert!(res.rows_affected() > 0, "Failed to add person to cohort"); Ok(()) } diff --git a/rust/feature-flags/tests/test_flag_matching_consistency.rs b/rust/feature-flags/tests/test_flag_matching_consistency.rs index 94f4f67dcdc..c632d28bc15 100644 --- a/rust/feature-flags/tests/test_flag_matching_consistency.rs +++ b/rust/feature-flags/tests/test_flag_matching_consistency.rs @@ -1,3 +1,6 @@ +use std::sync::Arc; + +use feature_flags::cohort_cache::CohortCacheManager; use feature_flags::feature_flag_match_reason::FeatureFlagMatchReason; /// These tests are common between all libraries doing local evaluation of feature flags. /// This ensures there are no mismatches between implementations. @@ -110,6 +113,7 @@ async fn it_is_consistent_with_rollout_calculation_for_simple_flags() { for (i, result) in results.iter().enumerate().take(1000) { let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let distinct_id = format!("distinct_id_{}", i); @@ -118,7 +122,7 @@ async fn it_is_consistent_with_rollout_calculation_for_simple_flags() { 1, postgres_reader, postgres_writer, - None, + cohort_cache, None, None, ) @@ -1209,6 +1213,7 @@ async fn it_is_consistent_with_rollout_calculation_for_multivariate_flags() { for (i, result) in results.iter().enumerate().take(1000) { let postgres_reader = setup_pg_reader_client(None).await; let postgres_writer = setup_pg_writer_client(None).await; + let cohort_cache = Arc::new(CohortCacheManager::new(postgres_reader.clone(), None, None)); let distinct_id = format!("distinct_id_{}", i); let feature_flag_match = FeatureFlagMatcher::new( @@ -1216,7 +1221,7 @@ async fn it_is_consistent_with_rollout_calculation_for_multivariate_flags() { 1, postgres_reader, postgres_writer, - None, + cohort_cache, None, None, )
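// --------------------------------------------------------------------------
// Illustrative sketch (annotation, not part of the patch): the new test_utils
// helpers compose into the fixed setup sequence every static-cohort test
// follows: insert the cohort, insert the person, resolve the person id, then
// link the two. The Arc bound below is assumed; test_utils' exact trait-object
// type was elided above:
async fn setup_static_cohort_member(
    client: std::sync::Arc<dyn Client + Send + Sync>,
    team_id: i32,
) -> Result<(), anyhow::Error> {
    let cohort =
        insert_cohort_for_team_in_pg(client.clone(), team_id, None, serde_json::json!({}), true)
            .await?;
    // insert_person_for_team_in_pg now returns the new person's id directly,
    // though the tests above re-derive it with get_person_id_by_distinct_id.
    let person_id =
        insert_person_for_team_in_pg(client.clone(), team_id, "user".to_string(), None).await?;
    add_person_to_cohort(client, person_id, cohort.id).await?;
    Ok(())
}
// --------------------------------------------------------------------------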