([
},
createProjectSuccess: ({ currentProject }) => {
if (currentProject) {
- actions.switchTeam(currentProject.id, urls.products())
+ const useUseCaseSelection = values.featureFlags[FEATURE_FLAGS.ONBOARDING_USE_CASE_SELECTION] === true
+ const redirectUrl = useUseCaseSelection ? urls.useCaseSelection() : urls.products()
+ actions.switchTeam(currentProject.id, redirectUrl)
}
},
diff --git a/frontend/src/scenes/retention/RetentionGraph.tsx b/frontend/src/scenes/retention/RetentionGraph.tsx
index 83e5d3e885..51f1e74cdf 100644
--- a/frontend/src/scenes/retention/RetentionGraph.tsx
+++ b/frontend/src/scenes/retention/RetentionGraph.tsx
@@ -35,6 +35,7 @@ export function RetentionGraph({ inSharedMode = false }: RetentionGraphProps): J
aggregationGroupTypeIndex,
shouldShowMeanPerBreakdown,
showTrendLines,
+ xAxisLabels,
} = useValues(retentionGraphLogic(insightProps))
const { openModal } = useActions(retentionModalLogic(insightProps))
@@ -53,7 +54,7 @@ export function RetentionGraph({ inSharedMode = false }: RetentionGraphProps): J
data-attr="trend-line-graph"
type={displayTypeToGraphType(retentionFilter?.display || ChartDisplayType.ActionsLineGraph)}
datasets={filteredTrendSeries as GraphDataset[]}
- labels={(filteredTrendSeries[0] && filteredTrendSeries[0].labels) || []}
+ labels={xAxisLabels}
isInProgress={incompletenessOffsetFromEnd < 0}
inSharedMode={!!inSharedMode}
showPersonsModal={false}
diff --git a/frontend/src/scenes/retention/RetentionTable.tsx b/frontend/src/scenes/retention/RetentionTable.tsx
index 5743751912..980ec65f73 100644
--- a/frontend/src/scenes/retention/RetentionTable.tsx
+++ b/frontend/src/scenes/retention/RetentionTable.tsx
@@ -7,12 +7,12 @@ import React from 'react'
import { IconChevronDown, IconChevronRight } from '@posthog/icons'
import { Tooltip } from 'lib/lemon-ui/Tooltip'
-import { gradateColor, range } from 'lib/utils'
+import { gradateColor } from 'lib/utils'
import { insightLogic } from 'scenes/insights/insightLogic'
import { themeLogic } from '~/layout/navigation-3000/themeLogic'
-import { DEFAULT_RETENTION_TOTAL_INTERVALS, OVERALL_MEAN_KEY } from './retentionLogic'
+import { OVERALL_MEAN_KEY } from './retentionLogic'
import { retentionModalLogic } from './retentionModalLogic'
import { retentionTableLogic } from './retentionTableLogic'
import { NO_BREAKDOWN_VALUE } from './types'
@@ -30,12 +30,13 @@ export function RetentionTable({
hideSizeColumn,
retentionVizOptions,
theme,
- retentionFilter,
expandedBreakdowns,
retentionMeans,
breakdownDisplayNames,
+ tableHeaders,
+ retentionFilter,
} = useValues(retentionTableLogic(insightProps))
- const { toggleBreakdown, setSelectedInterval, setHoveredColumn } = useActions(retentionTableLogic(insightProps))
+ const { toggleBreakdown } = useActions(retentionTableLogic(insightProps))
const { hoveredColumn } = useValues(retentionTableLogic(insightProps))
const { openModal } = useActions(retentionModalLogic(insightProps))
@@ -46,7 +47,6 @@ export function RetentionTable({
const backgroundColorMean = theme?.['preset-2'] || '#000000' // Default to black if no color found
const { isDarkModeOn } = useValues(themeLogic)
- const totalIntervals = retentionFilter?.totalIntervals ?? DEFAULT_RETENTION_TOTAL_INTERVALS
// only one breakdown value so don't need to highlight using different colors/autoexpand it
const isSingleBreakdown = Object.keys(tableRowsSplitByBreakdownValue).length === 1
@@ -68,26 +68,8 @@ export function RetentionTable({
| Cohort |
{!hideSizeColumn && Size | }
- {range(0, totalIntervals).map((interval) => (
- {
- if (!allowSelectingColumns) {
- return
- }
- const newInterval = selectedInterval === interval ? null : interval
- setSelectedInterval(newInterval)
- }}
- onMouseEnter={() => setHoveredColumn(interval)}
- onMouseLeave={() => setHoveredColumn(null)}
- >
- {`${retentionFilter?.period} ${interval}`}
- |
+ {tableHeaders.map((header) => (
+ {header} |
))}
@@ -133,14 +115,8 @@ export function RetentionTable({
)}
- {range(0, totalIntervals).map((interval) => (
-
+ {tableHeaders.map((_, interval) => (
+ |
{row.cohortSize}
|
)}
- {range(0, totalIntervals).map((columnIndex) => {
+ {tableHeaders.map((_, columnIndex) => {
const column = row.values[columnIndex]
return (
`${periodName}_${intervalNumber}`)
+ // For custom brackets, we need to use the number of brackets + 1 for column count
+ // Otherwise use totalIntervals
+ const customBrackets = query.retentionFilter.retentionCustomBrackets
+ const columnCount = customBrackets && customBrackets.length > 0 ? customBrackets.length + 1 : totalIntervals
+ const selects = Array.from({ length: columnCount }, (_, intervalNumber) => `${periodName}_${intervalNumber}`)
return setLatestVersionsOnQuery(
{
kind: NodeKind.ActorsQuery,
diff --git a/frontend/src/scenes/retention/retentionGraphLogic.ts b/frontend/src/scenes/retention/retentionGraphLogic.ts
index 59873ce838..1bb095f75b 100644
--- a/frontend/src/scenes/retention/retentionGraphLogic.ts
+++ b/frontend/src/scenes/retention/retentionGraphLogic.ts
@@ -3,6 +3,7 @@ import { connect, kea, key, path, props, selectors } from 'kea'
import { QUnitType, dayjs } from 'lib/dayjs'
import { insightVizDataLogic } from 'scenes/insights/insightVizDataLogic'
import { keyForInsightLogicProps } from 'scenes/insights/sharedUtils'
+import { dateOptionPlurals } from 'scenes/retention/constants'
import { ProcessedRetentionPayload, RetentionTrendPayload } from 'scenes/retention/types'
import { teamLogic } from 'scenes/teamLogic'
@@ -256,5 +257,33 @@ export const retentionGraphLogic = kea([
return trendSeries.filter((series) => series.breakdown_value === selectedBreakdownValue)
},
],
+
+ xAxisLabels: [
+ (s) => [s.retentionFilter, s.results],
+ (retentionFilter, results) => {
+ if (!retentionFilter || !results) {
+ return []
+ }
+ const { period, retentionCustomBrackets } = retentionFilter
+ const unit = dateOptionPlurals[period || 'Day']
+
+ if (retentionCustomBrackets) {
+ const labels = [`${period || 'Day'} 0`]
+ let cumulativeTotal = 1
+ for (const bracketSize of retentionCustomBrackets) {
+ const start = cumulativeTotal
+ const end = cumulativeTotal + bracketSize - 1
+ if (start === end) {
+ labels.push(`${unit} ${start}`)
+ } else {
+ labels.push(`${unit} ${start}-${end}`)
+ }
+ cumulativeTotal += bracketSize
+ }
+ return labels
+ }
+ return results?.[0]?.values.map((_, i) => `${period} ${i}`)
+ },
+ ],
}),
])
diff --git a/frontend/src/scenes/retention/retentionLogic.ts b/frontend/src/scenes/retention/retentionLogic.ts
index 5b3f9338ed..3c92c79e5e 100644
--- a/frontend/src/scenes/retention/retentionLogic.ts
+++ b/frontend/src/scenes/retention/retentionLogic.ts
@@ -1,10 +1,10 @@
import { mean, sum } from 'd3'
-import { actions, connect, kea, key, listeners, path, props, reducers, selectors } from 'kea'
+import { actions, connect, events, kea, key, listeners, path, props, reducers, selectors } from 'kea'
import { CUSTOM_OPTION_KEY } from 'lib/components/DateFilter/types'
import { dayjs } from 'lib/dayjs'
import { formatDateRange } from 'lib/utils'
-import { QuerySourceUpdate, insightVizDataLogic } from 'scenes/insights/insightVizDataLogic'
+import { insightVizDataLogic } from 'scenes/insights/insightVizDataLogic'
import { keyForInsightLogicProps } from 'scenes/insights/sharedUtils'
import { BREAKDOWN_OTHER_DISPLAY, BREAKDOWN_OTHER_STRING_LABEL, formatBreakdownLabel } from 'scenes/insights/utils'
import { ProcessedRetentionPayload } from 'scenes/retention/types'
@@ -21,6 +21,7 @@ const DEFAULT_RETENTION_LOGIC_KEY = 'default_retention_key'
export const OVERALL_MEAN_KEY = '__overall__'
export const DEFAULT_RETENTION_TOTAL_INTERVALS = 8
export const RETENTION_EMPTY_BREAKDOWN_VALUE = '(empty)'
+export const MAX_BRACKETS = 30
// Define a type for the output of the retentionMeans selector
export interface MeanRetentionValue {
@@ -50,7 +51,10 @@ export const retentionLogic = kea([
})),
actions({
setSelectedBreakdownValue: (value: string | number | boolean | null) => ({ value }),
- setSelectedInterval: (interval: number | null) => ({ interval }),
+ setLocalCustomBrackets: (brackets: (string | number)[]) => ({ brackets }),
+ updateLocalCustomBracket: (index: number, value: number | undefined) => ({ index, value }),
+ addCustomBracket: () => ({}),
+ removeCustomBracket: (index: number) => ({ index }),
}),
listeners(({ actions, values }) => ({
updateBreakdownFilter: () => {
@@ -58,16 +62,50 @@ export const retentionLogic = kea([
// This prevents the dropdown from showing invalid cohort IDs
actions.setSelectedBreakdownValue(null)
},
- updateInsightFilter: () => {
- actions.setSelectedInterval(null)
+ updateLocalCustomBracket: async (_, breakpoint) => {
+ await breakpoint(1000)
+ const { localCustomBrackets, retentionFilter } = values
+ const numericBrackets = localCustomBrackets
+ .map((b) => (typeof b === 'string' ? parseInt(b, 10) : b))
+ .filter((b): b is number => !isNaN(Number(b)) && Number(b) > 0)
+
+ if (JSON.stringify(numericBrackets) !== JSON.stringify(retentionFilter?.retentionCustomBrackets || [])) {
+ actions.updateInsightFilter({
+ retentionCustomBrackets: numericBrackets.length > 0 ? numericBrackets : undefined,
+ })
+ }
},
- setSelectedInterval: ({ interval }) => {
- actions.updateQuerySource({
- retentionFilter: {
- ...values.retentionFilter,
- selectedInterval: interval,
- },
- } as QuerySourceUpdate)
+ updateInsightFilter: ({ insightFilter }) => {
+ const retentionFilter = insightFilter as RetentionFilter
+ if (retentionFilter.retentionCustomBrackets !== undefined) {
+ const { localCustomBrackets } = values
+ const numericLocal = localCustomBrackets
+ .map((b) => (typeof b === 'string' ? parseInt(b, 10) : b))
+ .filter((b): b is number => !isNaN(Number(b)) && Number(b) > 0)
+
+ const incomingBrackets = retentionFilter.retentionCustomBrackets || []
+
+ if (JSON.stringify(numericLocal) !== JSON.stringify(incomingBrackets)) {
+ actions.setLocalCustomBrackets([...incomingBrackets])
+ }
+ }
+ },
+ removeCustomBracket: async (_, breakpoint) => {
+ await breakpoint(1000)
+ const { localCustomBrackets, retentionFilter } = values
+ const numericBrackets = localCustomBrackets
+ .map((b) => (typeof b === 'string' ? parseInt(b, 10) : b))
+ .filter((b): b is number => !isNaN(Number(b)) && Number(b) > 0)
+
+ // Only update if we still have at least one bracket (even if empty) to keep custom brackets enabled
+ if (
+ localCustomBrackets.length > 0 &&
+ JSON.stringify(numericBrackets) !== JSON.stringify(retentionFilter?.retentionCustomBrackets || [])
+ ) {
+ actions.updateInsightFilter({
+ retentionCustomBrackets: numericBrackets.length > 0 ? numericBrackets : [],
+ })
+ }
},
})),
reducers({
@@ -77,6 +115,31 @@ export const retentionLogic = kea([
setSelectedBreakdownValue: (_, { value }) => value,
},
],
+ localCustomBrackets: [
+ [] as (string | number)[],
+ {
+ setLocalCustomBrackets: (_, { brackets }) => brackets,
+ updateLocalCustomBracket: (state, { index, value }) => {
+ const newBrackets = [...state]
+ newBrackets[index] = value ?? ''
+ return newBrackets
+ },
+ addCustomBracket: (state) => {
+ if (state.length >= MAX_BRACKETS) {
+ return state
+ }
+ return [...state, '']
+ },
+ removeCustomBracket: (state, { index }) => {
+ const newBrackets = [...state]
+ newBrackets.splice(index, 1)
+ if (newBrackets.filter((b) => b !== '').length === 0) {
+ return ['']
+ }
+ return newBrackets
+ },
+ },
+ ],
}),
selectors({
hasValidBreakdown: [(s) => [s.breakdownFilter], (breakdownFilter) => isValidBreakdown(breakdownFilter)],
@@ -151,7 +214,9 @@ export const retentionLogic = kea([
return {}
}
- const { totalIntervals = DEFAULT_RETENTION_TOTAL_INTERVALS, meanRetentionCalculation } = retentionFilter
+ const { meanRetentionCalculation } = retentionFilter
+ const numIntervals = results.length > 0 ? results[0].values.length : 0
+
const groupedByBreakdown: Record = {}
if (hasValidBreakdown) {
@@ -181,7 +246,7 @@ export const retentionLogic = kea([
? 'Overall'
: (breakdownRows[0]?.breakdown_value ?? null)
- for (let intervalIndex = 0; intervalIndex < totalIntervals; intervalIndex++) {
+ for (let intervalIndex = 0; intervalIndex < numIntervals; intervalIndex++) {
const validRows = breakdownRows.filter(
(row) =>
row.values[intervalIndex] && // Ensure data for this interval exists
@@ -310,4 +375,12 @@ export const retentionLogic = kea([
},
],
}),
+ events(({ actions, values }) => ({
+ afterMount: () => {
+ const brackets = values.retentionFilter?.retentionCustomBrackets
+ if (brackets !== undefined) {
+ actions.setLocalCustomBrackets([...brackets])
+ }
+ },
+ })),
])
diff --git a/frontend/src/scenes/retention/retentionTableLogic.ts b/frontend/src/scenes/retention/retentionTableLogic.ts
index 0a997404e5..a3182cebdc 100644
--- a/frontend/src/scenes/retention/retentionTableLogic.ts
+++ b/frontend/src/scenes/retention/retentionTableLogic.ts
@@ -1,10 +1,14 @@
import { actions, afterMount, connect, kea, key, listeners, path, props, reducers, selectors } from 'kea'
+import { capitalizeFirstLetter } from 'lib/utils'
import { insightVizDataLogic } from 'scenes/insights/insightVizDataLogic'
import { keyForInsightLogicProps } from 'scenes/insights/sharedUtils'
+import { InsightQueryNode } from '~/queries/schema/schema-general'
+import { isRetentionQuery } from '~/queries/utils'
import { InsightLogicProps, InsightType } from '~/types'
+import { dateOptionPlurals } from './constants'
import { retentionLogic } from './retentionLogic'
import type { retentionTableLogicType } from './retentionTableLogicType'
import { NO_BREAKDOWN_VALUE, ProcessedRetentionPayload, RetentionTableRow } from './types'
@@ -18,11 +22,11 @@ export const retentionTableLogic = kea([
connect((props: InsightLogicProps) => ({
values: [
insightVizDataLogic(props),
- ['dateRange', 'retentionFilter', 'vizSpecificOptions', 'theme'],
+ ['dateRange', 'retentionFilter', 'vizSpecificOptions', 'theme', 'insightQuery'],
retentionLogic(props),
['results', 'filteredResults', 'selectedBreakdownValue', 'retentionMeans', 'breakdownDisplayNames'],
],
- actions: [retentionLogic(props), ['setSelectedBreakdownValue', 'setSelectedInterval']],
+ actions: [retentionLogic(props), ['setSelectedBreakdownValue']],
})),
actions({
@@ -106,21 +110,49 @@ export const retentionTableLogic = kea([
},
],
+ tableHeaders: [
+ (s) => [s.results, s.insightQuery],
+ (results: ProcessedRetentionPayload[], insightQuery: InsightQueryNode | null): string[] => {
+ if (results.length > 0 && results[0].values.length > 0) {
+ if (isRetentionQuery(insightQuery) && insightQuery.retentionFilter?.retentionCustomBrackets) {
+ const { period, retentionCustomBrackets } = insightQuery.retentionFilter
+ const unit = capitalizeFirstLetter(dateOptionPlurals[period || 'Day'])
+ const labels = [`${period || 'Day'} 0`]
+ let cumulativeTotal = 1
+ for (const bracketSize of retentionCustomBrackets) {
+ const start = cumulativeTotal
+ const end = cumulativeTotal + bracketSize - 1
+ if (start === end) {
+ labels.push(`${unit} ${start}`)
+ } else {
+ labels.push(`${unit} ${start}-${end}`)
+ }
+ cumulativeTotal += bracketSize
+ }
+ return labels
+ }
+ if (isRetentionQuery(insightQuery)) {
+ return results[0].values.map((_, i) => `${insightQuery.retentionFilter?.period || 'Day'} ${i}`)
+ }
+ }
+ return []
+ },
+ ],
tableRowsSplitByBreakdownValue: [
(s) => [s.tableRows],
- (tableRows): Record =>
- tableRows.reduce(
+ (tableRows) => {
+ return tableRows.reduce(
(acc, row) => {
const breakdownValue = row.breakdown_value ?? NO_BREAKDOWN_VALUE
acc[breakdownValue] = [...(acc[breakdownValue] || []), row]
return acc
},
{} as Record
- ),
+ )
+ },
],
}),
])
-
// Helper function to auto-expand a single breakdown
function autoExpandSingleBreakdown(
tableRowsSplitByBreakdownValue: Record,
diff --git a/frontend/src/scenes/retention/types.ts b/frontend/src/scenes/retention/types.ts
index bad0673a3b..051ec4ac78 100644
--- a/frontend/src/scenes/retention/types.ts
+++ b/frontend/src/scenes/retention/types.ts
@@ -6,6 +6,7 @@ export const NO_BREAKDOWN_VALUE = '$$__posthog_...__$$'
export interface ProcessedRetentionValue {
count: number
+ label: string
percentage: number
cellDate: Dayjs
isCurrentPeriod: boolean
diff --git a/frontend/src/scenes/saved-insights/SavedInsights.tsx b/frontend/src/scenes/saved-insights/SavedInsights.tsx
index b36a640e4e..b8567ff8cb 100644
--- a/frontend/src/scenes/saved-insights/SavedInsights.tsx
+++ b/frontend/src/scenes/saved-insights/SavedInsights.tsx
@@ -377,6 +377,12 @@ export const QUERY_TYPES_METADATA: Record = {
icon: IconPieChart,
inMenu: true,
},
+ [NodeKind.SessionsQuery]: {
+ name: 'Sessions',
+ description: 'List and explore sessions.',
+ icon: IconTableChart,
+ inMenu: false,
+ },
[NodeKind.RevenueExampleEventsQuery]: {
name: 'Revenue Example Events',
description: 'Revenue Example Events Query.',
diff --git a/frontend/src/scenes/sceneLogic.tsx b/frontend/src/scenes/sceneLogic.tsx
index 3e4469be15..d1357c313a 100644
--- a/frontend/src/scenes/sceneLogic.tsx
+++ b/frontend/src/scenes/sceneLogic.tsx
@@ -9,12 +9,12 @@ import { useEffect, useState } from 'react'
import api from 'lib/api'
import { commandBarLogic } from 'lib/components/CommandBar/commandBarLogic'
import { BarStatus } from 'lib/components/CommandBar/types'
-import { TeamMembershipLevel } from 'lib/constants'
+import { FEATURE_FLAGS, TeamMembershipLevel } from 'lib/constants'
import { trackFileSystemLogView } from 'lib/hooks/useFileSystemLogView'
import { lemonToast } from 'lib/lemon-ui/LemonToast/LemonToast'
import { Spinner } from 'lib/lemon-ui/Spinner'
import { featureFlagLogic } from 'lib/logic/featureFlagLogic'
-import { getRelativeNextPath, identifierToHuman } from 'lib/utils'
+import { getRelativeNextPath, identifierToHuman, isMac } from 'lib/utils'
import { getAppContext, getCurrentTeamIdOrNone } from 'lib/utils/getAppContext'
import { NEW_INTERNAL_TAB } from 'lib/utils/newInternalTab'
import { addProjectIdIfMissing, removeProjectIdIfPresent } from 'lib/utils/router-utils'
@@ -45,6 +45,7 @@ import { AccessControlLevel, OnboardingStepKey, ProductKey } from '~/types'
import { preflightLogic } from './PreflightCheck/preflightLogic'
import { handleLoginRedirect } from './authentication/loginLogic'
import { billingLogic } from './billing/billingLogic'
+import { newTabSceneLogic } from './new-tab/newTabSceneLogic'
import { organizationLogic } from './organizationLogic'
import type { sceneLogicType } from './sceneLogicType'
import { inviteLogic } from './settings/organization/inviteLogic'
@@ -270,6 +271,7 @@ const productsNotDependingOnEventIngestion: ProductKey[] = [ProductKey.DATA_WARE
const pathPrefixesOnboardingNotRequiredFor = [
urls.onboarding(''),
+ urls.useCaseSelection(),
urls.products(),
'/settings',
urls.organizationBilling(),
@@ -429,8 +431,8 @@ export const sceneLogic = kea([
pathname: addProjectIdIfMissing(pathname),
search,
hash,
- title: 'New tab',
- iconType: 'blank',
+ title: 'Search',
+ iconType: 'search',
pinned: false,
}
return sortTabsPinnedFirst([...baseTabs, newTab])
@@ -453,8 +455,8 @@ export const sceneLogic = kea([
pathname: '/new',
search: '',
hash: '',
- title: 'New tab',
- iconType: 'blank',
+ title: 'Search',
+ iconType: 'search',
pinned: false,
})
}
@@ -812,6 +814,12 @@ export const sceneLogic = kea([
(titleAndIcon) => titleAndIcon as { title: string; iconType: FileSystemIconType | 'loading' | 'blank' },
{ resultEqualityCheck: equal },
],
+ firstTabIsActive: [
+ (s) => [s.activeTabId, s.tabs],
+ (activeTabId, tabs): boolean => {
+ return activeTabId === tabs[0]?.id
+ },
+ ],
}),
listeners(({ values, actions, cache, props, selectors }) => ({
[NEW_INTERNAL_TAB]: (payload) => {
@@ -1115,13 +1123,22 @@ export const sceneLogic = kea([
) &&
!teamLogic.values.currentTeam?.ingested_event
) {
- console.warn('No onboarding completed, redirecting to /products')
-
const nextUrl =
getRelativeNextPath(params.searchParams.next, location) ??
removeProjectIdIfPresent(location.pathname)
- router.actions.replace(urls.products(), nextUrl ? { next: nextUrl } : undefined)
+ // Default to false (products page) if feature flags haven't loaded yet
+ const useUseCaseSelection =
+ values.featureFlags[FEATURE_FLAGS.ONBOARDING_USE_CASE_SELECTION] === true
+
+ if (useUseCaseSelection) {
+ router.actions.replace(
+ urls.useCaseSelection(),
+ nextUrl ? { next: nextUrl } : undefined
+ )
+ } else {
+ router.actions.replace(urls.products(), nextUrl ? { next: nextUrl } : undefined)
+ }
return
}
@@ -1539,21 +1556,60 @@ export const sceneLogic = kea([
afterMount(({ actions, cache, values }) => {
cache.disposables.add(() => {
const onKeyDown = (event: KeyboardEvent): void => {
- if ((event.ctrlKey || event.metaKey) && event.key === 'b') {
+ const commandKey = isMac() ? event.metaKey : event.ctrlKey
+ const optionKey = event.altKey
+ const keyCode = event.code?.toLowerCase()
+ const key = event.key?.toLowerCase()
+ const activeTab = values.activeTab
+
+ // Handle both physical key and typed character (cross-layout support).
+ const isTKey = keyCode === 'keyt' || key === 't'
+ const isWKey = keyCode === 'keyw' || key === 'w'
+ const isKKey = keyCode === 'keyk' || key === 'k'
+
+ // New shortcuts: Command+Option+T for new tab, Command+Option+W for close tab
+ if (commandKey && optionKey) {
const element = event.target as HTMLElement
if (element?.closest('.NotebookEditor')) {
return
}
- event.preventDefault()
- event.stopPropagation()
- if (event.shiftKey) {
- if (values.activeTab) {
- actions.removeTab(values.activeTab)
- }
- } else {
+ if (isTKey) {
+ event.preventDefault()
+ event.stopPropagation()
actions.newTab()
+ return
}
+
+ if (isWKey) {
+ event.preventDefault()
+ event.stopPropagation()
+ if (activeTab) {
+ actions.removeTab(activeTab)
+ }
+ return
+ }
+ }
+
+ // If cmd k, open current page new tab page / if already on the new tab page focus the search input
+ if (commandKey && isKKey) {
+ if (removeProjectIdIfPresent(router.values.location.pathname) === urls.newTab()) {
+ const activeTabId = values.activeTabId
+ const mountedLogic = activeTabId ? newTabSceneLogic.findMounted({ tabId: activeTabId }) : null
+ if (mountedLogic) {
+ mountedLogic.actions.focusNewTabSearchInput()
+ } else {
+ // If no mounted logic found, try with default key
+ const defaultLogic = newTabSceneLogic.findMounted({ tabId: 'default' })
+ if (defaultLogic) {
+ defaultLogic.actions.focusNewTabSearchInput()
+ }
+ }
+ return
+ }
+ router.actions.push(urls.newTab())
+
+ return
}
}
window.addEventListener('keydown', onKeyDown)
diff --git a/frontend/src/scenes/sceneTypes.ts b/frontend/src/scenes/sceneTypes.ts
index 53b7d1bb49..1516ba841a 100644
--- a/frontend/src/scenes/sceneTypes.ts
+++ b/frontend/src/scenes/sceneTypes.ts
@@ -56,6 +56,7 @@ export enum Scene {
ExperimentsSharedMetric = 'ExperimentsSharedMetric',
ExperimentsSharedMetrics = 'ExperimentsSharedMetrics',
ExploreEvents = 'ExploreEvents',
+ ExploreSessions = 'ExploreSessions',
FeatureFlag = 'FeatureFlag',
FeatureFlags = 'FeatureFlags',
Game368 = 'Game368',
@@ -98,6 +99,7 @@ export enum Scene {
PipelineNodeNew = 'PipelineNodeNew',
PreflightCheck = 'PreflightCheck',
Products = 'Products',
+ UseCaseSelection = 'UseCaseSelection',
ProjectCreateFirst = 'ProjectCreate',
ProjectHomepage = 'ProjectHomepage',
PropertyDefinition = 'PropertyDefinition',
@@ -112,6 +114,7 @@ export enum Scene {
SQLEditor = 'SQLEditor',
SavedInsights = 'SavedInsights',
SessionAttributionExplorer = 'SessionAttributionExplorer',
+ SessionProfile = 'SessionProfile',
Settings = 'Settings',
Signup = 'Signup',
Site = 'Site',
diff --git a/frontend/src/scenes/scenes.ts b/frontend/src/scenes/scenes.ts
index 58d8a9ad21..de2d0c9ffd 100644
--- a/frontend/src/scenes/scenes.ts
+++ b/frontend/src/scenes/scenes.ts
@@ -218,6 +218,13 @@ export const sceneConfigurations: Record = {
description: 'A catalog of all user interactions with your app or website.',
iconType: 'event',
},
+ [Scene.ExploreSessions]: {
+ projectBased: true,
+ name: 'Explore sessions',
+ defaultDocsPath: '/docs/data/sessions',
+ description: 'A catalog of all user sessions with your app or website.',
+ iconType: 'session_replay',
+ },
[Scene.FeatureFlag]: {
projectBased: true,
activityScope: ActivityScope.FEATURE_FLAG,
@@ -292,7 +299,13 @@ export const sceneConfigurations: Record = {
[Scene.Login]: { onlyUnauthenticated: true },
[Scene.Max]: { projectBased: true, name: 'Max', layout: 'app-raw', hideProjectNotice: true },
[Scene.MoveToPostHogCloud]: { name: 'Move to PostHog Cloud', hideProjectNotice: true },
- [Scene.NewTab]: { projectBased: true, name: 'New tab', hideProjectNotice: true, layout: 'app-raw' },
+ [Scene.NewTab]: {
+ projectBased: true,
+ name: 'Search',
+ iconType: 'search',
+ hideProjectNotice: true,
+ layout: 'app-raw',
+ },
[Scene.Notebook]: {
projectBased: true,
name: 'Notebook',
@@ -343,6 +356,7 @@ export const sceneConfigurations: Record = {
},
[Scene.PreflightCheck]: { onlyUnauthenticated: true },
[Scene.Products]: { projectBased: true, name: 'Products', layout: 'plain' },
+ [Scene.UseCaseSelection]: { projectBased: true, name: 'Use case selection', layout: 'plain' },
[Scene.ProjectCreateFirst]: {
name: 'Project creation',
organizationBased: true,
@@ -428,6 +442,7 @@ export const sceneConfigurations: Record = {
iconType: 'product_analytics',
},
[Scene.SessionAttributionExplorer]: { projectBased: true, name: 'Session attribution explorer (beta)' },
+ [Scene.SessionProfile]: { projectBased: true, name: 'Session profile' },
[Scene.Settings]: { projectBased: true, name: 'Settings' },
[Scene.Signup]: { onlyUnauthenticated: true },
[Scene.Site]: { projectBased: true, hideProjectNotice: true, layout: 'app-raw' },
@@ -620,6 +635,7 @@ export const routes: Record = {
[urls.dataManagementHistory()]: [Scene.DataManagement, 'dataManagementHistory'],
[urls.database()]: [Scene.DataManagement, 'database'],
[urls.activity(ActivityTab.ExploreEvents)]: [Scene.ExploreEvents, 'exploreEvents'],
+ [urls.activity(ActivityTab.ExploreSessions)]: [Scene.ExploreSessions, 'exploreSessions'],
[urls.activity(ActivityTab.LiveEvents)]: [Scene.LiveEvents, 'liveEvents'],
[urls.replay()]: [Scene.Replay, 'replay'],
// One entry for every available tab
@@ -634,6 +650,7 @@ export const routes: Record = {
[urls.replaySingle(':id')]: [Scene.ReplaySingle, 'replaySingle'],
[urls.replayPlaylist(':id')]: [Scene.ReplayPlaylist, 'replayPlaylist'],
[urls.replaySettings()]: [Scene.ReplaySettings, 'replaySettings'],
+ [urls.sessionProfile(':id')]: [Scene.SessionProfile, 'sessionProfile'],
[urls.personByDistinctId('*', false)]: [Scene.Person, 'personByDistinctId'],
[urls.personByUUID('*', false)]: [Scene.Person, 'personByUUID'],
[urls.persons()]: [Scene.Persons, 'persons'],
@@ -693,6 +710,7 @@ export const routes: Record = {
[urls.passwordReset()]: [Scene.PasswordReset, 'passwordReset'],
[urls.passwordResetComplete(':uuid', ':token')]: [Scene.PasswordResetComplete, 'passwordResetComplete'],
[urls.products()]: [Scene.Products, 'products'],
+ [urls.useCaseSelection()]: [Scene.UseCaseSelection, 'useCaseSelection'],
[urls.onboarding(':productKey')]: [Scene.Onboarding, 'onboarding'],
[urls.verifyEmail()]: [Scene.VerifyEmail, 'verifyEmail'],
[urls.verifyEmail(':uuid')]: [Scene.VerifyEmail, 'verifyEmailWithUuid'],
diff --git a/frontend/src/scenes/session-recordings/components/RecordingRow.tsx b/frontend/src/scenes/session-recordings/components/RecordingRow.tsx
index b810a76023..a1a4586d1b 100644
--- a/frontend/src/scenes/session-recordings/components/RecordingRow.tsx
+++ b/frontend/src/scenes/session-recordings/components/RecordingRow.tsx
@@ -19,7 +19,10 @@ export interface RecordingRowProps {
type ACTIVITY_DESCRIPTIONS = 'very low' | 'low' | 'medium' | 'high' | 'very high'
-function ActivityScoreLabel({ score }: { score: number | undefined }): JSX.Element {
+function getActivityScoreDescription({ score }: { score: number | undefined }): {
+ description: ACTIVITY_DESCRIPTIONS
+ backgroundColor: string
+} {
const n = score ?? 0
let backgroundColor = 'bg-primary-alt-highlight'
let description: ACTIVITY_DESCRIPTIONS = 'very low'
@@ -37,6 +40,22 @@ function ActivityScoreLabel({ score }: { score: number | undefined }): JSX.Eleme
description = 'low'
}
+ return { description, backgroundColor }
+}
+
+export function ActivityScoreLabel({
+ score,
+ clean = false,
+}: {
+ score: number | undefined
+ clean?: boolean
+}): JSX.Element {
+ const { description, backgroundColor } = getActivityScoreDescription({ score })
+
+ if (clean) {
+ return <>{description}>
+ }
+
return activity: {description}
}
diff --git a/frontend/src/scenes/session-recordings/filters/RecordingsUniversalFiltersEmbed.tsx b/frontend/src/scenes/session-recordings/filters/RecordingsUniversalFiltersEmbed.tsx
index bb313d577b..bf0a982670 100644
--- a/frontend/src/scenes/session-recordings/filters/RecordingsUniversalFiltersEmbed.tsx
+++ b/frontend/src/scenes/session-recordings/filters/RecordingsUniversalFiltersEmbed.tsx
@@ -26,7 +26,6 @@ import {
Popover,
} from '@posthog/lemon-ui'
-import { AccessControlAction } from 'lib/components/AccessControlAction'
import { DateFilter } from 'lib/components/DateFilter/DateFilter'
import { PropertyFilterIcon } from 'lib/components/PropertyFilters/components/PropertyFilterIcon'
import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types'
@@ -35,7 +34,6 @@ import { universalFiltersLogic } from 'lib/components/UniversalFilters/universal
import { isCommentTextFilter, isUniversalGroupFilterLike } from 'lib/components/UniversalFilters/utils'
import { FEATURE_FLAGS } from 'lib/constants'
import { useOnMountEffect } from 'lib/hooks/useOnMountEffect'
-import { LemonMenuOverlay } from 'lib/lemon-ui/LemonMenu/LemonMenu'
import { Tooltip } from 'lib/lemon-ui/Tooltip'
import { IconUnverifiedEvent } from 'lib/lemon-ui/icons'
import { featureFlagLogic } from 'lib/logic/featureFlagLogic'
@@ -50,8 +48,6 @@ import { groupsModel } from '~/models/groupsModel'
import { AndOrFilterSelect } from '~/queries/nodes/InsightViz/PropertyGroupFilters/AndOrFilterSelect'
import { NodeKind } from '~/queries/schema/schema-general'
import {
- AccessControlLevel,
- AccessControlResourceType,
EventPropertyFilter,
PersonPropertyFilter,
PropertyFilterType,
@@ -191,8 +187,6 @@ export const RecordingsUniversalFiltersEmbedButton = ({
const { setIsFiltersExpanded } = useActions(playlistLogic)
const { playlistTimestampFormat } = useValues(playerSettingsLogic)
const { setPlaylistTimestampFormat } = useActions(playerSettingsLogic)
- const { featureFlags } = useValues(featureFlagLogic)
- const replayFiltersRedesignEnabled = featureFlags[FEATURE_FLAGS.REPLAY_FILTERS_REDESIGN] === 'test'
return (
<>
@@ -231,7 +225,7 @@ export const RecordingsUniversalFiltersEmbedButton = ({
>
- {replayFiltersRedesignEnabled && }
+
[] = [
{
key: 'filters',
label: Filters ,
- content: replayFiltersRedesignEnabled ? (
+ content: (
{appliedSavedFilter && (
@@ -487,77 +479,75 @@ export const RecordingsUniversalFiltersEmbed = ({
taxonomicGroupTypes={taxonomicGroupTypes}
onChange={(filterGroup) => setFilters({ filter_group: filterGroup })}
>
- {replayFiltersRedesignEnabled && (
-
- Add filters:
-
-
-
-
- {/* Add filter button scoped to the first nested group */}
- {filters.filter_group.values.length > 0 &&
- isUniversalGroupFilterLike(filters.filter_group.values[0]) && (
- {
- const newFilterGroup = {
- ...filters.filter_group,
- values: [nestedGroup, ...filters.filter_group.values.slice(1)],
- }
- setFilters({ filter_group: newFilterGroup })
- }}
+
+ Add filters:
+
+
+
+
+ {/* Add filter button scoped to the first nested group */}
+ {filters.filter_group.values.length > 0 &&
+ isUniversalGroupFilterLike(filters.filter_group.values[0]) && (
+ {
+ const newFilterGroup = {
+ ...filters.filter_group,
+ values: [nestedGroup, ...filters.filter_group.values.slice(1)],
+ }
+ setFilters({ filter_group: newFilterGroup })
+ }}
+ >
+ setIsPopoverVisible(false)}
+ />
+ }
+ placement="bottom"
+ visible={isPopoverVisible}
+ onClickOutside={() => setIsPopoverVisible(false)}
>
- setIsPopoverVisible(false)}
- />
- }
- placement="bottom"
- visible={isPopoverVisible}
- onClickOutside={() => setIsPopoverVisible(false)}
+ }
+ onClick={() => setIsPopoverVisible(!isPopoverVisible)}
>
- }
- onClick={() => setIsPopoverVisible(!isPopoverVisible)}
- >
- Add filter
-
-
-
- )}
-
- )}
+ Add filter
+
+
+
+ )}
+
@@ -602,7 +592,7 @@ export const RecordingsUniversalFiltersEmbed = ({
pageKey="session-recordings"
size="small"
/>
-
+
@@ -626,169 +616,6 @@ export const RecordingsUniversalFiltersEmbed = ({
{SaveFiltersModal()}
- ) : (
-
-
- {
- let values = filters.filter_group.values
-
- // set the type on the nested child when only using a single filter group
- const hasSingleGroup = values.length === 1
- if (hasSingleGroup) {
- const group = values[0] as UniversalFiltersGroup
- values = [{ ...group, type }]
- }
-
- setFilters({
- filter_group: {
- type: type,
- values: values,
- },
- })
- }}
- topLevelFilter={true}
- suffix={['filter', 'filters']}
- size="small"
- />
-
-
- setFilters({
- filter_test_accounts: testFilters.filter_test_accounts,
- })
- }
- />
-
-
-
-
-
- Applied filters:
- {
- setFilters({
- date_from: changedDateFrom,
- date_to: changedDateTo,
- })
- }}
- dateOptions={[
- { key: 'Custom', values: [] },
- { key: 'Last 24 hours', values: ['-24h'] },
- { key: 'Last 3 days', values: ['-3d'] },
- { key: 'Last 7 days', values: ['-7d'] },
- { key: 'Last 30 days', values: ['-30d'] },
- { key: 'All time', values: ['-90d'] },
- ]}
- dropdownPlacement="bottom-start"
- size="small"
- // we always want to include the time in the date when setting it
- allowTimePrecision={true}
- // we always want to present the time control
- forceGranularity="minute"
- />
- {
- setFilters({
- duration: [
- {
- ...newRecordingDurationFilter,
- key: newDurationType,
- },
- ],
- })
- }}
- recordingDurationFilter={durationFilter}
- durationTypeFilter={durationFilter.key}
- pageKey="session-recordings"
- size="small"
- />
- setFilters({ filter_group: filterGroup })}
- >
-
-
-
-
-
-
- }
- tooltip="Reset any changes you've made to the filters"
- disabledReason={
- !(resetFilters && (totalFiltersCount ?? 0) > 0) ? 'No filters applied' : undefined
- }
- >
- Reset filters
-
- {appliedSavedFilter ? (
- void updateSavedFilter()}
- tooltip="Update saved filter"
- disabledReason={
- equal(appliedSavedFilter.filters, filters) ? 'No changes to update' : undefined
- }
- sideAction={{
- dropdown: {
- placement: 'bottom-end',
- overlay: (
- setIsSaveFiltersModalOpen(true),
- },
- ]}
- />
- ),
- },
- }}
- >
- Update "{appliedSavedFilter.name || 'Unnamed'}"
-
- ) : (
-
- setIsSaveFiltersModalOpen(true)}
- disabledReason={
- (totalFiltersCount ?? 0) === 0 ? 'No filters applied' : undefined
- }
- tooltip="Save filters for later"
- >
- Add to "Saved filters"
-
-
- )}
-
- setIsFiltersExpanded(false)}
- tooltip="Close filters and start watching recordings"
- >
- {(totalFiltersCount ?? 0) === 0 ? 'Close filters' : 'Start watching'}
-
-
- {SaveFiltersModal()}
-
),
'data-attr': 'session-recordings-filters-tab',
},
diff --git a/frontend/src/scenes/session-recordings/player/inspector/components/ItemEvent.tsx b/frontend/src/scenes/session-recordings/player/inspector/components/ItemEvent.tsx
index e538b56e6d..bc7c749bfd 100644
--- a/frontend/src/scenes/session-recordings/player/inspector/components/ItemEvent.tsx
+++ b/frontend/src/scenes/session-recordings/player/inspector/components/ItemEvent.tsx
@@ -4,6 +4,7 @@ import { IconShare } from '@posthog/icons'
import { LemonButton, LemonMenu, Link } from '@posthog/lemon-ui'
import { ErrorDisplay, idFrom } from 'lib/components/Errors/ErrorDisplay'
+import { ErrorEventType } from 'lib/components/Errors/types'
import { getExceptionAttributes } from 'lib/components/Errors/utils'
import { EventPropertyTabs } from 'lib/components/EventPropertyTabs/EventPropertyTabs'
import { PropertyKeyInfo } from 'lib/components/PropertyKeyInfo'
@@ -218,7 +219,12 @@ export function ItemEventDetail({ item }: ItemEventProps): JSX.Element {
>
)
case 'error_display':
- return
+ return (
+
+ )
default:
return
}
diff --git a/frontend/src/scenes/session-recordings/player/inspector/miniFiltersLogic.ts b/frontend/src/scenes/session-recordings/player/inspector/miniFiltersLogic.ts
index 44f8cbd4db..a77af589c3 100644
--- a/frontend/src/scenes/session-recordings/player/inspector/miniFiltersLogic.ts
+++ b/frontend/src/scenes/session-recordings/player/inspector/miniFiltersLogic.ts
@@ -1,6 +1,6 @@
+import equal from 'fast-deep-equal'
import { actions, connect, events, kea, listeners, path, reducers, selectors } from 'kea'
-import { objectsEqual } from 'lib/utils'
import { sessionRecordingEventUsageLogic } from 'scenes/session-recordings/sessionRecordingEventUsageLogic'
import { teamLogic } from 'scenes/teamLogic'
@@ -212,7 +212,7 @@ export const miniFiltersLogic = kea([
enabled: selectedMiniFilters.includes(x.key),
}))
},
- { resultEqualityCheck: objectsEqual },
+ { resultEqualityCheck: equal },
],
miniFiltersByKey: [
@@ -223,7 +223,7 @@ export const miniFiltersLogic = kea([
return acc
}, {})
},
- { resultEqualityCheck: objectsEqual },
+ { resultEqualityCheck: equal },
],
miniFiltersForTypeByKey: [
diff --git a/frontend/src/scenes/session-recordings/player/inspector/playerInspectorLogic.ts b/frontend/src/scenes/session-recordings/player/inspector/playerInspectorLogic.ts
index 4011356744..541460dc82 100644
--- a/frontend/src/scenes/session-recordings/player/inspector/playerInspectorLogic.ts
+++ b/frontend/src/scenes/session-recordings/player/inspector/playerInspectorLogic.ts
@@ -1,3 +1,4 @@
+import equal from 'fast-deep-equal'
import FuseClass from 'fuse.js'
import { actions, connect, events, kea, key, listeners, path, props, propsChanged, reducers, selectors } from 'kea'
import { loaders } from 'kea-loaders'
@@ -14,7 +15,7 @@ import api from 'lib/api'
import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types'
import { Dayjs, dayjs } from 'lib/dayjs'
import { featureFlagLogic } from 'lib/logic/featureFlagLogic'
-import { ceilMsToClosestSecond, eventToDescription, humanizeBytes, objectsEqual, toParams } from 'lib/utils'
+import { ceilMsToClosestSecond, eventToDescription, humanizeBytes, toParams } from 'lib/utils'
import { getText } from 'scenes/comments/Comment'
import {
InspectorListItemPerformance,
@@ -673,7 +674,7 @@ export const playerInspectorLogic = kea([
rawConsoleLogs: consoleLogs,
}
},
- { resultEqualityCheck: objectsEqual },
+ { resultEqualityCheck: equal },
],
notebookCommentItems: [
@@ -704,7 +705,9 @@ export const playerInspectorLogic = kea([
}
return items
},
- { resultEqualityCheck: objectsEqual },
+ {
+ resultEqualityCheck: equal,
+ },
],
commentItems: [
@@ -738,7 +741,7 @@ export const playerInspectorLogic = kea([
}
return items
},
- { resultEqualityCheck: objectsEqual },
+ { resultEqualityCheck: equal },
],
allContextItems: [
@@ -790,7 +793,7 @@ export const playerInspectorLogic = kea([
return items
},
- { resultEqualityCheck: objectsEqual },
+ { resultEqualityCheck: equal },
],
allItems: [
@@ -985,7 +988,7 @@ export const playerInspectorLogic = kea([
itemsByType,
}
},
- { resultEqualityCheck: objectsEqual },
+ { resultEqualityCheck: equal },
],
filteredItems: [
@@ -1028,7 +1031,7 @@ export const playerInspectorLogic = kea([
return acc
}, [] as InspectorListItem[])
},
- { resultEqualityCheck: objectsEqual },
+ { resultEqualityCheck: equal },
],
seekbarItems: [
@@ -1115,7 +1118,7 @@ export const playerInspectorLogic = kea([
return eventAndCommentItems
},
- { resultEqualityCheck: objectsEqual },
+ { resultEqualityCheck: equal },
],
inspectorDataState: [
@@ -1231,7 +1234,7 @@ export const playerInspectorLogic = kea([
}
return fuse.search(searchQuery).map((x) => x.item)
},
- { resultEqualityCheck: objectsEqual },
+ { resultEqualityCheck: equal },
],
allItemsList: [(s) => [s.allItems], (allItemsData): InspectorListItem[] => allItemsData.items],
@@ -1276,7 +1279,7 @@ export const playerInspectorLogic = kea([
},
})),
propsChanged(({ actions, props }, oldProps) => {
- if (!objectsEqual(props.matchingEventsMatchType, oldProps.matchingEventsMatchType)) {
+ if (!equal(props.matchingEventsMatchType, oldProps.matchingEventsMatchType)) {
actions.loadMatchingEvents()
}
}),
diff --git a/frontend/src/scenes/session-recordings/player/player-meta/PlayerMetaLinks.tsx b/frontend/src/scenes/session-recordings/player/player-meta/PlayerMetaLinks.tsx
index b35e943d48..457eea64c4 100644
--- a/frontend/src/scenes/session-recordings/player/player-meta/PlayerMetaLinks.tsx
+++ b/frontend/src/scenes/session-recordings/player/player-meta/PlayerMetaLinks.tsx
@@ -13,9 +13,7 @@ import {
import { LemonButton, LemonButtonProps, LemonDialog, LemonMenu, LemonMenuItems, LemonTag } from '@posthog/lemon-ui'
import { AccessControlAction } from 'lib/components/AccessControlAction'
-import { FEATURE_FLAGS } from 'lib/constants'
import { IconBlank } from 'lib/lemon-ui/icons'
-import { featureFlagLogic } from 'lib/logic/featureFlagLogic'
import { getAccessControlDisabledReason } from 'lib/utils/accessControlUtils'
import { useNotebookNode } from 'scenes/notebooks/Nodes/NotebookNodeContext'
import { NotebookSelectButton } from 'scenes/notebooks/NotebookSelectButton/NotebookSelectButton'
@@ -145,7 +143,6 @@ const MenuActions = ({ size }: { size: PlayerMetaBreakpoints }): JSX.Element =>
const { logicProps, isMuted, hasReachedExportFullVideoLimit } = useValues(sessionRecordingPlayerLogic)
const { deleteRecording, setIsFullScreen, exportRecordingToFile, exportRecordingToVideoFile, setMuted } =
useActions(sessionRecordingPlayerLogic)
- const { featureFlags } = useValues(featureFlagLogic)
const { skipInactivitySetting } = useValues(playerSettingsLogic)
const { setSkipInactivitySetting } = useActions(playerSettingsLogic)
@@ -204,26 +201,24 @@ const MenuActions = ({ size }: { size: PlayerMetaBreakpoints }): JSX.Element =>
'Export PostHog recording data to a JSON file. This can be loaded later into PostHog for playback.',
'data-attr': 'replay-export-posthog-json',
},
- isStandardMode && featureFlags[FEATURE_FLAGS.REPLAY_EXPORT_FULL_VIDEO]
- ? {
- label: (
-
- Export to MP4{' '}
-
- BETA
-
-
- ),
- status: hasReachedExportFullVideoLimit ? 'danger' : 'default',
- icon: ,
- onClick: () => exportRecordingToVideoFile(),
- tooltip: hasReachedExportFullVideoLimit
- ? 'You have reached your export limit.'
- : 'Export PostHog recording data to MP4 video file.',
- 'data-attr': 'replay-export-mp4',
- className: hasReachedExportFullVideoLimit ? 'replay-export-limit-reached-button' : '',
- }
- : null,
+ isStandardMode && {
+ label: (
+
+ Export to MP4{' '}
+
+ BETA
+
+
+ ),
+ status: hasReachedExportFullVideoLimit ? 'danger' : 'default',
+ icon: ,
+ onClick: () => exportRecordingToVideoFile(),
+ tooltip: hasReachedExportFullVideoLimit
+ ? 'You have reached your export limit.'
+ : 'Export PostHog recording data to MP4 video file.',
+ 'data-attr': 'replay-export-mp4',
+ className: hasReachedExportFullVideoLimit ? 'replay-export-limit-reached-button' : '',
+ },
]
if (logicProps.playerKey !== 'modal') {
diff --git a/frontend/src/scenes/session-recordings/player/sessionEventsDataLogic.ts b/frontend/src/scenes/session-recordings/player/sessionEventsDataLogic.ts
index 96b8bd34a7..a4fec4e5f7 100644
--- a/frontend/src/scenes/session-recordings/player/sessionEventsDataLogic.ts
+++ b/frontend/src/scenes/session-recordings/player/sessionEventsDataLogic.ts
@@ -1,3 +1,4 @@
+import equal from 'fast-deep-equal'
import { actions, beforeUnmount, connect, kea, key, listeners, path, props, reducers, selectors } from 'kea'
import { loaders } from 'kea-loaders'
import { subscriptions } from 'kea-subscriptions'
@@ -5,7 +6,6 @@ import posthog from 'posthog-js'
import api from 'lib/api'
import { Dayjs, dayjs } from 'lib/dayjs'
-import { objectsEqual } from 'lib/utils'
import { chainToElements } from 'lib/utils/elements-chain'
import { TimeTree } from 'lib/utils/time-tree'
@@ -230,7 +230,7 @@ AND properties.$lib != 'web'`
...AIEvents,
...exceptionEvents,
],
- { resultEqualityCheck: objectsEqual },
+ { resultEqualityCheck: equal },
],
eventViewportsItems: [
(s) => [s.sessionEventsData],
@@ -258,7 +258,7 @@ AND properties.$lib != 'web'`
)
return viewportEvents
},
- { resultEqualityCheck: objectsEqual },
+ { resultEqualityCheck: equal },
],
viewportForTimestamp: [
(s) => [s.eventViewportsItems],
diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingDataCoordinatorLogic.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingDataCoordinatorLogic.ts
index f3b489bd1c..38bfedbec3 100644
--- a/frontend/src/scenes/session-recordings/player/sessionRecordingDataCoordinatorLogic.ts
+++ b/frontend/src/scenes/session-recordings/player/sessionRecordingDataCoordinatorLogic.ts
@@ -1,3 +1,4 @@
+import equal from 'fast-deep-equal'
import { actions, beforeUnmount, connect, kea, key, listeners, path, props, reducers, selectors } from 'kea'
import { subscriptions } from 'kea-subscriptions'
import posthog from 'posthog-js'
@@ -5,7 +6,6 @@ import posthog from 'posthog-js'
import { EventType, customEvent, eventWithTime } from '@posthog/rrweb-types'
import { Dayjs, dayjs } from 'lib/dayjs'
-import { objectsEqual } from 'lib/utils'
import {
RecordingSegment,
@@ -311,7 +311,7 @@ export const sessionRecordingDataCoordinatorLogic = kea {
describe('recording viewed summary event', () => {
describe('play_time_ms tracking', () => {
+ const startPlaying = (): void => {
+ logic.actions.setPlay()
+ logic.actions.endBuffer()
+ }
+
beforeEach(() => {
jest.useFakeTimers({
now: new Date('2024-02-07T00:00:01.123Z'),
})
+ logic.unmount()
+ logic = sessionRecordingPlayerLogic({ sessionRecordingId: '2', playerKey: 'test' })
+ logic.mount()
})
it('initializes playingTimeTracking correctly', () => {
expect(logic.values.playingTimeTracking).toEqual({
- state: 'unknown',
+ state: 'paused',
lastTimestamp: null,
watchTime: 0,
bufferTime: 0,
+ firstPlayTime: undefined,
+ firstPlayStartTime: undefined,
})
})
it('sets buffering state with startBuffer', () => {
expect(logic.values.playingTimeTracking.lastTimestamp).toBeNull()
+ logic.actions.setPlay()
logic.actions.startBuffer()
expect(logic.values.playingTimeTracking.state).toBe('buffering')
expect(logic.values.playingTimeTracking.lastTimestamp).not.toBeNull()
})
- it('correctly tracks buffer time', () => {
+ it('tracks buffer time', () => {
+ logic.actions.setPlay()
logic.actions.startBuffer()
-
- expect(logic.values.playingTimeTracking.state).toBe('buffering')
- expect(logic.values.playingTimeTracking.lastTimestamp).toBe(0)
-
jest.advanceTimersByTime(1500)
logic.actions.endBuffer()
- expect(logic.values.playingTimeTracking.state).toBe('buffering')
expect(logic.values.playingTimeTracking.bufferTime).toBe(1500)
expect(logic.values.playingTimeTracking.watchTime).toBe(0)
})
- it('sets playing state with setPlay', () => {
- logic.actions.setPlay()
+ it('transitions to playing after endBuffer', () => {
+ startPlaying()
expect(logic.values.playingTimeTracking.state).toBe('playing')
- expect(logic.values.playingTimeTracking.lastTimestamp).toBe(0)
})
- it('accumulates watch time with setPause', () => {
- logic.actions.setPlay()
-
+ it('accumulates watch time during play', () => {
+ startPlaying()
jest.advanceTimersByTime(1000)
logic.actions.setPause()
- expect(logic.values.playingTimeTracking.state).toBe('paused')
expect(logic.values.playingTimeTracking.watchTime).toBe(1000)
})
- it('correctly separates play time from buffer time in alternating sequence', () => {
- // This test ensures we don't accumulate playing time while buffering
- // Scenario: 4 x 1-second play blocks with 3 x 1-second buffer blocks between them
- // Expected: 4 seconds play time, 3 seconds buffer time (total 7 seconds, but only 4 should count as play time)
+ it('separates play and buffer time when alternating', () => {
+ const playFor = (ms: number): void => {
+ startPlaying()
+ jest.advanceTimersByTime(ms)
+ logic.actions.setPause()
+ }
- // Play block 1 (1 second)
- logic.actions.setPlay()
- jest.advanceTimersByTime(1000)
- logic.actions.setPause()
+ const bufferFor = (ms: number): void => {
+ logic.actions.startBuffer()
+ jest.advanceTimersByTime(ms)
+ logic.actions.endBuffer()
+ }
- expect(logic.values.playingTimeTracking.watchTime).toBe(1000)
- expect(logic.values.playingTimeTracking.bufferTime).toBe(0)
+ playFor(1000)
+ bufferFor(1000)
+ playFor(1000)
+ bufferFor(1000)
+ playFor(1000)
+ bufferFor(1000)
+ playFor(1000)
- logic.actions.startBuffer()
- jest.advanceTimersByTime(1000)
- logic.actions.endBuffer()
-
- expect(logic.values.playingTimeTracking.watchTime).toBe(1000)
- expect(logic.values.playingTimeTracking.bufferTime).toBe(1000)
-
- logic.actions.setPlay()
- jest.advanceTimersByTime(1000)
- logic.actions.setPause()
-
- expect(logic.values.playingTimeTracking.watchTime).toBe(2000)
-
- logic.actions.startBuffer()
- jest.advanceTimersByTime(1000)
- logic.actions.endBuffer()
-
- expect(logic.values.playingTimeTracking.watchTime).toBe(2000)
- expect(logic.values.playingTimeTracking.bufferTime).toBe(2000)
-
- logic.actions.setPlay()
- jest.advanceTimersByTime(1000)
- logic.actions.setPause()
-
- expect(logic.values.playingTimeTracking.watchTime).toBe(3000)
-
- logic.actions.startBuffer()
- jest.advanceTimersByTime(1000)
- logic.actions.endBuffer()
-
- expect(logic.values.playingTimeTracking.watchTime).toBe(3000)
- expect(logic.values.playingTimeTracking.bufferTime).toBe(3000)
-
- logic.actions.setPlay()
- jest.advanceTimersByTime(1000)
- logic.actions.setPause()
-
- // Final verification: only 4 seconds of play time, not 7 seconds total
expect(logic.values.playingTimeTracking.watchTime).toBe(4000)
- // Should correctly track 3 seconds of buffer time
expect(logic.values.playingTimeTracking.bufferTime).toBe(3000)
})
- it('handles repeated endBuffer calls without losing time', () => {
- // This test simulates the real-world scenario where endBuffer gets called multiple times
+ it('preserves buffer time on repeated endBuffer calls', () => {
+ logic.actions.setPlay()
logic.actions.startBuffer()
- expect(logic.values.playingTimeTracking.state).toBe('buffering')
-
jest.advanceTimersByTime(1000)
logic.actions.endBuffer()
- expect(logic.values.playingTimeTracking.state).toBe('buffering')
- expect(logic.values.playingTimeTracking.bufferTime).toBe(1000)
-
- logic.actions.endBuffer()
-
- // This should NOT reset the buffer time to 0
- expect(logic.values.playingTimeTracking.bufferTime).toBe(1000)
+ const bufferTime = logic.values.playingTimeTracking.bufferTime
logic.actions.endBuffer()
logic.actions.endBuffer()
logic.actions.endBuffer()
- // Buffer time should remain stable
- expect(logic.values.playingTimeTracking.bufferTime).toBe(1000)
+ expect(logic.values.playingTimeTracking.bufferTime).toBe(bufferTime)
+ })
+
+ describe('time_to_first_play_ms tracking', () => {
+ it('preserves firstPlayTime after buffer interrupts post-threshold', () => {
+ startPlaying()
+ jest.advanceTimersByTime(1000)
+ jest.runOnlyPendingTimers()
+
+ expect(logic.values.playingTimeTracking.firstPlayTime).toBe(0)
+
+ logic.actions.startBuffer()
+ jest.runOnlyPendingTimers()
+
+ expect(logic.values.playingTimeTracking.firstPlayTime).toBe(0)
+ })
+
+ it('records firstPlayTime only once', () => {
+ startPlaying()
+ jest.advanceTimersByTime(1000)
+ jest.runOnlyPendingTimers()
+
+ const firstPlayTime = logic.values.playingTimeTracking.firstPlayTime
+
+ logic.actions.setPause()
+ logic.actions.setPlay()
+ jest.advanceTimersByTime(2000)
+ jest.runOnlyPendingTimers()
+
+ expect(logic.values.playingTimeTracking.firstPlayTime).toBe(firstPlayTime)
+ })
+
+ it('retries tracking after early interruption', () => {
+ jest.advanceTimersByTime(500)
+
+ logic.actions.setPause()
+ expect(logic.values.playingTimeTracking.firstPlayTime).toBeUndefined()
+
+ startPlaying()
+ jest.runOnlyPendingTimers()
+
+ expect(logic.values.playingTimeTracking.firstPlayTime).toBe(500)
+ })
})
})
describe('recording viewed summary analytics', () => {
it('captures all required analytics properties on unmount', () => {
- // Mock posthog.capture to spy on the analytics event
+ jest.useFakeTimers()
+ logic.unmount()
+ logic = sessionRecordingPlayerLogic({ sessionRecordingId: '2', playerKey: 'test' })
+ logic.mount()
+
const mockCapture = jest.fn()
;(posthog as any).capture = mockCapture
- // Use fake timers for this test
- jest.useFakeTimers()
-
- // Simulate user interaction that generates play time
logic.actions.setPlay()
- jest.advanceTimersByTime(1000) // Advance time by 1 second
+ logic.actions.endBuffer()
+ jest.advanceTimersByTime(1001)
logic.actions.setPause()
logic.actions.incrementClickCount()
logic.actions.incrementWarningCount(2)
logic.actions.incrementErrorCount()
- // Unmount to trigger the analytics event
logic.unmount()
expect(mockCapture).toHaveBeenCalledWith(
'recording viewed summary',
expect.objectContaining({
viewed_time_ms: expect.any(Number),
- play_time_ms: 1000,
- buffer_time_ms: 0,
+ play_time_ms: expect.any(Number),
+ buffer_time_ms: expect.any(Number),
+ time_to_first_play_ms: expect.any(Number),
rrweb_warning_count: 2,
error_count_during_recording_playback: 1,
engagement_score: 1,
@@ -515,14 +522,13 @@ describe('sessionRecordingPlayerLogic', () => {
recording_age_ms: undefined,
})
)
+ expect(mockCapture.mock.calls[0][1].time_to_first_play_ms).toBe(0)
})
it('captures "no playtime summary" event when play_time_ms is 0', async () => {
- // Mock posthog.capture to spy on the analytics event
const mockCapture = jest.fn()
;(posthog as any).capture = mockCapture
- // Don't play the recording, just unmount
logic.unmount()
expect(mockCapture).toHaveBeenCalledWith(
@@ -540,7 +546,6 @@ describe('sessionRecordingPlayerLogic', () => {
const mockCapture = jest.fn()
;(posthog as any).capture = mockCapture
- // Simulate multiple clicks
logic.actions.incrementClickCount()
logic.actions.incrementClickCount()
logic.actions.incrementClickCount()
diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts
index f148a0a6f0..631953ad96 100644
--- a/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts
+++ b/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts
@@ -71,6 +71,7 @@ export interface PlayerTimeTracking {
lastTimestamp: number | null
watchTime: number
bufferTime: number
+ firstPlayTime: number | undefined
}
export interface RecordingViewedSummaryAnalytics {
@@ -80,6 +81,7 @@ export interface RecordingViewedSummaryAnalytics {
// (this could be longer than the duration, since someone could seek around multiple times)
play_time_ms?: number
buffer_time_ms?: number
+ time_to_first_play_ms?: number
recording_duration_ms?: number
recording_age_ms?: number
recording_retention_period_days?: number
@@ -136,6 +138,17 @@ const smoothingWeights = [
0.07,
]
+const trackingStateMap: Record = {
+ [SessionPlayerState.PLAY]: 'playing',
+ [SessionPlayerState.PAUSE]: 'paused',
+ [SessionPlayerState.BUFFER]: 'buffering',
+ [SessionPlayerState.ERROR]: 'errored',
+ [SessionPlayerState.READY]: 'paused',
+ [SessionPlayerState.SKIP]: 'playing',
+ [SessionPlayerState.SKIP_TO_MATCHING_EVENT]: 'playing',
+ [SessionPlayerState.SCRUB]: 'playing',
+}
+
const isMediaElementPlaying = (element: HTMLMediaElement): boolean =>
!!(element.currentTime > 0 && !element.paused && !element.ended && element.readyState > 2)
@@ -181,41 +194,37 @@ function isUserActivity(snapshot: eventWithTime): boolean {
const updatePlayerTimeTracking = (
current: PlayerTimeTracking,
- newState: PlayerTimeTracking['state']
+ newState: PlayerTimeTracking['state'],
+ openTime?: number
): PlayerTimeTracking => {
+ const now = performance.now()
+
// if we were just playing then update watch time
const newWatchTime =
current.lastTimestamp !== null && current.state === 'playing'
- ? current.watchTime + (performance.now() - current.lastTimestamp)
+ ? current.watchTime + (now - current.lastTimestamp)
: current.watchTime
// if we were just buffering then update buffer time
const newBufferTime =
current.lastTimestamp !== null && current.state === 'buffering'
- ? current.bufferTime + (performance.now() - current.lastTimestamp)
+ ? current.bufferTime + (now - current.lastTimestamp)
: current.bufferTime
- const newLastTimestamp = ['paused', 'ended', 'errored'].includes(newState) ? null : performance.now()
+ const newLastTimestamp = ['paused', 'ended', 'errored'].includes(newState) ? null : now
+
+ const canRecordTimeToFirstPlay =
+ current.firstPlayTime === undefined && openTime !== undefined && newState === 'playing'
return {
state: newState,
lastTimestamp: newLastTimestamp,
watchTime: newWatchTime,
bufferTime: newBufferTime,
+ firstPlayTime: canRecordTimeToFirstPlay ? now - openTime : current.firstPlayTime,
}
}
-const updatePlayerTimeTrackingIfChanged = (
- current: PlayerTimeTracking,
- newState: PlayerTimeTracking['state']
-): PlayerTimeTracking => {
- if (current.state === newState) {
- return current
- }
-
- return updatePlayerTimeTracking(current, newState)
-}
-
function wrapFetchAndReport({
fetch,
onError,
@@ -437,6 +446,7 @@ export const sessionRecordingPlayerLogic = kea(
schedulePlayerTimeTracking: true,
setQuickEmojiIsOpen: (quickEmojiIsOpen: boolean) => ({ quickEmojiIsOpen }),
updatePlayerTimeTracking: true,
+ setPlayerTimeTrackingState: (tracking: PlayerTimeTracking) => ({ tracking }),
exportRecordingToVideoFile: true,
markViewed: (delay?: number) => ({ delay }),
setWasMarkedViewed: (wasMarkedViewed: boolean) => ({ wasMarkedViewed }),
@@ -446,7 +456,7 @@ export const sessionRecordingPlayerLogic = kea(
setMuted: (muted: boolean) => ({ muted }),
setSkipToFirstMatchingEvent: (skipToFirstMatchingEvent: boolean) => ({ skipToFirstMatchingEvent }),
}),
- reducers(({ props }) => ({
+ reducers(() => ({
skipToFirstMatchingEvent: [
false,
{
@@ -584,67 +594,11 @@ export const sessionRecordingPlayerLogic = kea(
lastTimestamp: null,
watchTime: 0,
bufferTime: 0,
+ firstPlayTime: undefined,
} as PlayerTimeTracking,
{
- updatePlayerTimeTracking: (state) => {
- // called on a timer to avoid inactive watching from not capturing a clear time
- return ['playing', 'buffering'].includes(state.state)
- ? updatePlayerTimeTracking(state, state.state)
- : state
- },
- startBuffer: (state) => {
- if (props.mode === SessionRecordingPlayerMode.Preview) {
- return state
- }
- return updatePlayerTimeTrackingIfChanged(state, 'buffering')
- },
- endBuffer: (state) => {
- if (props.mode === SessionRecordingPlayerMode.Preview) {
- return state
- }
-
- // endBuffer is often called later than start playing, we only need to act on it, if we were just buffering
- if (state.state !== 'buffering') {
- return state
- }
-
- // don't change the state
- return updatePlayerTimeTracking(state, state.state)
- },
- setPlay: (state) => {
- if (props.mode === SessionRecordingPlayerMode.Preview) {
- return state
- }
-
- return updatePlayerTimeTrackingIfChanged(state, 'playing')
- },
- setPause: (state) => {
- if (props.mode === SessionRecordingPlayerMode.Preview) {
- return state
- }
-
- return updatePlayerTimeTrackingIfChanged(state, 'paused')
- },
- setEndReached: (state, { reached }) => {
- if (props.mode === SessionRecordingPlayerMode.Preview) {
- return state
- }
-
- if (!reached) {
- return state
- }
-
- return updatePlayerTimeTrackingIfChanged(state, 'ended')
- },
- setPlayerError: (state) => {
- if (props.mode === SessionRecordingPlayerMode.Preview) {
- return state
- }
-
- return updatePlayerTimeTrackingIfChanged(state, 'errored')
- },
- seekToTime: (state) => {
- return state
+ setPlayerTimeTrackingState: (state, { tracking }) => {
+ return objectsEqual(state, tracking) ? state : tracking
},
},
],
@@ -1390,6 +1344,9 @@ export const sessionRecordingPlayerLogic = kea(
if (nextTimestamp !== undefined) {
actions.seekToTimestamp(nextTimestamp, true)
}
+
+ cache.disposables.dispose('playerTimeTracking')
+ actions.schedulePlayerTimeTracking()
},
markViewed: async ({ delay }, breakpoint) => {
breakpoint()
@@ -1781,15 +1738,30 @@ export const sessionRecordingPlayerLogic = kea(
await document.exitFullscreen()
}
},
+ updatePlayerTimeTracking: () => {
+ if (props.mode === SessionRecordingPlayerMode.Preview) {
+ return
+ }
+
+ // Map actual player state to tracking state
+ // we might be buffering data, while a user is watching already loaded data
+ // so we need to track the state of the player, not the state of this logic
+ const actualPlayerState = values.currentPlayerState
+ const desiredState = trackingStateMap[actualPlayerState]
+ const newState = updatePlayerTimeTracking(values.playingTimeTracking, desiredState, cache.openTime)
+ actions.setPlayerTimeTrackingState(newState)
+ },
schedulePlayerTimeTracking: () => {
- const currentState = values.playingTimeTracking.state
- const interval = currentState === 'playing' ? 5000 : 30000
+ const hasCompletedFirstPlay = values.playingTimeTracking.firstPlayTime !== undefined
cache.disposables.add(() => {
- const timerId = setTimeout(() => {
- actions.updatePlayerTimeTracking()
- actions.schedulePlayerTimeTracking()
- }, interval)
+ const timerId = setTimeout(
+ () => {
+ actions.updatePlayerTimeTracking()
+ actions.schedulePlayerTimeTracking()
+ },
+ hasCompletedFirstPlay ? 500 : 1000
+ )
return () => clearTimeout(timerId)
}, 'playerTimeTracking')
},
@@ -1838,6 +1810,8 @@ export const sessionRecordingPlayerLogic = kea(
if (value === SessionPlayerState.PLAY && !values.wasMarkedViewed) {
actions.markViewed(0)
}
+ // Update tracking state whenever player state changes
+ actions.updatePlayerTimeTracking()
},
})),
@@ -1859,6 +1833,7 @@ export const sessionRecordingPlayerLogic = kea(
viewed_time_ms: cache.openTime !== undefined ? performance.now() - cache.openTime : undefined,
play_time_ms: playTimeMs,
buffer_time_ms: values.playingTimeTracking.bufferTime || 0,
+ time_to_first_play_ms: values.playingTimeTracking.firstPlayTime,
recording_duration_ms: values.sessionPlayerData ? values.sessionPlayerData.durationMs : undefined,
recording_age_ms:
values.sessionPlayerData && values.sessionPlayerData.segments.length > 0
@@ -1867,9 +1842,9 @@ export const sessionRecordingPlayerLogic = kea(
recording_retention_period_days: values.sessionPlayerData.sessionRetentionPeriodDays ?? undefined,
rrweb_warning_count: values.warningCount,
error_count_during_recording_playback: values.errorCount,
- // as a starting and very loose measure of engagement, we count clicks
engagement_score: values.clickCount,
}
+
posthog.capture(
playTimeMs === 0 ? 'recording viewed with no playtime summary' : 'recording viewed summary',
summaryAnalytics
@@ -1895,9 +1870,9 @@ export const sessionRecordingPlayerLogic = kea(
}
cache.openTime = performance.now()
- // we rely on actions hitting a reducer to update the timer
- // let's ping it once in a while so that if the user
- // is autoplaying and doesn't interact we get a more recent value
+ // Update tracking state immediately to capture initial state
+ actions.updatePlayerTimeTracking()
+ // Schedule periodic updates
actions.schedulePlayerTimeTracking()
}),
])
diff --git a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.test.ts b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.test.ts
index 6fbbe3d54e..4e1b61f0a0 100644
--- a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.test.ts
+++ b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.test.ts
@@ -23,14 +23,20 @@ describe('sessionRecordingsPlaylistLogic', () => {
viewed: false,
recording_duration: 10,
start_time: '2023-10-12T16:55:36.404000Z',
+ end_time: '2023-10-12T16:55:46.404000Z',
console_error_count: 50,
+ viewers: [],
+ snapshot_source: 'web' as const,
}
const bRecording = {
id: 'def',
viewed: false,
recording_duration: 10,
start_time: '2023-05-12T16:55:36.404000Z',
+ end_time: '2023-05-12T16:55:46.404000Z',
console_error_count: 100,
+ viewers: [],
+ snapshot_source: 'web' as const,
}
const listOfSessionRecordings = [aRecording, bRecording]
const offsetRecording = {
@@ -38,7 +44,10 @@ describe('sessionRecordingsPlaylistLogic', () => {
viewed: false,
recording_duration: 10,
start_time: '2023-08-12T16:55:36.404000Z',
+ end_time: '2023-08-12T16:55:46.404000Z',
console_error_count: 75,
+ viewers: [],
+ snapshot_source: 'web' as const,
}
beforeEach(() => {
@@ -128,7 +137,7 @@ describe('sessionRecordingsPlaylistLogic', () => {
describe('global logic', () => {
beforeEach(() => {
logic = sessionRecordingsPlaylistLogic({
- key: 'tests',
+ logicKey: 'tests',
updateSearchParams: true,
})
logic.mount()
@@ -271,7 +280,7 @@ describe('sessionRecordingsPlaylistLogic', () => {
it('reads filters from the logic props', async () => {
logic = sessionRecordingsPlaylistLogic({
- key: 'tests-with-props',
+ logicKey: 'tests-with-props',
filters: {
duration: [],
filter_group: {
@@ -387,7 +396,7 @@ describe('sessionRecordingsPlaylistLogic', () => {
router.actions.push('/replay/recent', { sessionRecordingId: 'abc' })
logic = sessionRecordingsPlaylistLogic({
- key: 'hash-recording-tests',
+ logicKey: 'hash-recording-tests',
updateSearchParams: true,
})
logic.mount()
@@ -395,8 +404,6 @@ describe('sessionRecordingsPlaylistLogic', () => {
await expectLogic(logic).toDispatchActions(['loadSessionRecordingsSuccess']).toMatchValues({
selectedRecordingId: 'abc',
})
-
- logic.actions.setSelectedRecordingId('1234')
})
})
@@ -553,7 +560,7 @@ describe('sessionRecordingsPlaylistLogic', () => {
describe('person specific logic', () => {
beforeEach(() => {
logic = sessionRecordingsPlaylistLogic({
- key: 'cool_user_99',
+ logicKey: 'cool_user_99',
personUUID: 'cool_user_99',
updateSearchParams: true,
})
@@ -570,14 +577,16 @@ describe('sessionRecordingsPlaylistLogic', () => {
router.actions.push('/person/123', { sessionRecordingId: 'abc' })
expect(router.values.searchParams).toHaveProperty('sessionRecordingId', 'abc')
- await expectLogic(logic).toDispatchActions([logic.actionCreators.setSelectedRecordingId('abc')])
+ await expectLogic(logic)
+ .toDispatchActions([logic.actionCreators.setSelectedRecordingId('abc')])
+ .toFinishAllListeners()
})
})
describe('total filters count', () => {
beforeEach(() => {
logic = sessionRecordingsPlaylistLogic({
- key: 'cool_user_99',
+ logicKey: 'cool_user_99',
personUUID: 'cool_user_99',
updateSearchParams: true,
})
@@ -637,7 +646,7 @@ describe('sessionRecordingsPlaylistLogic', () => {
describe('resetting filters', () => {
beforeEach(() => {
logic = sessionRecordingsPlaylistLogic({
- key: 'cool_user_99',
+ logicKey: 'cool_user_99',
personUUID: 'cool_user_99',
updateSearchParams: true,
})
@@ -672,7 +681,7 @@ describe('sessionRecordingsPlaylistLogic', () => {
describe('set filters', () => {
beforeEach(() => {
logic = sessionRecordingsPlaylistLogic({
- key: 'cool_user_99',
+ logicKey: 'cool_user_99',
personUUID: 'cool_user_99',
updateSearchParams: true,
})
diff --git a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts
index b6d12c4e28..be4b4b7f92 100644
--- a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts
+++ b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts
@@ -520,6 +520,7 @@ export const sessionRecordingsPlaylistLogic = kea s.id === values.selectedRecordingId)
+
+ // If recording not found in current list, reload with the new selected recording
+ // The backend will automatically include it via session_recording_id parameter
+ if (recordingIndex === -1 && values.selectedRecordingId) {
+ actions.loadSessionRecordings()
+ }
+
+ // If we are at the end of the list then try to load more
if (recordingIndex === values.sessionRecordings.length - 1) {
actions.maybeLoadSessionRecordings('older')
}
diff --git a/frontend/src/scenes/sessions/SessionProfileScene.tsx b/frontend/src/scenes/sessions/SessionProfileScene.tsx
new file mode 100644
index 0000000000..89bb190288
--- /dev/null
+++ b/frontend/src/scenes/sessions/SessionProfileScene.tsx
@@ -0,0 +1,130 @@
+import { BindLogic, useActions, useValues } from 'kea'
+
+import { IconRefresh } from '@posthog/icons'
+import { LemonButton } from '@posthog/lemon-ui'
+
+import { CopyToClipboardInline } from 'lib/components/CopyToClipboard'
+import { NotFound } from 'lib/components/NotFound'
+import { TZLabel } from 'lib/components/TZLabel'
+import ViewRecordingButton from 'lib/components/ViewRecordingButton/ViewRecordingButton'
+import { SpinnerOverlay } from 'lib/lemon-ui/Spinner/Spinner'
+import { PersonDisplay } from 'scenes/persons/PersonDisplay'
+import { Scene, SceneExport } from 'scenes/sceneTypes'
+import { sceneConfigurations } from 'scenes/scenes'
+import { urls } from 'scenes/urls'
+
+import { SceneContent } from '~/layout/scenes/components/SceneContent'
+import { SceneDivider } from '~/layout/scenes/components/SceneDivider'
+import { SceneTitleSection } from '~/layout/scenes/components/SceneTitleSection'
+import { ActivityTab } from '~/types'
+
+import { SessionDetailsCard } from './components/SessionDetailsCard'
+import { SessionEventsList } from './components/SessionEventsList'
+import { SessionMetricsCard } from './components/SessionMetricsCard'
+import { SessionProfileLogicProps, sessionProfileLogic } from './sessionProfileLogic'
+
+export const scene: SceneExport = {
+ component: SessionProfileScene,
+ logic: sessionProfileLogic,
+ paramsToProps: ({ params: { id } }) => ({ sessionId: decodeURIComponent(id) }),
+}
+
+export function SessionProfileScene(): JSX.Element {
+ const {
+ sessionId,
+ sessionData,
+ isInitialLoading,
+ sessionDataLoading,
+ sessionEventsLoading,
+ hasRecording,
+ hasRecordingLoading,
+ } = useValues(sessionProfileLogic)
+ const { loadSessionData } = useActions(sessionProfileLogic)
+
+ if (!sessionData && !isInitialLoading) {
+ return
+ }
+
+ if (isInitialLoading) {
+ return
+ }
+
+ return (
+
+
+
+ }
+ onClick={() => loadSessionData()}
+ loading={sessionDataLoading || sessionEventsLoading}
+ >
+ Refresh
+
+ >
+ }
+ />
+
+
+
+
+ {sessionData && (
+
+
+ Session ID
+
+ {sessionId}
+
+
+
+
+
+
+ )}
+
+
+
+
+
+
+ )
+}
diff --git a/frontend/src/scenes/sessions/components/SessionDetailsCard.tsx b/frontend/src/scenes/sessions/components/SessionDetailsCard.tsx
new file mode 100644
index 0000000000..43aa5dc6fd
--- /dev/null
+++ b/frontend/src/scenes/sessions/components/SessionDetailsCard.tsx
@@ -0,0 +1,209 @@
+import { useValues } from 'kea'
+import { useState } from 'react'
+
+import { IconCollapse, IconExpand } from '@posthog/icons'
+import { LemonButton, LemonCard, LemonDivider, LemonTag, Link } from '@posthog/lemon-ui'
+
+import { TZLabel } from 'lib/components/TZLabel'
+
+import { SessionData, sessionProfileLogic } from '../sessionProfileLogic'
+
+interface DetailRowProps {
+ label: string
+ value: React.ReactNode
+ className?: string
+}
+
+function DetailRow({ label, value, className }: DetailRowProps): JSX.Element {
+ return (
+
+ {label}:
+ {value}
+
+ )
+}
+
+interface DetailSectionProps {
+ title: string
+ children: React.ReactNode
+ defaultExpanded?: boolean
+ showBorder?: boolean
+}
+
+function DetailSection({
+ title,
+ children,
+ defaultExpanded = true,
+ showBorder = true,
+}: DetailSectionProps): JSX.Element {
+ const [isExpanded, setIsExpanded] = useState(defaultExpanded)
+
+ return (
+
+ setIsExpanded(!isExpanded)}
+ >
+ : }
+ size="small"
+ onClick={(e) => {
+ e.stopPropagation()
+ setIsExpanded(!isExpanded)
+ }}
+ />
+ {title}
+
+ {isExpanded && {children} }
+
+ )
+}
+
+export interface SessionDetailsCardProps {
+ sessionData: SessionData | null
+ isLoading?: boolean
+}
+
+export function SessionDetailsCard(): JSX.Element | null {
+ const { sessionData, isInitialLoading, supportTicketEvents } = useValues(sessionProfileLogic)
+ // only support zendesk ticket for our organization through feature flag
+ const hasSupportTickets = supportTicketEvents.length > 0
+
+ if (!sessionData || isInitialLoading) {
+ return null
+ }
+
+ const hasAttribution =
+ sessionData.entry_utm_source ||
+ sessionData.entry_utm_campaign ||
+ sessionData.entry_utm_medium ||
+ sessionData.entry_referring_domain
+
+ const hasUrls =
+ sessionData.entry_current_url ||
+ sessionData.end_current_url ||
+ sessionData.last_external_click_url ||
+ (sessionData.urls && sessionData.urls.length > 0)
+
+ return (
+
+
+ {sessionData.channel_type && (
+ {sessionData.channel_type}} />
+ )}
+
+ {sessionData.is_bounce ? 'Yes' : 'No'}
+
+ }
+ />
+ {sessionData.entry_hostname && }
+ {sessionData.entry_pathname && }
+
+
+ {hasAttribution && (
+
+ {sessionData.entry_referring_domain && (
+
+ )}
+ {sessionData.entry_utm_source && (
+
+ )}
+ {sessionData.entry_utm_campaign && (
+
+ )}
+ {sessionData.entry_utm_medium && (
+
+ )}
+
+ )}
+
+ {hasUrls && (
+
+ {sessionData.entry_current_url && (
+
+ {sessionData.entry_current_url}
+
+ }
+ />
+ )}
+ {sessionData.end_current_url && (
+
+ {sessionData.end_current_url}
+
+ }
+ />
+ )}
+ {sessionData.last_external_click_url && (
+
+ {sessionData.last_external_click_url}
+
+ }
+ />
+ )}
+ {sessionData.urls && sessionData.urls.length > 0 && (
+
+
+ All URLs ({sessionData.urls.length}):
+
+
+ {sessionData.urls.map((url, index) => (
+
+
+ {index + 1}. {url}
+
+
+ ))}
+
+
+ )}
+
+ )}
+
+ {hasSupportTickets && (
+
+ {supportTicketEvents.map((event, index) => {
+ const ticketId = event.properties?.zendesk_ticket_id
+ // only support zendesk ticket for our organization through feature flag
+ const zendeskUrl = ticketId ? `https://posthoghelp.zendesk.com/agent/tickets/${ticketId}` : null
+
+ return (
+
+ {index > 0 && }
+
+
+ :
+
+
+ {zendeskUrl ? (
+
+ Ticket #{ticketId}
+
+ ) : (
+ No ticket ID
+ )}
+
+
+
+ )
+ })}
+
+ )}
+
+ )
+}
diff --git a/frontend/src/scenes/sessions/components/SessionEventDetails.tsx b/frontend/src/scenes/sessions/components/SessionEventDetails.tsx
new file mode 100644
index 0000000000..44c577ad86
--- /dev/null
+++ b/frontend/src/scenes/sessions/components/SessionEventDetails.tsx
@@ -0,0 +1,50 @@
+import { ErrorDisplay } from 'lib/components/Errors/ErrorDisplay'
+import { EventPropertyTabs } from 'lib/components/EventPropertyTabs/EventPropertyTabs'
+import { SimpleKeyValueList } from 'lib/components/SimpleKeyValueList'
+import { dayjs } from 'lib/dayjs'
+import { Spinner } from 'lib/lemon-ui/Spinner'
+
+import { RecordingEventType } from '~/types'
+
+export interface SessionEventDetailsProps {
+ event: RecordingEventType
+}
+
+export function SessionEventDetails({ event }: SessionEventDetailsProps): JSX.Element {
+ if (!event.fullyLoaded) {
+ return (
+
+
+ Loading event details...
+
+ )
+ }
+
+ return (
+
+ {
+ switch (tabKey) {
+ case 'error_display':
+ const eventId =
+ ('uuid' in event ? event.uuid : null) ||
+ ('id' in event ? event.id : null) ||
+ dayjs(event.timestamp).toISOString() ||
+ `error-${event.timestamp}`
+ return
+ case 'raw':
+ return (
+
+ {JSON.stringify(event, null, 2)}
+
+ )
+ default:
+ return
+ }
+ }}
+ />
+
+ )
+}
diff --git a/frontend/src/scenes/sessions/components/SessionEventItem.tsx b/frontend/src/scenes/sessions/components/SessionEventItem.tsx
new file mode 100644
index 0000000000..0a32e3a46d
--- /dev/null
+++ b/frontend/src/scenes/sessions/components/SessionEventItem.tsx
@@ -0,0 +1,203 @@
+import clsx from 'clsx'
+
+import {
+ BaseIcon,
+ IconBolt,
+ IconCollapse,
+ IconCursor,
+ IconExpand,
+ IconEye,
+ IconLeave,
+ IconLogomark,
+ IconTerminal,
+} from '@posthog/icons'
+import { LemonButton } from '@posthog/lemon-ui'
+
+import { getExceptionAttributes } from 'lib/components/Errors/utils'
+import { PropertyKeyInfo } from 'lib/components/PropertyKeyInfo'
+import { TZLabel } from 'lib/components/TZLabel'
+import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types'
+
+import { RecordingEventType } from '~/types'
+
+import { SessionEventDetails } from './SessionEventDetails'
+
+// Exception title pill component for compact exception display
+function ExceptionTitlePill({ event }: { event: RecordingEventType }): JSX.Element {
+ const errorProps = getExceptionAttributes(event.properties || {})
+
+ const type = errorProps.type
+ const value = errorProps.value
+
+ if (!type && !value) {
+ return Exception
+ }
+
+ return (
+
+ {type && {type}}
+ {type && value && :}
+ {value && {value}}
+
+ )
+}
+
+// Event type to icon mapping based on PlayerInspectorListItem pattern
+export function eventToIcon(event: string | undefined | null): React.ComponentType {
+ switch (event) {
+ case '$pageview':
+ case '$screen':
+ return IconEye
+ case '$pageleave':
+ return IconLeave
+ case '$autocapture':
+ return IconBolt
+ case '$exception':
+ case 'error':
+ return IconTerminal
+ default:
+ // Check if it's a core PostHog event
+ if (event && event.startsWith('$')) {
+ return IconLogomark
+ }
+ // Custom events
+ if (event !== undefined && event !== null) {
+ return IconCursor
+ }
+ return BaseIcon
+ }
+}
+
+// Determine highlight color based on event type
+// Pattern from playerInspectorLogic.ts
+export function getEventHighlightColor(event: RecordingEventType): 'danger' | 'warning' | 'primary' | null {
+ const eventName = event.event?.toLowerCase()
+
+ // Exception events get danger highlight
+ if (eventName === '$exception' || event.properties?.$exception_message) {
+ return 'danger'
+ }
+
+ // Console logs - errors and warnings
+ const logLevel = event.properties?.$console_log_level
+ if (logLevel === 'error') {
+ return 'danger'
+ }
+ if (logLevel === 'warn') {
+ return 'warning'
+ }
+
+ // Network errors (4xx, 5xx responses)
+ const responseStatus = event.properties?.$response_status || event.properties?.status_code
+ if (responseStatus && responseStatus >= 400) {
+ return 'danger'
+ }
+
+ return null
+}
+
+export interface SessionEventItemProps {
+ event: RecordingEventType
+ index: number
+ isExpanded: boolean
+ onToggleExpand: (index: number) => void
+ onLoadEventDetails?: (eventId: string, eventName: string) => void
+}
+
+export function SessionEventItem({
+ event,
+ index,
+ isExpanded,
+ onToggleExpand,
+ onLoadEventDetails,
+}: SessionEventItemProps): JSX.Element {
+ const EventIcon = eventToIcon(event.event)
+ const highlightColor = getEventHighlightColor(event)
+
+ const handleToggle = (): void => {
+ if (!isExpanded && !event.fullyLoaded && onLoadEventDetails) {
+ onLoadEventDetails(event.id, event.event)
+ }
+ onToggleExpand(index)
+ }
+
+ return (
+
+
+
+
+
+
+
+
+ {event.event === '$exception' ? (
+
+ ) : (
+ event.properties?.$pathname &&
+ event.properties?.$host && (
+
+ - {event.properties.$host}
+ {event.properties.$pathname}
+
+ )
+ )}
+
+ : }
+ size="small"
+ noPadding
+ onClick={(e) => {
+ e.stopPropagation()
+ handleToggle()
+ }}
+ />
+
+
+ {isExpanded && (
+
+ )}
+
+ )
+}
diff --git a/frontend/src/scenes/sessions/components/SessionEventsList.tsx b/frontend/src/scenes/sessions/components/SessionEventsList.tsx
new file mode 100644
index 0000000000..9d993df95a
--- /dev/null
+++ b/frontend/src/scenes/sessions/components/SessionEventsList.tsx
@@ -0,0 +1,143 @@
+import clsx from 'clsx'
+import { useActions, useValues } from 'kea'
+import { useState } from 'react'
+
+import { IconCollapse, IconExpand, IconSort } from '@posthog/icons'
+import { LemonButton, LemonCard } from '@posthog/lemon-ui'
+
+import { RecordingEventType } from '~/types'
+
+import { sessionProfileLogic } from '../sessionProfileLogic'
+import { SessionEventItem } from './SessionEventItem'
+
+export interface SessionEventsListProps {
+ events: RecordingEventType[] | null
+ totalEventCount?: number | null
+ isLoading?: boolean
+ isLoadingMore?: boolean
+ hasMoreEvents?: boolean
+ onLoadEventDetails?: (eventId: string, eventName: string) => void
+ onLoadMoreEvents?: () => void
+ sortOrder: 'asc' | 'desc'
+ onSortOrderChange: (sortOrder: 'asc' | 'desc') => void
+}
+
+export function SessionEventsList(): JSX.Element {
+ const {
+ sessionEvents,
+ totalEventCount,
+ isInitialLoading,
+ isLoadingMore,
+ hasMoreEvents,
+ sortOrder,
+ eventsListFolded,
+ } = useValues(sessionProfileLogic)
+ const { loadEventDetails, loadMoreSessionEvents, setSortOrder, setEventsListFolded } =
+ useActions(sessionProfileLogic)
+ const [expandedIndices, setExpandedIndices] = useState>(new Set())
+
+ const handleToggleExpand = (index: number): void => {
+ setExpandedIndices((prev) => {
+ const newSet = new Set(prev)
+ if (newSet.has(index)) {
+ newSet.delete(index)
+ } else {
+ newSet.add(index)
+ }
+ return newSet
+ })
+ }
+
+ const handleCollapseAll = (): void => {
+ setExpandedIndices(new Set())
+ }
+
+ const handleScroll = (e: React.UIEvent): void => {
+ if (!hasMoreEvents || isLoadingMore || !loadMoreSessionEvents) {
+ return
+ }
+
+ const target = e.currentTarget
+ const scrollBottom = target.scrollHeight - target.scrollTop - target.clientHeight
+
+ // Load more when within 200px of bottom
+ if (scrollBottom < 200) {
+ loadMoreSessionEvents()
+ }
+ }
+
+ if (isInitialLoading) {
+ return (
+
+ Loading events...
+
+ )
+ }
+
+ if (!sessionEvents || sessionEvents?.length === 0) {
+ return (
+
+ No events found
+
+ )
+ }
+
+ return (
+
+ {/* Header */}
+
+
+ : }
+ onClick={() => setEventsListFolded(!eventsListFolded)}
+ />
+
+ Events (
+ {totalEventCount !== null && totalEventCount !== undefined
+ ? totalEventCount
+ : sessionEvents.length}
+ )
+
+
+
+ }
+ onClick={() => setSortOrder(sortOrder === 'asc' ? 'desc' : 'asc')}
+ tooltip={sortOrder === 'asc' ? 'Sorted: Oldest first' : 'Sorted: Newest first'}
+ >
+ {sortOrder === 'asc' ? 'Oldest first' : 'Newest first'}
+
+
+ Collapse All
+
+
+
+
+ {/* Events List */}
+ {!eventsListFolded && (
+
+ {sessionEvents?.map((event, index) => (
+
+ ))}
+ {hasMoreEvents && (
+
+ {isLoadingMore ? 'Loading more events...' : 'Scroll for more'}
+
+ )}
+
+ )}
+
+ )
+}
diff --git a/frontend/src/scenes/sessions/components/SessionMetricsCard.tsx b/frontend/src/scenes/sessions/components/SessionMetricsCard.tsx
new file mode 100644
index 0000000000..bc5cf69698
--- /dev/null
+++ b/frontend/src/scenes/sessions/components/SessionMetricsCard.tsx
@@ -0,0 +1,64 @@
+import { useValues } from 'kea'
+
+import { LemonCard, LemonSkeleton } from '@posthog/lemon-ui'
+
+import { humanFriendlyDuration } from 'lib/utils'
+
+import { sessionProfileLogic } from '../sessionProfileLogic'
+
+interface MetricCardProps {
+ title: string
+ value: string | number | null
+ isLoading?: boolean
+ subtitle?: string
+}
+
+function MetricCard({ title, value, isLoading, subtitle }: MetricCardProps): JSX.Element {
+ return (
+
+
+ {title}
+ {isLoading ? (
+
+ ) : (
+ <>
+ {value ?? '-'}
+ {subtitle && {subtitle} }
+ >
+ )}
+
+
+ )
+}
+
+export function SessionMetricsCard(): JSX.Element {
+ const { sessionData, sessionDuration, uniqueUrlCount, totalEventCount, otherEventCount, isInitialLoading } =
+ useValues(sessionProfileLogic)
+ return (
+
+
+
+
+ 0 ? ` + ${otherEventCount} other` : ''
+ }`
+ : undefined
+ }
+ isLoading={isInitialLoading}
+ />
+
+
+ )
+}
diff --git a/frontend/src/scenes/sessions/sessionProfileLogic.ts b/frontend/src/scenes/sessions/sessionProfileLogic.ts
new file mode 100644
index 0000000000..eb17acd997
--- /dev/null
+++ b/frontend/src/scenes/sessions/sessionProfileLogic.ts
@@ -0,0 +1,630 @@
+import { actions, events, kea, key, listeners, path, props, reducers, selectors } from 'kea'
+import { loaders } from 'kea-loaders'
+
+import api from 'lib/api'
+
+import { NodeKind } from '~/queries/schema/schema-general'
+import { hogql } from '~/queries/utils'
+import { SessionEventType } from '~/types'
+
+import type { sessionProfileLogicType } from './sessionProfileLogicType'
+
+export interface SessionProfileLogicProps {
+ sessionId: string
+}
+
+export interface SessionData {
+ session_id: string
+ distinct_id: string
+ person_properties: Record | null
+ start_timestamp: string
+ end_timestamp: string
+ entry_current_url: string | null
+ end_current_url: string | null
+ urls: string[]
+ num_uniq_urls: number
+ pageview_count: number
+ autocapture_count: number
+ screen_count: number
+ session_duration: number
+ channel_type: string | null
+ is_bounce: boolean
+ entry_hostname: string | null
+ entry_pathname: string | null
+ entry_utm_source: string | null
+ entry_utm_campaign: string | null
+ entry_utm_medium: string | null
+ entry_referring_domain: string | null
+ last_external_click_url: string | null
+}
+
+export const sessionProfileLogic = kea([
+ path(['scenes', 'sessions', 'sessionProfileLogic']),
+ props({} as SessionProfileLogicProps),
+ key((props) => props.sessionId),
+ actions({
+ loadSessionData: true,
+ loadSessionEvents: true,
+ loadMoreSessionEvents: true,
+ loadEventDetails: (eventId: string, eventName: string) => ({ eventId, eventName }),
+ setHasMoreEvents: (hasMore: boolean) => ({ hasMore }),
+ updateEventsOffset: (offset: number) => ({ offset }),
+ loadTotalEventCount: true,
+ setSortOrder: (sortOrder: 'asc' | 'desc') => ({ sortOrder }),
+ loadRecordingAvailability: true,
+ setEventsListFolded: (isFolded: boolean) => ({ isFolded }),
+ loadSupportTicketEvents: true,
+ }),
+ reducers({
+ hasMoreEvents: [
+ true,
+ {
+ loadSessionEventsSuccess: (_, { sessionEvents }) => sessionEvents.length === 50,
+ setHasMoreEvents: (_, { hasMore }) => hasMore,
+ },
+ ],
+ eventsOffset: [
+ 0 as number,
+ {
+ loadSessionEventsSuccess: (_, { sessionEvents }) => sessionEvents.length,
+ loadMoreSessionEvents: (state) => state, // Preserve before loading
+ updateEventsOffset: (_, { offset }) => offset,
+ setSortOrder: () => 0, // Reset offset when sort changes
+ },
+ ],
+ sortOrder: [
+ 'asc' as 'asc' | 'desc',
+ {
+ setSortOrder: (_, { sortOrder }) => sortOrder,
+ },
+ ],
+ eventsListFolded: [
+ false,
+ {
+ setEventsListFolded: (_, { isFolded }) => isFolded,
+ },
+ ],
+ }),
+ loaders(({ props, values }) => ({
+ sessionData: [
+ null as SessionData | null,
+ {
+ loadSessionData: async () => {
+ // First get the session data
+ const sessionQuery = hogql`
+ SELECT
+ session_id,
+ distinct_id,
+ $start_timestamp,
+ $end_timestamp,
+ $entry_current_url,
+ $end_current_url,
+ $urls,
+ $num_uniq_urls,
+ $pageview_count,
+ $autocapture_count,
+ $screen_count,
+ $session_duration,
+ $channel_type,
+ $is_bounce,
+ $entry_hostname,
+ $entry_pathname,
+ $entry_utm_source,
+ $entry_utm_campaign,
+ $entry_utm_medium,
+ $entry_referring_domain,
+ $last_external_click_url
+ FROM sessions
+ WHERE $start_timestamp >= UUIDv7ToDateTime(toUUID(${props.sessionId}))
+ AND $start_timestamp <= UUIDv7ToDateTime(toUUID(${props.sessionId})) + INTERVAL 1 HOUR
+ AND session_id = ${props.sessionId}
+ LIMIT 1
+ `
+
+ const response = await api.queryHogQL(sessionQuery)
+ const row = response.results?.[0]
+
+ if (!row) {
+ return null
+ }
+
+ const distinct_id = row[1]
+
+ // Second query: get person properties if we have a distinct_id
+ let person_properties: Record | null = null
+ if (distinct_id && distinct_id !== '$posthog_cookieless') {
+ try {
+ const personQuery = hogql`
+ SELECT properties
+ FROM persons
+ WHERE id IN (
+ SELECT person_id
+ FROM person_distinct_ids
+ WHERE distinct_id = ${distinct_id}
+ LIMIT 1
+ )
+ LIMIT 1
+ `
+ const personResponse = await api.queryHogQL(personQuery)
+ const personRow = personResponse.results?.[0]
+ if (personRow && personRow[0]) {
+ person_properties = JSON.parse(personRow[0])
+ }
+ } catch (e) {
+ console.error('Failed to fetch person properties:', e)
+ }
+ }
+
+ return {
+ session_id: row[0],
+ distinct_id: row[1],
+ start_timestamp: row[2],
+ end_timestamp: row[3],
+ entry_current_url: row[4],
+ end_current_url: row[5],
+ urls: row[6] || [],
+ num_uniq_urls: row[7] || 0,
+ pageview_count: row[8] || 0,
+ autocapture_count: row[9] || 0,
+ screen_count: row[10] || 0,
+ session_duration: row[11] || 0,
+ channel_type: row[12],
+ is_bounce: row[13] || false,
+ entry_hostname: row[14],
+ entry_pathname: row[15],
+ entry_utm_source: row[16],
+ entry_utm_campaign: row[17],
+ entry_utm_medium: row[18],
+ entry_referring_domain: row[19],
+ last_external_click_url: row[20],
+ person_properties,
+ }
+ },
+ },
+ ],
+ sessionEvents: [
+ null as SessionEventType[] | null,
+ {
+ loadSessionEvents: async () => {
+ const sortOrder = values.sortOrder || 'asc'
+ const eventsQuery =
+ sortOrder === 'asc'
+ ? hogql`
+ SELECT
+ uuid,
+ event,
+ timestamp,
+ properties.$window_id,
+ properties.$current_url,
+ properties.$event_type,
+ properties.$screen_name,
+ properties.$pathname,
+ properties.$exception_type,
+ properties.$exception_message,
+ properties.$console_log_level,
+ properties.$response_status,
+ properties.$exception_list,
+ distinct_id
+ FROM events
+ WHERE timestamp >= UUIDv7ToDateTime(toUUID(${props.sessionId}))
+ AND timestamp <= UUIDv7ToDateTime(toUUID(${props.sessionId})) + INTERVAL 2 DAY
+ AND \`$session_id\` = ${props.sessionId}
+ ORDER BY timestamp ASC
+ LIMIT 50
+ `
+ : hogql`
+ SELECT
+ uuid,
+ event,
+ timestamp,
+ properties.$window_id,
+ properties.$current_url,
+ properties.$event_type,
+ properties.$screen_name,
+ properties.$pathname,
+ properties.$exception_type,
+ properties.$exception_message,
+ properties.$console_log_level,
+ properties.$response_status,
+ properties.$exception_list,
+ distinct_id
+ FROM events
+ WHERE timestamp >= UUIDv7ToDateTime(toUUID(${props.sessionId}))
+ AND timestamp <= UUIDv7ToDateTime(toUUID(${props.sessionId})) + INTERVAL 2 DAY
+ AND \`$session_id\` = ${props.sessionId}
+ ORDER BY timestamp DESC
+ LIMIT 50
+ `
+
+ const response = await api.queryHogQL(eventsQuery)
+
+ return (response.results || []).map((row: any): SessionEventType => {
+ const properties: Record = {}
+
+ // Only add properties if they have values (not null/undefined)
+ if (row[3] != null) {
+ properties.$window_id = row[3]
+ }
+ if (row[4] != null) {
+ properties.$current_url = row[4]
+ }
+ if (row[5] != null) {
+ properties.$event_type = row[5]
+ }
+ if (row[6] != null) {
+ properties.$screen_name = row[6]
+ }
+ if (row[7] != null) {
+ properties.$pathname = row[7]
+ }
+ if (row[8] != null) {
+ properties.$exception_type = row[8]
+ }
+ if (row[9] != null) {
+ properties.$exception_message = row[9]
+ }
+ if (row[10] != null) {
+ properties.$console_log_level = row[10]
+ }
+ if (row[11] != null) {
+ properties.$response_status = row[11]
+ }
+
+ // Parse $exception_list if it exists (comes as JSON string)
+ if (row[12] != null) {
+ try {
+ properties.$exception_list = JSON.parse(row[12])
+ } catch (e) {
+ console.error(e)
+ properties.$exception_list = []
+ }
+ }
+
+ return {
+ id: row[0],
+ event: row[1],
+ timestamp: row[2],
+ properties,
+ distinct_id: row[13],
+ fullyLoaded: false,
+ }
+ })
+ },
+ loadMoreSessionEvents: async (_, breakpoint) => {
+ await breakpoint(500) // Debounce rapid scroll
+
+ const currentEvents = values.sessionEvents || []
+ const offset = values.eventsOffset
+ const sortOrder = values.sortOrder || 'asc'
+
+ const eventsQuery =
+ sortOrder === 'asc'
+ ? hogql`
+ SELECT
+ uuid,
+ event,
+ timestamp,
+ properties.$window_id,
+ properties.$current_url,
+ properties.$event_type,
+ properties.$screen_name,
+ properties.$pathname,
+ properties.$exception_type,
+ properties.$exception_message,
+ properties.$console_log_level,
+ properties.$response_status,
+ properties.$exception_list,
+ distinct_id
+ FROM events
+ WHERE timestamp >= UUIDv7ToDateTime(toUUID(${props.sessionId}))
+ AND timestamp <= UUIDv7ToDateTime(toUUID(${props.sessionId})) + INTERVAL 2 DAY
+ AND \`$session_id\` = ${props.sessionId}
+ ORDER BY timestamp ASC
+ LIMIT 50
+ OFFSET ${offset}
+ `
+ : hogql`
+ SELECT
+ uuid,
+ event,
+ timestamp,
+ properties.$window_id,
+ properties.$current_url,
+ properties.$event_type,
+ properties.$screen_name,
+ properties.$pathname,
+ properties.$exception_type,
+ properties.$exception_message,
+ properties.$console_log_level,
+ properties.$response_status,
+ properties.$exception_list,
+ distinct_id
+ FROM events
+ WHERE timestamp >= UUIDv7ToDateTime(toUUID(${props.sessionId}))
+ AND timestamp <= UUIDv7ToDateTime(toUUID(${props.sessionId})) + INTERVAL 2 DAY
+ AND \`$session_id\` = ${props.sessionId}
+ ORDER BY timestamp DESC
+ LIMIT 50
+ OFFSET ${offset}
+ `
+
+ const response = await api.queryHogQL(eventsQuery)
+
+ const newEvents = (response.results || []).map((row: any): SessionEventType => {
+ const properties: Record = {}
+
+ if (row[3] != null) {
+ properties.$window_id = row[3]
+ }
+ if (row[4] != null) {
+ properties.$current_url = row[4]
+ }
+ if (row[5] != null) {
+ properties.$event_type = row[5]
+ }
+ if (row[6] != null) {
+ properties.$screen_name = row[6]
+ }
+ if (row[7] != null) {
+ properties.$pathname = row[7]
+ }
+ if (row[8] != null) {
+ properties.$exception_type = row[8]
+ }
+ if (row[9] != null) {
+ properties.$exception_message = row[9]
+ }
+ if (row[10] != null) {
+ properties.$console_log_level = row[10]
+ }
+ if (row[11] != null) {
+ properties.$response_status = row[11]
+ }
+
+ if (row[12] != null) {
+ try {
+ properties.$exception_list = JSON.parse(row[12])
+ } catch (e) {
+ console.error(e)
+ properties.$exception_list = []
+ }
+ }
+
+ return {
+ id: row[0],
+ event: row[1],
+ timestamp: row[2],
+ properties,
+ distinct_id: row[13],
+ fullyLoaded: false,
+ }
+ })
+
+ // Append new events to existing events
+ return [...currentEvents, ...newEvents]
+ },
+ },
+ ],
+ eventDetails: [
+ {} as Record>,
+ {
+ loadEventDetails: async ({ eventId, eventName }) => {
+ // Fetch full properties for the specific event
+ // Use timestamp filtering based on session_id to enable partition pruning
+ // Also filter by event name to improve query performance
+ const detailsQuery = hogql`
+ SELECT properties, uuid
+ FROM events
+ WHERE event = ${eventName}
+ AND timestamp >= UUIDv7ToDateTime(toUUID(${props.sessionId}))
+ AND timestamp <= UUIDv7ToDateTime(toUUID(${props.sessionId})) + INTERVAL 2 DAY
+ AND uuid = ${eventId}
+ LIMIT 1
+ `
+
+ const response = await api.queryHogQL(detailsQuery)
+
+ if (!response.results || response.results.length === 0) {
+ return {}
+ }
+
+ const [propertiesJson, uuid] = response.results[0]
+ const fullProperties = JSON.parse(propertiesJson)
+
+ return { [uuid]: fullProperties }
+ },
+ },
+ ],
+ totalEventCount: [
+ null as number | null,
+ {
+ loadTotalEventCount: async () => {
+ const countQuery = hogql`
+ SELECT count(*) as total
+ FROM events
+ WHERE timestamp >= UUIDv7ToDateTime(toUUID(${props.sessionId}))
+ AND timestamp <= UUIDv7ToDateTime(toUUID(${props.sessionId})) + INTERVAL 2 DAY
+ AND \`$session_id\` = ${props.sessionId}
+ `
+
+ const response = await api.queryHogQL(countQuery)
+ return response.results?.[0]?.[0] || 0
+ },
+ },
+ ],
+ hasRecording: [
+ false as boolean,
+ {
+ loadRecordingAvailability: async () => {
+ // Use UUIDv7 timestamp for partition pruning (session_id is UUIDv7)
+ const startTime = `UUIDv7ToDateTime(toUUID('${props.sessionId}'))`
+ const endTime = `${startTime} + INTERVAL 1 DAY`
+
+ const response = await api.recordings.list({
+ kind: NodeKind.RecordingsQuery,
+ session_ids: [props.sessionId],
+ date_from: startTime,
+ date_to: endTime,
+ limit: 1,
+ })
+ return (response.results?.length ?? 0) > 0
+ },
+ },
+ ],
+ supportTicketEvents: [
+ [] as SessionEventType[],
+ {
+ loadSupportTicketEvents: async () => {
+ const ticketsQuery = hogql`
+ SELECT
+ uuid,
+ event,
+ timestamp,
+ properties.zendesk_ticket_id,
+ distinct_id
+ FROM events
+ WHERE timestamp >= UUIDv7ToDateTime(toUUID(${props.sessionId}))
+ AND timestamp <= UUIDv7ToDateTime(toUUID(${props.sessionId})) + INTERVAL 2 DAY
+ AND \`$session_id\` = ${props.sessionId}
+ AND event = 'support_ticket'
+ ORDER BY timestamp DESC
+ `
+
+ const response = await api.queryHogQL(ticketsQuery)
+
+ return (response.results || []).map((row: any): SessionEventType => {
+ const properties: Record = {}
+
+ if (row[3] != null) {
+ properties.zendesk_ticket_id = row[3]
+ }
+
+ return {
+ id: row[0],
+ event: row[1],
+ timestamp: row[2],
+ properties,
+ distinct_id: row[4],
+ fullyLoaded: false,
+ }
+ })
+ },
+ },
+ ],
+ })),
+ selectors({
+ sessionId: [() => [(_, props) => props.sessionId], (sessionId) => sessionId],
+ sessionDuration: [
+ (s) => [s.sessionData],
+ (sessionData: SessionData | null): number | null => {
+ // Session duration is already calculated in seconds in the table
+ return sessionData?.session_duration || null
+ },
+ ],
+ uniqueUrlCount: [
+ (s) => [s.sessionData],
+ (sessionData: SessionData | null): number => {
+ return sessionData?.num_uniq_urls || 0
+ },
+ ],
+ categorizedEventCount: [
+ (s) => [s.sessionData],
+ (sessionData: SessionData | null): number => {
+ if (!sessionData) {
+ return 0
+ }
+ return (
+ (sessionData.pageview_count || 0) +
+ (sessionData.autocapture_count || 0) +
+ (sessionData.screen_count || 0)
+ )
+ },
+ ],
+ otherEventCount: [
+ (s) => [s.totalEventCount, s.sessionData],
+ (totalEventCount: number | null, sessionData: SessionData | null): number => {
+ if (!totalEventCount || !sessionData) {
+ return 0
+ }
+ const categorized =
+ (sessionData.pageview_count || 0) +
+ (sessionData.autocapture_count || 0) +
+ (sessionData.screen_count || 0)
+ return Math.max(0, totalEventCount - categorized)
+ },
+ ],
+ isInitialLoading: [
+ (s) => [s.sessionDataLoading, s.sessionEventsLoading, s.sessionData, s.sessionEvents],
+ (
+ sessionDataLoading: boolean,
+ sessionEventsLoading: boolean,
+ sessionData: SessionData | null,
+ sessionEvents: SessionEventType[] | null
+ ): boolean =>
+ (sessionDataLoading && sessionData === null) || (sessionEventsLoading && sessionEvents === null),
+ ],
+ isLoadingMore: [
+ (s) => [s.sessionEventsLoading, s.sessionEvents],
+ (sessionEventsLoading: boolean, sessionEvents: SessionEventType[] | null): boolean =>
+ sessionEventsLoading && sessionEvents !== null,
+ ],
+ }),
+ listeners(({ actions, values }) => ({
+ loadSessionData: () => {
+ actions.loadSessionEvents()
+ actions.loadTotalEventCount()
+ actions.loadRecordingAvailability()
+ actions.loadSupportTicketEvents()
+ },
+ setSortOrder: () => {
+ // Reset hasMoreEvents when changing sort order
+ actions.setHasMoreEvents(true)
+ // Reload events with new sort order
+ actions.loadSessionEvents()
+ },
+ loadMoreSessionEventsSuccess: ({ sessionEvents }) => {
+ const previousCount = values.eventsOffset
+ const newCount = sessionEvents.length
+ const fetchedCount = newCount - previousCount
+
+ // Stop loading if we fetched less than 50 events (or if something went wrong)
+ if (fetchedCount < 50) {
+ actions.setHasMoreEvents(false)
+ }
+
+ // Only update offset if we actually got new events
+ if (fetchedCount > 0) {
+ actions.updateEventsOffset(newCount)
+ }
+ },
+ loadEventDetailsSuccess: ({ eventDetails }) => {
+ // After loading event details, update the sessionEvents array
+ const events = values.sessionEvents
+ if (!events || !eventDetails || Object.keys(eventDetails).length === 0) {
+ return
+ }
+
+ const updatedEvents = events.map((event) => {
+ const fullProperties = eventDetails[event.id]
+ if (fullProperties) {
+ return {
+ ...event,
+ properties: {
+ ...event.properties,
+ ...fullProperties,
+ },
+ fullyLoaded: true,
+ }
+ }
+ return event
+ })
+
+ actions.loadSessionEventsSuccess(updatedEvents)
+ },
+ })),
+ events(({ actions }) => ({
+ afterMount: () => {
+ actions.loadSessionData()
+ },
+ })),
+])
diff --git a/frontend/src/scenes/settings/SettingsMap.tsx b/frontend/src/scenes/settings/SettingsMap.tsx
index cb82cbb094..43614e35cc 100644
--- a/frontend/src/scenes/settings/SettingsMap.tsx
+++ b/frontend/src/scenes/settings/SettingsMap.tsx
@@ -689,7 +689,7 @@ export const SETTINGS_MAP: SettingSection[] = [
description: (
// Note: Sync the copy below with AIConsentPopoverWrapper.tsx
<>
- PostHog AI features, such as our assistant Max, use{' '}
+ PostHog AI features, such as the PostHog AI chat, use{' '}
external AI services
{' '}
diff --git a/frontend/src/scenes/settings/environment/ActivityLogSettings.tsx b/frontend/src/scenes/settings/environment/ActivityLogSettings.tsx
index b3b84812ae..cb677895bb 100644
--- a/frontend/src/scenes/settings/environment/ActivityLogSettings.tsx
+++ b/frontend/src/scenes/settings/environment/ActivityLogSettings.tsx
@@ -4,11 +4,12 @@ import { IconInfo } from '@posthog/icons'
import { LemonButton, LemonSwitch, Tooltip } from '@posthog/lemon-ui'
import { PayGateMini } from 'lib/components/PayGateMini/PayGateMini'
+import { useRestrictedArea } from 'lib/components/RestrictedArea'
+import { OrganizationMembershipLevel } from 'lib/constants'
import { eventUsageLogic } from 'lib/utils/eventUsageLogic'
import { LinkedHogFunctions } from 'scenes/hog-functions/list/LinkedHogFunctions'
import { teamLogic } from 'scenes/teamLogic'
import { urls } from 'scenes/urls'
-import { userLogic } from 'scenes/userLogic'
import { AvailableFeature } from '~/types'
@@ -27,11 +28,12 @@ export function ActivityLogSettings(): JSX.Element {
}
export function ActivityLogOrgLevelSettings(): JSX.Element {
- const { userLoading } = useValues(userLogic)
const { currentTeam } = useValues(teamLogic)
const { updateCurrentTeam } = useActions(teamLogic)
const { reportActivityLogSettingToggled } = useActions(eventUsageLogic)
+ const restrictionReason = useRestrictedArea({ minimumAccessLevel: OrganizationMembershipLevel.Admin })
+
const handleToggle = (checked: boolean): void => {
updateCurrentTeam({ receive_org_level_activity_logs: checked })
reportActivityLogSettingToggled(checked)
@@ -41,7 +43,7 @@ export function ActivityLogOrgLevelSettings(): JSX.Element {
- Enable organization-level activity logs notifications for this project.
+ Enable organization-level activity logs for this project.
@@ -59,7 +61,7 @@ export function ActivityLogOrgLevelSettings(): JSX.Element {
id="posthog-activity-log-org-level-switch"
onChange={handleToggle}
checked={!!currentTeam?.receive_org_level_activity_logs}
- disabled={userLoading}
+ disabledReason={restrictionReason || undefined}
label="Receive organization-level activity logs"
bordered
/>
diff --git a/frontend/src/scenes/settings/environment/SlackIntegration.tsx b/frontend/src/scenes/settings/environment/SlackIntegration.tsx
index ab4e646d8a..2f8327bb50 100644
--- a/frontend/src/scenes/settings/environment/SlackIntegration.tsx
+++ b/frontend/src/scenes/settings/environment/SlackIntegration.tsx
@@ -14,7 +14,7 @@ import { userLogic } from 'scenes/userLogic'
const getSlackAppManifest = (): any => ({
display_information: {
name: 'PostHog',
- description: 'Product Insights right where you need them',
+ description: 'Product insights right where you need them',
background_color: '#f54e00',
},
features: {
diff --git a/frontend/src/scenes/settings/environment/UsageMetricsConfig.tsx b/frontend/src/scenes/settings/environment/UsageMetricsConfig.tsx
index 13118864e8..07f3fe4587 100644
--- a/frontend/src/scenes/settings/environment/UsageMetricsConfig.tsx
+++ b/frontend/src/scenes/settings/environment/UsageMetricsConfig.tsx
@@ -290,7 +290,7 @@ export function UsageMetricsConfig(): JSX.Element {
{!isEditing && (
- }>
+ }>
Add metric
)}
diff --git a/frontend/src/scenes/trends/persons-modal/PersonsModal.tsx b/frontend/src/scenes/trends/persons-modal/PersonsModal.tsx
index 2b99c7ab63..5e46d1d165 100644
--- a/frontend/src/scenes/trends/persons-modal/PersonsModal.tsx
+++ b/frontend/src/scenes/trends/persons-modal/PersonsModal.tsx
@@ -123,7 +123,7 @@ export function PersonsModal({
onClose={closeModal}
onAfterClose={onAfterClose}
simple
- width={560}
+ width={600}
inline={inline}
>
diff --git a/frontend/src/scenes/urls.ts b/frontend/src/scenes/urls.ts
index effccd6a00..855dfc0baa 100644
--- a/frontend/src/scenes/urls.ts
+++ b/frontend/src/scenes/urls.ts
@@ -31,7 +31,7 @@ export const urls = {
default: (): string => '/',
project: (id: string | number, path = ''): string => `/project/${id}` + path,
currentProject: (path = ''): string => urls.project(getCurrentTeamId(), path),
- newTab: () => '/new',
+ newTab: () => '/search',
eventDefinitions: (): string => '/data-management/events',
eventDefinition: (id: string | number): string => `/data-management/events/${id}`,
eventDefinitionEdit: (id: string | number): string => `/data-management/events/${id}/edit`,
@@ -105,6 +105,7 @@ export const urls = {
`/verify_email${userUuid ? `/${userUuid}` : ''}${token ? `/${token}` : ''}`,
inviteSignup: (id: string): string => `/signup/${id}`,
products: (): string => '/products',
+ useCaseSelection: (): string => '/onboarding/use-case',
onboarding: (productKey: string, stepKey?: OnboardingStepKey, sdk?: SDKKey): string =>
`/onboarding/${productKey}${stepKey ? '?step=' + stepKey : ''}${
sdk && stepKey ? '&sdk=' + sdk : sdk ? '?sdk=' + sdk : ''
@@ -152,7 +153,8 @@ export const urls = {
moveToPostHogCloud: (): string => '/move-to-cloud',
heatmaps: (params?: string): string =>
`/heatmaps${params ? `?${params.startsWith('?') ? params.slice(1) : params}` : ''}`,
- heatmapNew: (): string => `/heatmaps/new`,
+ heatmapNew: (params?: string): string =>
+ `/heatmaps/new${params ? `?${params.startsWith('?') ? params.slice(1) : params}` : ''}`,
heatmapRecording: (params?: string): string =>
`/heatmaps/recording${params ? `?${params.startsWith('?') ? params.slice(1) : params}` : ''}`,
heatmap: (id: string | number): string => `/heatmaps/${id}`,
@@ -160,6 +162,7 @@ export const urls = {
`/links${params ? `?${params.startsWith('?') ? params.slice(1) : params}` : ''}`,
link: (id: string): string => `/link/${id}`,
sessionAttributionExplorer: (): string => '/web/session-attribution-explorer',
+ sessionProfile: (id: string): string => `/sessions/${id}`,
wizard: (): string => `/wizard`,
startups: (referrer?: string): string => `/startups${referrer ? `/${referrer}` : ''}`,
oauthAuthorize: (): string => '/oauth/authorize',
diff --git a/frontend/src/scenes/web-analytics/CrossSellButtons/HeatmapButton.tsx b/frontend/src/scenes/web-analytics/CrossSellButtons/HeatmapButton.tsx
index 407792ce23..528428f561 100644
--- a/frontend/src/scenes/web-analytics/CrossSellButtons/HeatmapButton.tsx
+++ b/frontend/src/scenes/web-analytics/CrossSellButtons/HeatmapButton.tsx
@@ -62,7 +62,7 @@ export const HeatmapButton = ({ breakdownBy, value }: HeatmapButtonProps): JSX.E
return (
}
type="tertiary"
size="xsmall"
diff --git a/frontend/src/scenes/web-analytics/WebAnalyticsFilters.tsx b/frontend/src/scenes/web-analytics/WebAnalyticsFilters.tsx
index 04c04717b5..6d3d98f9c6 100644
--- a/frontend/src/scenes/web-analytics/WebAnalyticsFilters.tsx
+++ b/frontend/src/scenes/web-analytics/WebAnalyticsFilters.tsx
@@ -10,6 +10,7 @@ import { FEATURE_FLAGS } from 'lib/constants'
import { LemonSegmentedSelect } from 'lib/lemon-ui/LemonSegmentedSelect'
import { IconBranch, IconMonitor } from 'lib/lemon-ui/icons/icons'
import { featureFlagLogic } from 'lib/logic/featureFlagLogic'
+import MaxTool from 'scenes/max/MaxTool'
import { urls } from 'scenes/urls'
import { userLogic } from 'scenes/userLogic'
@@ -60,13 +61,66 @@ export const WebAnalyticsFilters = ({ tabs }: { tabs: JSX.Element }): JSX.Elemen
-
+
>
}
/>
)
}
+const WebAnalyticsAIFilters = (): JSX.Element => {
+ const {
+ dateFilter: { dateTo, dateFrom },
+ rawWebAnalyticsFilters,
+ isPathCleaningEnabled,
+ compareFilter,
+ } = useValues(webAnalyticsLogic)
+ const { setDates, setWebAnalyticsFilters, setIsPathCleaningEnabled, setCompareFilter } =
+ useActions(webAnalyticsLogic)
+ const { featureFlags } = useValues(featureFlagLogic)
+
+ if (!featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_POSTHOG_AI]) {
+ return
+ }
+
+ return (
+ ) => {
+ if (toolOutput.properties !== undefined) {
+ setWebAnalyticsFilters(toolOutput.properties)
+ }
+ if (toolOutput.date_from !== undefined && toolOutput.date_to !== undefined) {
+ setDates(toolOutput.date_from, toolOutput.date_to)
+ }
+ if (toolOutput.doPathCleaning !== undefined) {
+ setIsPathCleaningEnabled(toolOutput.doPathCleaning)
+ }
+ if (toolOutput.compareFilter !== undefined) {
+ setCompareFilter(toolOutput.compareFilter)
+ }
+ }}
+ initialMaxPrompt="Filter web analytics data for "
+ suggestions={[
+ 'Show mobile traffic from last 30 days for the US',
+ 'Filter only sessions greater than 2 minutes coming from organic search',
+ "Don't include direct traffic and show data for the last 7 days",
+ ]}
+ >
+
+
+ )
+}
+
const PathCleaningToggle = (): JSX.Element | null => {
const { isPathCleaningEnabled } = useValues(webAnalyticsLogic)
const { setIsPathCleaningEnabled } = useActions(webAnalyticsLogic)
diff --git a/frontend/src/scenes/web-analytics/tiles/WebAnalyticsErrorTracking.tsx b/frontend/src/scenes/web-analytics/tiles/WebAnalyticsErrorTracking.tsx
index bd7b5bdbf5..a29df980db 100644
--- a/frontend/src/scenes/web-analytics/tiles/WebAnalyticsErrorTracking.tsx
+++ b/frontend/src/scenes/web-analytics/tiles/WebAnalyticsErrorTracking.tsx
@@ -24,26 +24,25 @@ export const CustomGroupTitleColumn: QueryContextColumnComponent = (props) => {
return (
- {record.description}
-
-
-
-
- }
- className="flex-1"
+ description={{record.description} }
+ className="flex"
to={urls.errorTrackingIssue(record.id)}
/>
)
}
+const LastSeenColumn = ({ record }: { record: unknown }): JSX.Element => {
+ const last_seen = (record as ErrorTrackingIssue).last_seen
+ return
+}
+
const CountColumn = ({ record, columnName }: { record: unknown; columnName: string }): JSX.Element => {
const aggregations = (record as ErrorTrackingIssue).aggregations!
const count = aggregations[columnName as 'occurrences' | 'users']
- return {humanFriendlyLargeNumber(count)}
+ return <>{humanFriendlyLargeNumber(count)}>
}
const context: QueryContext = {
@@ -52,7 +51,7 @@ const context: QueryContext = {
showQueryEditor: false,
columns: {
error: {
- width: '50%',
+ align: 'left',
render: CustomGroupTitleColumn,
},
users: {
@@ -63,6 +62,11 @@ const context: QueryContext = {
align: 'right',
render: CountColumn,
},
+ last_seen: {
+ title: 'Last seen',
+ align: 'right',
+ render: LastSeenColumn,
+ },
},
}
diff --git a/frontend/src/scenes/web-analytics/tiles/WebAnalyticsRecordings.tsx b/frontend/src/scenes/web-analytics/tiles/WebAnalyticsRecordings.tsx
index aa6cb3dc7c..2eaf51ca4b 100644
--- a/frontend/src/scenes/web-analytics/tiles/WebAnalyticsRecordings.tsx
+++ b/frontend/src/scenes/web-analytics/tiles/WebAnalyticsRecordings.tsx
@@ -1,19 +1,25 @@
import clsx from 'clsx'
import { useActions, useValues } from 'kea'
+import { IconRewindPlay } from '@posthog/icons'
+
import { EmptyMessage } from 'lib/components/EmptyMessage/EmptyMessage'
import { LemonButton } from 'lib/lemon-ui/LemonButton'
import { LemonSkeleton } from 'lib/lemon-ui/LemonSkeleton'
+import { LemonTable } from 'lib/lemon-ui/LemonTable'
+import { ProfilePicture } from 'lib/lemon-ui/ProfilePicture'
import { IconOpenInNew } from 'lib/lemon-ui/icons'
+import { humanFriendlyDuration } from 'lib/utils'
import { ProductIntentContext } from 'lib/utils/product-intents'
-import { RecordingRow } from 'scenes/session-recordings/components/RecordingRow'
+import { asDisplay } from 'scenes/persons/person-utils'
+import { ActivityScoreLabel } from 'scenes/session-recordings/components/RecordingRow'
import { sessionRecordingsPlaylistLogic } from 'scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic'
import { teamLogic } from 'scenes/teamLogic'
import { urls } from 'scenes/urls'
import { ReplayTile } from 'scenes/web-analytics/common'
import { webAnalyticsLogic } from 'scenes/web-analytics/webAnalyticsLogic'
-import { ProductKey, ReplayTabs } from '~/types'
+import { ProductKey, ReplayTabs, SessionRecordingType } from '~/types'
export function WebAnalyticsRecordingsTile({ tile }: { tile: ReplayTile }): JSX.Element {
const { layout } = tile
@@ -72,7 +78,50 @@ export function WebAnalyticsRecordingsTile({ tile }: { tile: ReplayTile }): JSX.
) : items.length === 0 && emptyMessage ? (
) : (
- items.map((item, index) => )
+ (
+ <>
+
+ {asDisplay(recording.person)}{' '}
+ >
+ ),
+ },
+ {
+ title: 'Activity',
+ render: (_, recording: SessionRecordingType) => (
+ <>
+
+ >
+ ),
+ },
+ {
+ title: 'Duration',
+ render: (_, recording: SessionRecordingType) => (
+ <>{humanFriendlyDuration(recording.recording_duration)}>
+ ),
+ },
+ {
+ title: '',
+ render: (_, recording: SessionRecordingType) => (
+ }
+ />
+ ),
+ },
+ ]}
+ dataSource={items}
+ />
)}
diff --git a/frontend/src/scenes/web-analytics/webAnalyticsExportUtils.ts b/frontend/src/scenes/web-analytics/webAnalyticsExportUtils.ts
index 9558731bd0..4064c4e70f 100644
--- a/frontend/src/scenes/web-analytics/webAnalyticsExportUtils.ts
+++ b/frontend/src/scenes/web-analytics/webAnalyticsExportUtils.ts
@@ -149,7 +149,8 @@ class WebAnalyticsTableAdapter implements ExportAdapter {
private getWebAnalyticsTableData(
columns: string[],
- source: WebStatsTableQuery | WebGoalsQuery | WebExternalClicksTableQuery
+ source: WebStatsTableQuery | WebGoalsQuery | WebExternalClicksTableQuery,
+ keptIndices: number[]
): string[][] {
if (!this.response.results || this.response.results.length === 0 || !columns.length) {
return []
@@ -159,7 +160,7 @@ class WebAnalyticsTableAdapter implements ExportAdapter {
const breakdownBy = isWebStatsTableQuery(source) ? source.breakdownBy : undefined
const firstRow = this.response.results[0] as any[]
- const columnHasComparison = columns.map((_, colIndex) => Array.isArray(firstRow[colIndex]))
+ const columnHasComparison = columns.map((_, colIndex) => Array.isArray(firstRow[keptIndices[colIndex]]))
const displayHeaders = hasComparison
? columns.flatMap((col, colIndex) => {
@@ -174,7 +175,7 @@ class WebAnalyticsTableAdapter implements ExportAdapter {
const dataRows = this.response.results.map((result) => {
const row = result as any[]
return columns.flatMap((_, colIndex) => {
- const value = row[colIndex]
+ const value = row[keptIndices[colIndex]]
if (hasComparison && Array.isArray(value)) {
return [value[0] != null ? String(value[0]) : '', value[1] != null ? String(value[1]) : '']
}
@@ -191,8 +192,17 @@ class WebAnalyticsTableAdapter implements ExportAdapter {
}
const dataTableQuery = this.query as DataTableNode
const source = dataTableQuery.source as WebStatsTableQuery | WebGoalsQuery | WebExternalClicksTableQuery
- const columns = (this.response.columns as string[]) || []
- return this.getWebAnalyticsTableData(columns, source)
+ const allColumns = (this.response.columns as string[]) || []
+
+ // Filter out internal UI state columns that shouldn't be exported
+ const columnsToKeep = allColumns
+ .map((col, index) => ({ col, index }))
+ .filter(({ col }) => !col.includes('ui_fill_fraction') && !col.includes('cross_sell'))
+
+ const filteredColumns = columnsToKeep.map(({ col }) => col)
+ const keptIndices = columnsToKeep.map(({ index }) => index)
+
+ return this.getWebAnalyticsTableData(filteredColumns, source, keptIndices)
}
canHandle(): boolean {
diff --git a/frontend/src/scenes/web-analytics/webAnalyticsLogic.tsx b/frontend/src/scenes/web-analytics/webAnalyticsLogic.tsx
index 8a7cb787e3..d9fc29aed9 100644
--- a/frontend/src/scenes/web-analytics/webAnalyticsLogic.tsx
+++ b/frontend/src/scenes/web-analytics/webAnalyticsLogic.tsx
@@ -1158,7 +1158,7 @@ export const webAnalyticsLogic = kea([
dateRange: dateRange,
filterTestAccounts: filterTestAccounts,
filterGroup: replayFilters.filter_group,
- columns: ['error', 'users', 'occurrences'],
+ columns: ['error', 'users', 'occurrences', 'last_seen'],
limit: 4,
})
} catch (e) {
@@ -1190,13 +1190,10 @@ export const webAnalyticsLogic = kea([
kind: NodeKind.TrendsQuery,
dateRange,
interval,
- series: (['INP', 'LCP', 'CLS', 'FCP'] as WebVitalsMetric[]).flatMap((metric) =>
- [PropertyMathType.P75, PropertyMathType.P90, PropertyMathType.P99].map((math) =>
- createSeries(metric, math)
- )
+ series: (['INP', 'LCP', 'CLS', 'FCP'] as WebVitalsMetric[]).map((metric) =>
+ createSeries(metric, webVitalsPercentile)
),
trendsFilter: { display: ChartDisplayType.ActionsLineGraph },
- compareFilter,
filterTestAccounts,
properties: webAnalyticsFilters,
},
diff --git a/frontend/src/taxonomy/core-filter-definitions-by-group.json b/frontend/src/taxonomy/core-filter-definitions-by-group.json
index 17799ca4d5..e33c1d4690 100644
--- a/frontend/src/taxonomy/core-filter-definitions-by-group.json
+++ b/frontend/src/taxonomy/core-filter-definitions-by-group.json
@@ -1178,6 +1178,12 @@
"type": "String",
"used_for_debug": true
},
+ "$sdk_debug_replay_flushed_size": {
+ "description": "Estimated size in bytes of flushed recording data so far in this session. Added to events as a debug property.",
+ "label": "Estimated bytes flushed",
+ "type": "Numeric",
+ "used_for_debug": true
+ },
"$sdk_debug_replay_internal_buffer_length": {
"description": "Useful for debugging. The internal buffer length for replay.",
"examples": ["100"],
@@ -1734,6 +1740,18 @@
"description": "Snapchat Click ID",
"label": "sccid"
},
+ "sdk_debug_extensions_init_method": {
+ "description": "The method used to initialize PostHog.js extensions.",
+ "examples": ["deferred", "synchronous"],
+ "label": "PostHog.js extensions init method",
+ "used_for_debug": true
+ },
+ "sdk_debug_extensions_init_time_ms": {
+ "description": "The time taken to initialize PostHog.js extensions in milliseconds.",
+ "examples": ["150"],
+ "label": "PostHog.js extensions init time (ms)",
+ "used_for_debug": true
+ },
"token": {
"description": "Token used for authentication.",
"examples": ["ph_abcdefg"],
@@ -3417,6 +3435,12 @@
"type": "String",
"used_for_debug": true
},
+ "$sdk_debug_replay_flushed_size": {
+ "description": "Estimated size in bytes of flushed recording data so far in this session. Added to events as a debug property.",
+ "label": "Estimated bytes flushed",
+ "type": "Numeric",
+ "used_for_debug": true
+ },
"$sdk_debug_replay_internal_buffer_length": {
"description": "Useful for debugging. The internal buffer length for replay.",
"examples": ["100"],
@@ -4005,6 +4029,18 @@
"description": "Snapchat Click ID Data from the last time this user was seen.",
"label": "Latest sccid"
},
+ "sdk_debug_extensions_init_method": {
+ "description": "The method used to initialize PostHog.js extensions.",
+ "examples": ["deferred", "synchronous"],
+ "label": "PostHog.js extensions init method",
+ "used_for_debug": true
+ },
+ "sdk_debug_extensions_init_time_ms": {
+ "description": "The time taken to initialize PostHog.js extensions in milliseconds.",
+ "examples": ["150"],
+ "label": "PostHog.js extensions init time (ms)",
+ "used_for_debug": true
+ },
"token": {
"description": "Token used for authentication.",
"examples": ["ph_abcdefg"],
diff --git a/frontend/src/taxonomy/taxonomy.tsx b/frontend/src/taxonomy/taxonomy.tsx
index 270050715c..e9037dc802 100644
--- a/frontend/src/taxonomy/taxonomy.tsx
+++ b/frontend/src/taxonomy/taxonomy.tsx
@@ -159,6 +159,13 @@ export const POSTHOG_EVENT_PROMOTED_PROPERTIES = {
'$csp_user_agent',
],
$set: ['$set', '$set_once'],
+ $exception: [
+ '$exception_issue_id',
+ '$exception_functions',
+ '$exception_sources',
+ '$exception_types',
+ '$exception_values',
+ ],
}
export type KNOWN_PROMOTED_PROPERTY_PARENTS = keyof typeof POSTHOG_EVENT_PROMOTED_PROPERTIES
diff --git a/frontend/src/test/mocks/rawFileMock.js b/frontend/src/test/mocks/rawFileMock.js
new file mode 100644
index 0000000000..b0c50903a9
--- /dev/null
+++ b/frontend/src/test/mocks/rawFileMock.js
@@ -0,0 +1 @@
+module.exports = ''
diff --git a/frontend/src/toolbar/elements/heatmapToolbarMenuLogic.ts b/frontend/src/toolbar/elements/heatmapToolbarMenuLogic.ts
index 6ba6892dfd..6ad6a258d6 100644
--- a/frontend/src/toolbar/elements/heatmapToolbarMenuLogic.ts
+++ b/frontend/src/toolbar/elements/heatmapToolbarMenuLogic.ts
@@ -458,6 +458,7 @@ export const heatmapToolbarMenuLogic = kea([
}
},
+ // setHref is called when the page url changes
setHref: ({ href }) => {
if (values.heatmapEnabled) {
actions.setHrefMatchType(href === window.location.href ? 'exact' : 'pattern')
@@ -465,8 +466,13 @@ export const heatmapToolbarMenuLogic = kea([
}
actions.maybeLoadClickmap()
},
+
setWildcardHref: ({ href }) => {
- actions.setDataHref(href)
+ if (values.heatmapEnabled) {
+ actions.setHrefMatchType(href === window.location.href ? 'exact' : 'pattern')
+ actions.setDataHref(href)
+ }
+ actions.maybeLoadClickmap()
},
setCommonFilters: () => {
actions.loadAllEnabled()
diff --git a/frontend/src/toolbar/stats/currentPageLogic.ts b/frontend/src/toolbar/stats/currentPageLogic.ts
index da81ec4788..f8ed0432ab 100644
--- a/frontend/src/toolbar/stats/currentPageLogic.ts
+++ b/frontend/src/toolbar/stats/currentPageLogic.ts
@@ -54,9 +54,9 @@ export const currentPageLogic = kea([
autoWildcardHref: true,
})),
reducers(() => ({
- href: [window.location.href, { setHref: (_, { href }) => withoutPostHogInit(href) }],
+ href: [withoutPostHogInit(window.location.href), { setHref: (_, { href }) => withoutPostHogInit(href) }],
wildcardHref: [
- window.location.href,
+ withoutPostHogInit(window.location.href),
{
setHref: (_, { href }) => withoutPostHogInit(href),
setWildcardHref: (_, { href }) => withoutPostHogInit(href),
@@ -92,8 +92,6 @@ export const currentPageLogic = kea([
})),
afterMount(({ actions, values, cache }) => {
- actions.setHref(withoutPostHogInit(values.href))
-
cache.disposables.add(
makeNavigateWrapper((): void => {
if (window.location.href !== values.href) {
diff --git a/frontend/src/types.ts b/frontend/src/types.ts
index d4d3b7d940..22f084ad53 100644
--- a/frontend/src/types.ts
+++ b/frontend/src/types.ts
@@ -244,6 +244,8 @@ export enum ProductKey {
MAX = 'max',
LINKS = 'links',
ENDPOINTS = 'endpoints',
+ CUSTOMER_ANALYTICS = 'customer_analytics',
+ LOGS = 'logs',
}
type ProductKeyUnion = `${ProductKey}`
@@ -282,11 +284,13 @@ export enum Region {
}
export type SSOProvider = 'google-oauth2' | 'github' | 'gitlab' | 'saml'
+export type LoginMethod = SSOProvider | 'password' | null
export interface AuthBackends {
'google-oauth2'?: boolean
gitlab?: boolean
github?: boolean
+ saml?: boolean
}
export type ColumnChoice = string[] | 'DEFAULT'
@@ -312,6 +316,7 @@ export enum AccessControlResourceType {
Survey = 'survey',
Experiment = 'experiment',
WebAnalytics = 'web_analytics',
+ ActivityLog = 'activity_log',
}
interface UserBaseType {
@@ -858,6 +863,7 @@ export enum ExperimentsTabs {
export enum ActivityTab {
ExploreEvents = 'explore',
+ ExploreSessions = 'sessions',
LiveEvents = 'live',
}
@@ -1587,6 +1593,10 @@ export interface RecordingEventType
distinct_id?: EventType['distinct_id']
}
+export interface SessionEventType extends Pick {
+ fullyLoaded: boolean
+ distinct_id?: EventType['distinct_id']
+}
export interface PlaylistCollectionCount {
count: number
watched_count: number
@@ -3950,6 +3960,14 @@ export interface Experiment {
user_access_level: AccessControlLevel
}
+export interface ExperimentVelocityStats {
+ launched_last_30d: number
+ launched_previous_30d: number
+ percent_change: number
+ active_experiments: number
+ completed_last_30d: number
+}
+
export interface FunnelExperimentVariant {
key: string
success_count: number
@@ -4774,6 +4792,18 @@ export interface DataWarehouseTable {
export type DataWarehouseTableTypes = 'CSV' | 'Parquet' | 'JSON' | 'CSVWithNames'
+export type DataModelingJobStatus = 'Running' | 'Completed' | 'Failed' | 'Cancelled'
+
+export interface DataWarehouseSavedQueryRunHistory {
+ status: DataModelingJobStatus
+ timestamp?: string
+}
+
+export interface DataWarehouseSavedQueryDependencies {
+ upstream_count: number
+ downstream_count: number
+}
+
export interface DataWarehouseSavedQuery {
/** UUID */
id: string
@@ -4787,6 +4817,10 @@ export interface DataWarehouseSavedQuery {
latest_error: string | null
latest_history_id?: string
is_materialized?: boolean
+ upstream_dependency_count?: number
+ downstream_dependency_count?: number
+ created_at?: string
+ run_history?: DataWarehouseSavedQueryRunHistory[]
}
export interface DataWarehouseSavedQueryDraft {
@@ -5302,7 +5336,7 @@ export enum SDKKey {
}
export enum SDKTag {
- RECOMMENDED = 'Recommended',
+ POPULAR = 'Most popular',
WEB = 'Web',
MOBILE = 'Mobile',
SERVER = 'Server',
@@ -5391,6 +5425,7 @@ export type AvailableOnboardingProducts = Record<
export type OnboardingProduct = {
name: string
breadcrumbsName?: string
+ description: string
icon: string
iconColor: string
url: string
diff --git a/funnel-udf/Cargo.lock b/funnel-udf/Cargo.lock
index c24bd48c14..858f3a3da5 100644
--- a/funnel-udf/Cargo.lock
+++ b/funnel-udf/Cargo.lock
@@ -2,12 +2,27 @@
# It is not intended for manual editing.
version = 4
+[[package]]
+name = "aho-corasick"
+version = "1.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301"
+dependencies = [
+ "memchr",
+]
+
[[package]]
name = "autocfg"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"
+[[package]]
+name = "bumpalo"
+version = "3.19.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43"
+
[[package]]
name = "cfg-if"
version = "1.0.0"
@@ -20,33 +35,106 @@ version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0"
+[[package]]
+name = "equivalent"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
+
[[package]]
name = "funnels"
version = "0.1.0"
dependencies = [
"itertools",
"ordered-float",
+ "rstest",
"serde",
"serde_json",
"uuid",
]
[[package]]
-name = "getrandom"
-version = "0.2.15"
+name = "futures-core"
+version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7"
+checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"
+
+[[package]]
+name = "futures-macro"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "futures-task"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988"
+
+[[package]]
+name = "futures-timer"
+version = "3.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24"
+
+[[package]]
+name = "futures-util"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
+dependencies = [
+ "futures-core",
+ "futures-macro",
+ "futures-task",
+ "pin-project-lite",
+ "pin-utils",
+ "slab",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd"
dependencies = [
"cfg-if",
"libc",
- "wasi",
+ "r-efi",
+ "wasip2",
+]
+
+[[package]]
+name = "glob"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280"
+
+[[package]]
+name = "hashbrown"
+version = "0.16.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d"
+
+[[package]]
+name = "indexmap"
+version = "2.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6717a8d2a5a929a1a2eb43a12812498ed141a0bcfb7e8f7844fbdbe4303bba9f"
+dependencies = [
+ "equivalent",
+ "hashbrown",
]
[[package]]
name = "itertools"
-version = "0.11.0"
+version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57"
+checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285"
dependencies = [
"either",
]
@@ -57,6 +145,16 @@ version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b"
+[[package]]
+name = "js-sys"
+version = "0.3.82"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b011eec8cc36da2aab2d5cff675ec18454fad408585853910a202391cf9f8e65"
+dependencies = [
+ "once_cell",
+ "wasm-bindgen",
+]
+
[[package]]
name = "libc"
version = "0.2.159"
@@ -79,14 +177,41 @@ dependencies = [
]
[[package]]
-name = "ordered-float"
-version = "5.0.0"
+name = "once_cell"
+version = "1.21.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e2c1f9f56e534ac6a9b8a4600bdf0f530fb393b5f393e7b4d03489c3cf0c3f01"
+checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
+
+[[package]]
+name = "ordered-float"
+version = "5.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f4779c6901a562440c3786d08192c6fbda7c1c2060edd10006b05ee35d10f2d"
dependencies = [
"num-traits",
]
+[[package]]
+name = "pin-project-lite"
+version = "0.2.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b"
+
+[[package]]
+name = "pin-utils"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
+
+[[package]]
+name = "proc-macro-crate"
+version = "3.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983"
+dependencies = [
+ "toml_edit",
+]
+
[[package]]
name = "proc-macro2"
version = "1.0.86"
@@ -105,6 +230,91 @@ dependencies = [
"proc-macro2",
]
+[[package]]
+name = "r-efi"
+version = "5.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
+
+[[package]]
+name = "regex"
+version = "1.12.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-automata",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.4.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.8.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58"
+
+[[package]]
+name = "relative-path"
+version = "1.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba39f3699c378cd8970968dcbff9c43159ea4cfbd88d43c00b22f2ef10a435d2"
+
+[[package]]
+name = "rstest"
+version = "0.26.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f5a3193c063baaa2a95a33f03035c8a72b83d97a54916055ba22d35ed3839d49"
+dependencies = [
+ "futures-timer",
+ "futures-util",
+ "rstest_macros",
+]
+
+[[package]]
+name = "rstest_macros"
+version = "0.26.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c845311f0ff7951c5506121a9ad75aec44d083c31583b2ea5a30bcb0b0abba0"
+dependencies = [
+ "cfg-if",
+ "glob",
+ "proc-macro-crate",
+ "proc-macro2",
+ "quote",
+ "regex",
+ "relative-path",
+ "rustc_version",
+ "syn",
+ "unicode-ident",
+]
+
+[[package]]
+name = "rustc_version"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92"
+dependencies = [
+ "semver",
+]
+
+[[package]]
+name = "rustversion"
+version = "1.0.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d"
+
[[package]]
name = "ryu"
version = "1.0.18"
@@ -112,19 +322,35 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f"
[[package]]
-name = "serde"
-version = "1.0.210"
+name = "semver"
+version = "1.0.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a"
+checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2"
+
+[[package]]
+name = "serde"
+version = "1.0.228"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
+dependencies = [
+ "serde_core",
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_core"
+version = "1.0.228"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
-version = "1.0.210"
+version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f"
+checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
dependencies = [
"proc-macro2",
"quote",
@@ -133,27 +359,64 @@ dependencies = [
[[package]]
name = "serde_json"
-version = "1.0.128"
+version = "1.0.145"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8"
+checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c"
dependencies = [
"itoa",
"memchr",
"ryu",
"serde",
+ "serde_core",
]
[[package]]
-name = "syn"
-version = "2.0.77"
+name = "slab"
+version = "0.4.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9f35bcdf61fd8e7be6caf75f429fdca8beb3ed76584befb503b1569faee373ed"
+checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589"
+
+[[package]]
+name = "syn"
+version = "2.0.87"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
+[[package]]
+name = "toml_datetime"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f2cdb639ebbc97961c51720f858597f7f24c4fc295327923af55b74c3c724533"
+dependencies = [
+ "serde_core",
+]
+
+[[package]]
+name = "toml_edit"
+version = "0.23.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6485ef6d0d9b5d0ec17244ff7eb05310113c3f316f2d14200d4de56b3cb98f8d"
+dependencies = [
+ "indexmap",
+ "toml_datetime",
+ "toml_parser",
+ "winnow",
+]
+
+[[package]]
+name = "toml_parser"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c0cbe268d35bdb4bb5a56a2de88d0ad0eb70af5384a99d648cd4b3d04039800e"
+dependencies = [
+ "winnow",
+]
+
[[package]]
name = "unicode-ident"
version = "1.0.13"
@@ -162,16 +425,81 @@ checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe"
[[package]]
name = "uuid"
-version = "1.10.0"
+version = "1.18.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314"
+checksum = "2f87b8aa10b915a06587d0dec516c282ff295b475d94abf425d62b57710070a2"
dependencies = [
"getrandom",
+ "js-sys",
"serde",
+ "wasm-bindgen",
]
[[package]]
-name = "wasi"
-version = "0.11.0+wasi-snapshot-preview1"
+name = "wasip2"
+version = "1.0.1+wasi-0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
+checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7"
+dependencies = [
+ "wit-bindgen",
+]
+
+[[package]]
+name = "wasm-bindgen"
+version = "0.2.105"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "da95793dfc411fbbd93f5be7715b0578ec61fe87cb1a42b12eb625caa5c5ea60"
+dependencies = [
+ "cfg-if",
+ "once_cell",
+ "rustversion",
+ "wasm-bindgen-macro",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-macro"
+version = "0.2.105"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "04264334509e04a7bf8690f2384ef5265f05143a4bff3889ab7a3269adab59c2"
+dependencies = [
+ "quote",
+ "wasm-bindgen-macro-support",
+]
+
+[[package]]
+name = "wasm-bindgen-macro-support"
+version = "0.2.105"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "420bc339d9f322e562942d52e115d57e950d12d88983a14c79b86859ee6c7ebc"
+dependencies = [
+ "bumpalo",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-shared"
+version = "0.2.105"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "76f218a38c84bcb33c25ec7059b07847d465ce0e0a76b995e134a45adcb6af76"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "winnow"
+version = "0.7.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "wit-bindgen"
+version = "0.46.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59"
diff --git a/funnel-udf/Cargo.toml b/funnel-udf/Cargo.toml
index caa5cf6be6..0e5b1d21c9 100644
--- a/funnel-udf/Cargo.toml
+++ b/funnel-udf/Cargo.toml
@@ -4,11 +4,14 @@ version = "0.1.0"
edition = "2021"
[dependencies]
-serde = { version = "1.0.104", features = ["derive"] }
-serde_json = "1.0.48"
-itertools = "0.11"
-uuid = { version = "1.10.0", features = ["v4", "serde"] }
-ordered-float = "5.0.0"
+serde = { version = "1.0.228", features = ["derive"] }
+serde_json = "1.0.145"
+itertools = "0.14.0"
+uuid = { version = "1.18.1", features = ["v4", "serde"] }
+ordered-float = "5.1.0"
+
+[dev-dependencies]
+rstest = "0.26.1"
[profile.release]
lto = true
diff --git a/funnel-udf/build.sh b/funnel-udf/build.sh
old mode 100644
new mode 100755
diff --git a/funnel-udf/src/main.rs b/funnel-udf/src/main.rs
index 104519c4f3..20463b6538 100644
--- a/funnel-udf/src/main.rs
+++ b/funnel-udf/src/main.rs
@@ -1,3 +1,4 @@
+mod parsing;
mod steps;
mod trends;
mod unordered_steps;
diff --git a/funnel-udf/src/parsing.rs b/funnel-udf/src/parsing.rs
new file mode 100644
index 0000000000..6bce1788c5
--- /dev/null
+++ b/funnel-udf/src/parsing.rs
@@ -0,0 +1,58 @@
+use serde::{Deserialize, Deserializer};
+use std::str::FromStr;
+
+#[derive(Deserialize)]
+#[serde(untagged)]
+enum U64OrString {
+ U64(u64),
+ String(String),
+}
+
+pub fn u64_or_string<'de, D>(deserializer: D) -> Result
+where
+ D: Deserializer<'de>,
+{
+ match U64OrString::deserialize(deserializer)? {
+ U64OrString::U64(v) => Ok(v),
+ U64OrString::String(v) => u64::from_str(&v).map_err(serde::de::Error::custom),
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use rstest::rstest;
+ use serde::Deserialize;
+
+ #[derive(Deserialize)]
+ struct TestStruct {
+ #[serde(deserialize_with = "u64_or_string")]
+ value: u64,
+ }
+
+ #[rstest]
+ #[case(r#"{"value": 0}"#, 0)]
+ #[case(r#"{"value": 12345}"#, 12345)]
+ #[case(r#"{"value": 67890}"#, 67890)]
+ #[case(r#"{"value": 18446744073709551615}"#, u64::MAX)]
+ #[case(r#"{"value": "0"}"#, 0)]
+ #[case(r#"{"value": "12345"}"#, 12345)]
+ #[case(r#"{"value": "67890"}"#, 67890)]
+ #[case(r#"{"value": "18446744073709551615"}"#, u64::MAX)]
+ fn test_deserialize_u64_valid(#[case] json: &str, #[case] expected: u64) {
+ let result: TestStruct = serde_json::from_str(json).unwrap();
+ assert_eq!(result.value, expected);
+ }
+
+ #[rstest]
+ #[case(r#"{"value": "not_a_number"}"#)]
+ #[case(r#"{"value": "-123"}"#)]
+ #[case(r#"{"value": ""}"#)]
+ #[case(r#"{"value": "18446744073709551616"}"#)] // u64::MAX + 1
+ #[case(r#"{"value": "12.34"}"#)]
+ #[case(r#"{"value": " 123 "}"#)]
+ fn test_deserialize_u64_invalid(#[case] json: &str) {
+ let result: Result = serde_json::from_str(json);
+ assert!(result.is_err());
+ }
+}
diff --git a/funnel-udf/src/steps.rs b/funnel-udf/src/steps.rs
index f6958f5d92..472198b608 100644
--- a/funnel-udf/src/steps.rs
+++ b/funnel-udf/src/steps.rs
@@ -1,3 +1,4 @@
+use crate::parsing::u64_or_string;
use crate::unordered_steps::AggregateFunnelRowUnordered;
use crate::PropVal;
use itertools::Itertools;
@@ -26,6 +27,7 @@ pub struct Event {
#[derive(Deserialize)]
pub struct Args {
pub num_steps: usize,
+ #[serde(deserialize_with = "u64_or_string")]
pub conversion_window_limit: u64, // In seconds
pub breakdown_attribution_type: String,
pub funnel_order_type: String,
diff --git a/funnel-udf/src/trends.rs b/funnel-udf/src/trends.rs
index 041ff7acfb..3d94d6a522 100644
--- a/funnel-udf/src/trends.rs
+++ b/funnel-udf/src/trends.rs
@@ -1,20 +1,12 @@
+use crate::parsing::u64_or_string;
use crate::unordered_trends::AggregateFunnelRowUnordered;
use crate::PropVal;
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use std::collections::HashMap;
-use std::str::FromStr;
use uuid::Uuid;
-fn deserialize_number_from_string<'de, D>(deserializer: D) -> Result
-where
- D: serde::Deserializer<'de>,
-{
- let s = String::deserialize(deserializer)?;
- u64::from_str(&s).map_err(serde::de::Error::custom)
-}
-
#[derive(Clone, Deserialize)]
pub struct EnteredTimestamp {
pub timestamp: f64,
@@ -24,7 +16,7 @@ pub struct EnteredTimestamp {
#[derive(Debug, Clone, Deserialize)]
pub struct Event {
pub timestamp: f64,
- #[serde(deserialize_with = "deserialize_number_from_string")]
+ #[serde(deserialize_with = "u64_or_string")]
pub interval_start: u64,
pub uuid: Uuid,
pub breakdown: PropVal,
@@ -36,6 +28,7 @@ pub struct Args {
pub from_step: usize,
pub to_step: usize,
pub num_steps: usize,
+ #[serde(deserialize_with = "u64_or_string")]
pub conversion_window_limit: u64, // In seconds
pub breakdown_attribution_type: String,
pub funnel_order_type: String,
diff --git a/gunicorn.config.py b/gunicorn.config.py
deleted file mode 100644
index 3e88acdae9..0000000000
--- a/gunicorn.config.py
+++ /dev/null
@@ -1,253 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import sys
-import time
-import socket
-import struct
-import logging
-import threading
-
-import structlog
-from prometheus_client import CollectorRegistry, Gauge, multiprocess, start_http_server
-
-loglevel = "error"
-keepalive = 120
-
-# Set the timeout to something lower than any downstreams, such that if the
-# timeout is hit, then the worker will be killed and respawned, which will then
-# we able to pick up any connections that were previously pending on the socket
-# and serve the requests before the downstream timeout.
-timeout = 15
-
-grateful_timeout = 120
-
-
-METRICS_UPDATE_INTERVAL_SECONDS = int(os.getenv("GUNICORN_METRICS_UPDATE_SECONDS", 5))
-
-
-def when_ready(server):
- """
- To ease being able to hide the /metrics endpoint when running in production,
- we serve the metrics on a separate port, using the
- prometheus_client.multiprocess Collector to pull in data from the worker
- processes.
- """
- registry = CollectorRegistry()
- multiprocess.MultiProcessCollector(registry)
- port = int(os.environ.get("PROMETHEUS_METRICS_EXPORT_PORT", 8001))
- start_http_server(port=port, registry=registry)
-
- # Start a thread in the Arbiter that will monitor the backlog on the sockets
- # Gunicorn is listening on.
- socket_monitor = SocketMonitor(server=server, registry=registry)
- socket_monitor.start()
-
-
-def post_fork(server, worker):
- """
- Within each worker process, start a thread that will monitor the thread and
- connection pool.
- """
- worker_monitor = WorkerMonitor(worker=worker)
- worker_monitor.start()
-
-
-def worker_exit(server, worker):
- """
- Ensure that we mark workers as dead with the prometheus_client such that
- any cleanup can happen.
- """
- multiprocess.mark_process_dead(worker.pid)
-
-
-class SocketMonitor(threading.Thread):
- """
- We have enabled the statsd collector for Gunicorn, but this doesn't include
- the backlog due to concerns over portability, see
- https://github.com/benoitc/gunicorn/pull/2407
-
- Instead, we expose to Prometheus a gauge that will report the backlog size.
-
- We can then:
-
- 1. use this to monitor how well the Gunicorn instances are keeping up with
- requests.
- 2. use this metric to handle HPA scaling e.g. in Kubernetes
-
- """
-
- def __init__(self, server, registry):
- super().__init__()
- self.daemon = True
- self.server = server
- self.registry = registry
-
- def run(self):
- """
- Every X seconds, check to see how many connections are pending for each
- server socket.
-
- We label each individually, as limits such as `--backlog` will apply to
- each individually.
- """
- if sys.platform != "linux":
- # We use the assumption that we are on Linux to be able to get the
- # socket backlog, so if we're not on Linux, we return immediately.
- return
-
- backlog_gauge = Gauge(
- "gunicorn_pending_connections",
- "The number of pending connections on all sockets. Linux only.",
- registry=self.registry,
- labelnames=["listener"],
- )
-
- while True:
- for sock in self.server.LISTENERS:
- backlog = self.get_backlog(sock=sock)
- backlog_gauge.labels(listener=str(sock)).set(backlog)
-
- time.sleep(METRICS_UPDATE_INTERVAL_SECONDS)
-
- def get_backlog(self, sock):
- # tcp_info struct from include/uapi/linux/tcp.h
- fmt = "B" * 8 + "I" * 24
- tcp_info_struct = sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_INFO, 104)
- # 12 is tcpi_unacked
- return struct.unpack(fmt, tcp_info_struct)[12]
-
-
-class WorkerMonitor(threading.Thread):
- """
- There is a statsd logger support in Gunicorn that allows us to gather
- metrics e.g. on the number of workers, requests, request duration etc. See
- https://docs.gunicorn.org/en/stable/instrumentation.html for details.
-
- To get a better understanding of the pool utilization, number of accepted
- connections, we start a thread in head worker to report these via prometheus
- metrics.
- """
-
- def __init__(self, worker):
- super().__init__()
- self.daemon = True
- self.worker = worker
-
- def run(self):
- """
- Every X seconds, check the status of the Thread pool, as well as the
- """
- active_worker_connections = Gauge(
- "gunicorn_active_worker_connections",
- "Number of active connections.",
- labelnames=["pid"],
- )
- max_worker_connections = Gauge(
- "gunicorn_max_worker_connections",
- "Maximum worker connections.",
- labelnames=["pid"],
- )
-
- total_threads = Gauge(
- "gunicorn_max_worker_threads",
- "Size of the thread pool per worker.",
- labelnames=["pid"],
- )
- active_threads = Gauge(
- "gunicorn_active_worker_threads",
- "Number of threads actively processing requests.",
- labelnames=["pid"],
- )
-
- pending_requests = Gauge(
- "gunicorn_pending_requests",
- "Number of requests that have been read from a connection but have not completed yet",
- labelnames=["pid"],
- )
-
- max_worker_connections.labels(pid=self.worker.pid).set(self.worker.cfg.worker_connections)
- total_threads.labels(pid=self.worker.pid).set(self.worker.cfg.threads)
-
- while True:
- active_worker_connections.labels(pid=self.worker.pid).set(self.worker.nr_conns)
- active_threads.labels(pid=self.worker.pid).set(min(self.worker.cfg.threads, len(self.worker.futures)))
- pending_requests.labels(pid=self.worker.pid).set(len(self.worker.futures))
-
- time.sleep(METRICS_UPDATE_INTERVAL_SECONDS)
-
-
-LOGGING_FORMATTER_NAME = os.getenv("LOGGING_FORMATTER_NAME", "default")
-
-
-# Setup stdlib logging to be handled by Structlog
-def add_pid_and_tid(
- logger: logging.Logger, method_name: str, event_dict: structlog.types.EventDict
-) -> structlog.types.EventDict:
- event_dict["pid"] = os.getpid()
- event_dict["tid"] = threading.get_ident()
- return event_dict
-
-
-pre_chain = [
- # Add the log level and a timestamp to the event_dict if the log entry
- # is not from structlog.
- structlog.stdlib.add_log_level,
- structlog.stdlib.add_logger_name,
- add_pid_and_tid,
- structlog.processors.TimeStamper(fmt="iso"),
-]
-
-
-# This is a copy the default logging config for gunicorn but with additions to:
-#
-# 1. non propagate loggers to the root handlers (otherwise we get duplicate log
-# lines)
-# 2. use structlog for processing of log records
-#
-# See
-# https://github.com/benoitc/gunicorn/blob/0b953b803786997d633d66c0f7c7b290df75e07c/gunicorn/glogging.py#L48
-# for the default log settings.
-logconfig_dict = {
- "version": 1,
- "disable_existing_loggers": True,
- "formatters": {
- "default": {
- "()": structlog.stdlib.ProcessorFormatter,
- "processor": structlog.dev.ConsoleRenderer(colors=True),
- "foreign_pre_chain": pre_chain,
- },
- "json": {
- "()": structlog.stdlib.ProcessorFormatter,
- "processor": structlog.processors.JSONRenderer(),
- "foreign_pre_chain": pre_chain,
- },
- },
- "root": {"level": "INFO", "handlers": ["console"]},
- "loggers": {
- "gunicorn.error": {
- "level": "INFO",
- "handlers": ["error_console"],
- "propagate": False,
- "qualname": "gunicorn.error",
- },
- "gunicorn.access": {
- "level": "INFO",
- "handlers": ["console"],
- "propagate": False,
- "qualname": "gunicorn.access",
- },
- },
- "handlers": {
- "error_console": {
- "class": "logging.StreamHandler",
- "formatter": LOGGING_FORMATTER_NAME,
- "stream": "ext://sys.stderr",
- },
- "console": {
- "class": "logging.StreamHandler",
- "formatter": LOGGING_FORMATTER_NAME,
- "stream": "ext://sys.stdout",
- },
- },
-}
diff --git a/mypy-baseline.txt b/mypy-baseline.txt
index 5dec5cbf9c..43e7df8b79 100644
--- a/mypy-baseline.txt
+++ b/mypy-baseline.txt
@@ -5,8 +5,6 @@ common/hogvm/stl/compile.py:0: error: Bracketed expression "[...]" is not valid
dags/backups.py:0: error: "DynamicOutput[Any]" has no attribute "map" [attr-defined]
dags/backups.py:0: error: Argument 1 to "_bucket_base_path" of "Backup" has incompatible type "str | None"; expected "str" [arg-type]
dags/backups.py:0: error: Argument 1 to "_bucket_base_path" of "Backup" has incompatible type "str | None"; expected "str" [arg-type]
-dags/backups.py:0: error: Item "None" of "Backup | None" has no attribute "shard" [union-attr]
-dags/backups.py:0: error: Item "None" of "Backup | None" has no attribute "shard" [union-attr]
dags/backups.py:0: error: Missing return statement [return]
dags/backups.py:0: error: Missing return statement [return]
dags/backups.py:0: error: Return value expected [return-value]
@@ -636,9 +634,6 @@ posthog/hogql/utils.py:0: error: Argument 2 to "_deserialize" has incompatible t
posthog/hogql/utils.py:0: error: Incompatible types in assignment (expression has type "AST | Any", variable has type "list[AST | Any]") [assignment]
posthog/hogql/visitor.py:0: error: Argument "expr" to "ThrowStatement" has incompatible type "Any | None"; expected "Expr" [arg-type]
posthog/hogql_queries/insights/funnels/base.py:0: error: Incompatible type for lookup 'pk': (got "str | int | None", expected "str | int") [misc]
-posthog/hogql_queries/insights/test/test_paginators.py:0: error: Value of type "object" is not indexable [index]
-posthog/hogql_queries/insights/test/test_paginators.py:0: error: Value of type "object" is not indexable [index]
-posthog/hogql_queries/insights/test/test_paginators.py:0: error: Value of type "object" is not indexable [index]
posthog/hogql_queries/insights/trends/test/test_trends.py:0: error: Item "None" of "Any | None" has no attribute "get" [union-attr]
posthog/hogql_queries/insights/trends/test/test_trends.py:0: error: Item "None" of "Any | None" has no attribute "get" [union-attr]
posthog/hogql_queries/insights/trends/trends_query_runner.py:0: error: Argument 1 to "dict" has incompatible type "Any | None"; expected "SupportsKeysAndGetItem[Any, Any]" [arg-type]
diff --git a/playwright/e2e/onboarding-use-case-selection.spec.ts b/playwright/e2e/onboarding-use-case-selection.spec.ts
new file mode 100644
index 0000000000..4bc206a82e
--- /dev/null
+++ b/playwright/e2e/onboarding-use-case-selection.spec.ts
@@ -0,0 +1,65 @@
+import { expect, test } from '../utils/playwright-test-base'
+
+test.describe('Use Case Selection Onboarding', () => {
+ test.beforeEach(async ({ page, request }) => {
+ // Reset onboarding state
+ await request.patch('/api/projects/1/', {
+ data: { completed_snippet_onboarding: false },
+ headers: { Authorization: 'Bearer e2e_demo_api_key' },
+ })
+
+ // Enable the feature flag
+ await page.goto('/home')
+ await page.evaluate(() => {
+ window.posthog?.featureFlags?.override({ 'onboarding-use-case-selection': true })
+ })
+ })
+
+ test.afterAll(async ({ request }) => {
+ await request.patch('/api/projects/1/', {
+ data: { completed_snippet_onboarding: true },
+ headers: { Authorization: 'Bearer e2e_demo_api_key' },
+ })
+ })
+
+ test('displays use case selection page', async ({ page }) => {
+ await page.goto('/onboarding/use-case')
+
+ // Check for main heading
+ await expect(page.locator('h1')).toContainText('What do you want to do with PostHog?')
+
+ // Check for specific use cases
+ await expect(page.locator('text=Understand how users behave')).toBeVisible()
+ await expect(page.locator('text=Find and fix issues')).toBeVisible()
+ await expect(page.locator('text=Launch features with confidence')).toBeVisible()
+ await expect(page.locator('text=Collect user feedback')).toBeVisible()
+ await expect(page.locator('text=Monitor AI applications')).toBeVisible()
+ })
+
+ test('selects a use case and navigates to products page', async ({ page }) => {
+ await page.goto('/onboarding/use-case')
+
+ // Click on "Understand how users behave" use case
+ await page.locator('text=Understand how users behave').click()
+
+ // Should navigate to products page with useCase param
+ await expect(page).toHaveURL(/\/products\?useCase=see_user_behavior/)
+
+ // Should show recommended products pre-selected
+ await expect(page.locator('[data-attr="product_analytics-onboarding-card"]')).toHaveClass(/border-accent/)
+ await expect(page.locator('[data-attr="session_replay-onboarding-card"]')).toHaveClass(/border-accent/)
+ })
+
+ test('products page shows back button with use case selection', async ({ page }) => {
+ await page.goto('/products?useCase=fix_issues')
+
+ // Back button should be visible
+ await expect(page.locator('text=Go back to change my goal')).toBeVisible()
+
+ // Click back button
+ await page.locator('text=Go back to change my goal').click()
+
+ // Should navigate back to use case selection
+ await expect(page).toHaveURL(/\/onboarding\/use-case/)
+ })
+})
diff --git a/plugin-server/src/config/config.ts b/plugin-server/src/config/config.ts
index 1e7c4b30b3..61bf029a9b 100644
--- a/plugin-server/src/config/config.ts
+++ b/plugin-server/src/config/config.ts
@@ -299,11 +299,11 @@ export function getDefaultConfig(): PluginsServerConfig {
COOKIELESS_FORCE_STATELESS_MODE: false,
COOKIELESS_DISABLED: false,
COOKIELESS_DELETE_EXPIRED_LOCAL_SALTS_INTERVAL_MS: 60 * 60 * 1000, // 1 hour
- COOKIELESS_SESSION_TTL_SECONDS: 60 * 60 * 24, // 24 hours
- COOKIELESS_SALT_TTL_SECONDS: 60 * 60 * 24, // 24 hours
+ COOKIELESS_SESSION_TTL_SECONDS: 60 * 60 * (72 + 24), // 96 hours (72 ingestion lag + 24 validity)
+ COOKIELESS_SALT_TTL_SECONDS: 60 * 60 * (72 + 24), // 96 hours (72 ingestion lag + 24 validity)
COOKIELESS_SESSION_INACTIVITY_MS: 30 * 60 * 1000, // 30 minutes
COOKIELESS_IDENTIFIES_TTL_SECONDS:
- (24 + // max supported ingestion lag
+ (72 + // max supported ingestion lag in hours
12 + // max negative timezone in the world*/
14 + // max positive timezone in the world */
24) * // amount of time salt is valid in one timezone
@@ -333,6 +333,13 @@ export function getDefaultConfig(): PluginsServerConfig {
PERSON_MERGE_ASYNC_ENABLED: false,
// Batch size for sync person merge processing (0 = unlimited, process all distinct IDs in one query)
PERSON_MERGE_SYNC_BATCH_SIZE: 0,
+ // Enable person table cutover migration
+ PERSON_TABLE_CUTOVER_ENABLED: false,
+ // New person table name for cutover migration
+ PERSON_NEW_TABLE_NAME: 'posthog_person_new',
+ // Person ID offset threshold - person IDs >= this value route to new table
+ // Default is max safe integer to ensure cutover doesn't activate accidentally
+ PERSON_NEW_TABLE_ID_OFFSET: Number.MAX_SAFE_INTEGER,
GROUP_BATCH_WRITING_MAX_CONCURRENT_UPDATES: 10,
GROUP_BATCH_WRITING_OPTIMISTIC_UPDATE_RETRY_INTERVAL_MS: 50,
diff --git a/plugin-server/src/ingestion/ai-costs/providers/canonical-providers.ts b/plugin-server/src/ingestion/ai-costs/providers/canonical-providers.ts
index 30c49bdb78..88bd3c01ed 100644
--- a/plugin-server/src/ingestion/ai-costs/providers/canonical-providers.ts
+++ b/plugin-server/src/ingestion/ai-costs/providers/canonical-providers.ts
@@ -1,5 +1,5 @@
// Auto-generated from OpenRouter API - Do not edit manually
-// Generated at: 2025-11-10 10:03:38 UTC
+// Generated at: 2025-11-13 20:02:26 UTC
export type CanonicalProvider =
| 'default'
@@ -20,6 +20,7 @@ export type CanonicalProvider =
| 'chutes'
| 'chutes-bf16'
| 'chutes-fp8'
+ | 'chutes-int4'
| 'cirrascale'
| 'clarifai-fp4'
| 'cloudflare'
@@ -40,7 +41,9 @@ export type CanonicalProvider =
| 'fireworks-fp8'
| 'friendli'
| 'gmicloud-bf16'
+ | 'gmicloud-fp4'
| 'gmicloud-fp8'
+ | 'gmicloud-int4'
| 'google-ai-studio'
| 'google-vertex'
| 'google-vertex-europe'
@@ -60,6 +63,7 @@ export type CanonicalProvider =
| 'mancer-int4'
| 'meta-fp8'
| 'minimax'
+ | 'minimax-fp8'
| 'mistral'
| 'modelrun'
| 'moonshotai'
@@ -89,6 +93,8 @@ export type CanonicalProvider =
| 'parasail-bf16'
| 'parasail-fp4'
| 'parasail-fp8'
+ | 'parasail-int4'
+ | 'parasail-int8'
| 'perplexity'
| 'phala'
| 'relace-fp8'
@@ -97,7 +103,6 @@ export type CanonicalProvider =
| 'sambanova-fp8'
| 'sambanova-turbo'
| 'siliconflow-fp8'
- | 'stealth'
| 'switchpoint'
| 'together'
| 'together-fp16'
diff --git a/plugin-server/src/ingestion/ai-costs/providers/llm-costs.json b/plugin-server/src/ingestion/ai-costs/providers/llm-costs.json
index 67b8829fe3..fe54c64b38 100644
--- a/plugin-server/src/ingestion/ai-costs/providers/llm-costs.json
+++ b/plugin-server/src/ingestion/ai-costs/providers/llm-costs.json
@@ -154,16 +154,12 @@
"model": "alpindale/goliath-120b",
"cost": {
"default": {
- "prompt_token": 0.000004,
- "completion_token": 0.0000055
+ "prompt_token": 0.000006,
+ "completion_token": 0.000008
},
"mancer-int4": {
"prompt_token": 0.000006,
"completion_token": 0.000008
- },
- "nextbit-int4": {
- "prompt_token": 0.000004,
- "completion_token": 0.0000055
}
}
},
@@ -350,13 +346,6 @@
"prompt_token": 0.000003,
"completion_token": 0.000015,
"image": 0.0048
- },
- "google-vertex": {
- "prompt_token": 0.000003,
- "completion_token": 0.000015,
- "cache_read_token": 3e-7,
- "cache_write_token": 0.00000375,
- "image": 0.0048
}
}
},
@@ -2275,12 +2264,8 @@
"model": "meta-llama/llama-3.1-405b-instruct",
"cost": {
"default": {
- "prompt_token": 8e-7,
- "completion_token": 8e-7
- },
- "deepinfra-fp8": {
- "prompt_token": 8e-7,
- "completion_token": 8e-7
+ "prompt_token": 0.0000035,
+ "completion_token": 0.0000035
},
"google-vertex": {
"prompt_token": 0.000005,
@@ -2406,16 +2391,12 @@
"model": "meta-llama/llama-3.2-1b-instruct",
"cost": {
"default": {
- "prompt_token": 5e-9,
- "completion_token": 1e-8
+ "prompt_token": 2.7e-8,
+ "completion_token": 2e-7
},
"cloudflare": {
"prompt_token": 2.7e-8,
"completion_token": 2e-7
- },
- "deepinfra-bf16": {
- "prompt_token": 5e-9,
- "completion_token": 1e-8
}
}
},
@@ -2540,7 +2521,7 @@
"prompt_token": 1.3e-7,
"completion_token": 3.9e-7
},
- "parasail-fp8": {
+ "parasail-int8": {
"prompt_token": 1.5e-7,
"completion_token": 5e-7
},
@@ -2613,10 +2594,6 @@
"completion_token": 6e-7,
"image": 0.0006684
},
- "deepinfra-turbo": {
- "prompt_token": 5e-7,
- "completion_token": 5e-7
- },
"friendli": {
"prompt_token": 2e-7,
"completion_token": 6e-7
@@ -2953,11 +2930,23 @@
"prompt_token": 2.6e-7,
"completion_token": 0.00000102
},
+ "deepinfra-fp8": {
+ "prompt_token": 2.7e-7,
+ "completion_token": 0.00000115
+ },
"fireworks": {
"prompt_token": 3e-7,
"completion_token": 0.0000012
},
- "minimax": {
+ "gmicloud-fp8": {
+ "prompt_token": 3e-7,
+ "completion_token": 0.0000012
+ },
+ "google-vertex": {
+ "prompt_token": 3e-7,
+ "completion_token": 1.2e-7
+ },
+ "minimax-fp8": {
"prompt_token": 2.55e-7,
"completion_token": 0.00000102
},
@@ -2971,23 +2960,6 @@
}
}
},
- {
- "model": "minimax/minimax-m2:free",
- "cost": {
- "default": {
- "prompt_token": 0,
- "completion_token": 0
- },
- "gmicloud-fp8": {
- "prompt_token": 0,
- "completion_token": 0
- },
- "minimax": {
- "prompt_token": 0,
- "completion_token": 0
- }
- }
- },
{
"model": "mistralai/codestral-2501",
"cost": {
@@ -3177,12 +3149,8 @@
"model": "mistralai/mistral-7b-instruct-v0.3",
"cost": {
"default": {
- "prompt_token": 2.8e-8,
- "completion_token": 5.4e-8
- },
- "deepinfra-bf16": {
- "prompt_token": 2.8e-8,
- "completion_token": 5.4e-8
+ "prompt_token": 2e-7,
+ "completion_token": 2e-7
},
"together": {
"prompt_token": 2e-7,
@@ -3633,9 +3601,9 @@
"cache_read_token": 1.5e-7
},
"moonshotai-turbo": {
- "prompt_token": 0.0000024,
- "completion_token": 0.00001,
- "cache_read_token": 6e-7
+ "prompt_token": 0.00000115,
+ "completion_token": 0.000008,
+ "cache_read_token": 1.5e-7
},
"novita-fp8": {
"prompt_token": 6e-7,
@@ -3671,8 +3639,9 @@
"completion_token": 0.0000025
},
"moonshotai-turbo": {
- "prompt_token": 0.0000024,
- "completion_token": 0.00001
+ "prompt_token": 0.00000115,
+ "completion_token": 0.000008,
+ "cache_read_token": 1.5e-7
}
}
},
@@ -3680,9 +3649,24 @@
"model": "moonshotai/kimi-k2-thinking",
"cost": {
"default": {
+ "prompt_token": 5.5e-7,
+ "completion_token": 0.00000225
+ },
+ "chutes-int4": {
+ "prompt_token": 5.5e-7,
+ "completion_token": 0.00000225
+ },
+ "deepinfra-fp8": {
+ "prompt_token": 5.5e-7,
+ "completion_token": 0.0000025
+ },
+ "fireworks": {
"prompt_token": 6e-7,
- "completion_token": 0.0000025,
- "cache_read_token": 1.5e-7
+ "completion_token": 0.0000025
+ },
+ "gmicloud-int4": {
+ "prompt_token": 8e-7,
+ "completion_token": 0.0000012
},
"moonshotai-int4": {
"prompt_token": 6e-7,
@@ -3699,9 +3683,13 @@
"completion_token": 0.0000025,
"cache_read_token": 1.5e-7
},
- "parasail-fp4": {
+ "parasail-int4": {
"prompt_token": 6e-7,
"completion_token": 0.0000025
+ },
+ "together": {
+ "prompt_token": 0.0000012,
+ "completion_token": 0.000004
}
}
},
@@ -4558,12 +4546,76 @@
}
}
},
+ {
+ "model": "openai/gpt-5.1",
+ "cost": {
+ "default": {
+ "prompt_token": 0.00000125,
+ "completion_token": 0.00001,
+ "cache_read_token": 1.25e-7,
+ "web_search": 0.01
+ },
+ "openai-default": {
+ "prompt_token": 0.00000125,
+ "completion_token": 0.00001,
+ "cache_read_token": 1.25e-7,
+ "web_search": 0.01
+ }
+ }
+ },
+ {
+ "model": "openai/gpt-5.1-chat",
+ "cost": {
+ "default": {
+ "prompt_token": 0.00000125,
+ "completion_token": 0.00001,
+ "cache_read_token": 1.25e-7,
+ "web_search": 0.01
+ },
+ "openai": {
+ "prompt_token": 0.00000125,
+ "completion_token": 0.00001,
+ "cache_read_token": 1.25e-7,
+ "web_search": 0.01
+ }
+ }
+ },
+ {
+ "model": "openai/gpt-5.1-codex",
+ "cost": {
+ "default": {
+ "prompt_token": 0.00000125,
+ "completion_token": 0.00001,
+ "cache_read_token": 1.25e-7
+ },
+ "openai": {
+ "prompt_token": 0.00000125,
+ "completion_token": 0.00001,
+ "cache_read_token": 1.25e-7
+ }
+ }
+ },
+ {
+ "model": "openai/gpt-5.1-codex-mini",
+ "cost": {
+ "default": {
+ "prompt_token": 0.0000015,
+ "completion_token": 0.000006,
+ "cache_read_token": 3.75e-7
+ },
+ "openai": {
+ "prompt_token": 0.0000015,
+ "completion_token": 0.000006,
+ "cache_read_token": 3.75e-7
+ }
+ }
+ },
{
"model": "openai/gpt-oss-120b",
"cost": {
"default": {
- "prompt_token": 4e-8,
- "completion_token": 4e-7
+ "prompt_token": 0,
+ "completion_token": 0
},
"amazon-bedrock": {
"prompt_token": 1.5e-7,
@@ -4606,13 +4658,13 @@
"prompt_token": 1.5e-7,
"completion_token": 6e-7
},
- "gmicloud-bf16": {
- "prompt_token": 7e-8,
- "completion_token": 2.8e-7
+ "gmicloud-fp4": {
+ "prompt_token": 0,
+ "completion_token": 0
},
"google-vertex": {
- "prompt_token": 1.5e-7,
- "completion_token": 6e-7
+ "prompt_token": 9e-8,
+ "completion_token": 3.6e-7
},
"groq": {
"prompt_token": 1.5e-7,
@@ -4702,8 +4754,8 @@
"completion_token": 3e-7
},
"google-vertex": {
- "prompt_token": 7.5e-8,
- "completion_token": 3e-7
+ "prompt_token": 7e-8,
+ "completion_token": 2.5e-7
},
"groq": {
"prompt_token": 7.5e-8,
@@ -4980,19 +5032,6 @@
}
}
},
- {
- "model": "openrouter/polaris-alpha",
- "cost": {
- "default": {
- "prompt_token": 0,
- "completion_token": 0
- },
- "stealth": {
- "prompt_token": 0,
- "completion_token": 0
- }
- }
- },
{
"model": "perplexity/sonar",
"cost": {
@@ -5968,8 +6007,8 @@
"completion_token": 8e-7
},
"alibaba": {
- "prompt_token": 5e-7,
- "completion_token": 0.000002
+ "prompt_token": 1.5e-7,
+ "completion_token": 0.0000012
},
"atlas-cloud-fp8": {
"prompt_token": 1.5e-7,
@@ -6074,8 +6113,8 @@
"completion_token": 0.0000015
},
"parasail-fp8": {
- "prompt_token": 5e-7,
- "completion_token": 0.00000275
+ "prompt_token": 3e-7,
+ "completion_token": 0.0000019
},
"siliconflow-fp8": {
"prompt_token": 3e-7,
@@ -6169,6 +6208,10 @@
"prompt_token": 1.8e-7,
"completion_token": 6.9e-7
},
+ "nextbit-bf16": {
+ "prompt_token": 1.2e-7,
+ "completion_token": 7e-7
+ },
"novita-fp8": {
"prompt_token": 8e-8,
"completion_token": 5e-7
@@ -6383,6 +6426,10 @@
"prompt_token": 3e-7,
"completion_token": 5e-7
},
+ "nextbit-bf16": {
+ "prompt_token": 3e-7,
+ "completion_token": 5e-7
+ },
"parasail-bf16": {
"prompt_token": 3e-7,
"completion_token": 5e-7
@@ -6657,7 +6704,7 @@
"cost": {
"default": {
"prompt_token": 3.5e-7,
- "completion_token": 0.00000155
+ "completion_token": 0.0000015
},
"chutes-bf16": {
"prompt_token": 3.5e-7,
@@ -6669,7 +6716,7 @@
},
"mancer-fp8": {
"prompt_token": 3.5e-7,
- "completion_token": 0.000002
+ "completion_token": 0.0000015
},
"nebius-fp8": {
"prompt_token": 6e-7,
@@ -6785,11 +6832,6 @@
"prompt_token": 4e-7,
"completion_token": 0.00000175
},
- "deepinfra-fp4": {
- "prompt_token": 4.5e-7,
- "completion_token": 0.0000019,
- "cache_read_token": 1.1e-7
- },
"fireworks": {
"prompt_token": 5.5e-7,
"completion_token": 0.0000021899999999999998
@@ -6834,12 +6876,8 @@
"model": "z-ai/glm-4.6:exacto",
"cost": {
"default": {
- "prompt_token": 4.5e-7,
- "completion_token": 0.0000019
- },
- "deepinfra-fp4": {
- "prompt_token": 4.5e-7,
- "completion_token": 0.0000019
+ "prompt_token": 6e-7,
+ "completion_token": 0.0000022
},
"novita-bf16": {
"prompt_token": 6e-7,
diff --git a/plugin-server/src/ingestion/cookieless/cookieless-manager.test.ts b/plugin-server/src/ingestion/cookieless/cookieless-manager.test.ts
index 2dd43f6c73..f6c925a5ed 100644
--- a/plugin-server/src/ingestion/cookieless/cookieless-manager.test.ts
+++ b/plugin-server/src/ingestion/cookieless/cookieless-manager.test.ts
@@ -22,6 +22,7 @@ import {
extractRootDomain,
getRedisIdentifiesKey,
hashToDistinctId,
+ isCalendarDateValid,
sessionStateToBuffer,
toYYYYMMDDInTimezoneSafe,
} from './cookieless-manager'
@@ -113,6 +114,59 @@ describe('CookielessManager', () => {
})
})
+ describe('isCalendarDateValid', () => {
+ const fixedTime = new Date('2025-11-13T12:00:00Z')
+
+ beforeEach(() => {
+ jest.useFakeTimers({ now: fixedTime })
+ })
+
+ afterEach(() => {
+ jest.useRealTimers()
+ })
+
+ it('should accept today', () => {
+ // Fixed time: 2025-11-13 12:00 UTC
+ expect(isCalendarDateValid('2025-11-13')).toBe(true)
+ })
+
+ it('should accept yesterday', () => {
+ // Salt window for 2025-11-12: Nov 11 12:00 to Nov 15 14:00
+ // NOW (Nov 13 12:00) is within window
+ expect(isCalendarDateValid('2025-11-12')).toBe(true)
+ })
+
+ it('should accept 3 days ago (within 72h + timezone buffer)', () => {
+ // Salt window for 2025-11-10: Nov 9 12:00 to Nov 13 14:00
+ // NOW (Nov 13 12:00) is within window
+ expect(isCalendarDateValid('2025-11-10')).toBe(true)
+ })
+
+ it('should reject 4 days ago (salt window expired)', () => {
+ // Salt window for 2025-11-09: Nov 8 12:00 to Nov 12 14:00
+ // NOW (Nov 13 12:00) is after window ended
+ expect(isCalendarDateValid('2025-11-09')).toBe(false)
+ })
+
+ it('should reject 5 days ago (salt window expired)', () => {
+ // Salt window for 2025-11-08: Nov 7 12:00 to Nov 11 14:00
+ // NOW (Nov 13 12:00) is well after window ended
+ expect(isCalendarDateValid('2025-11-08')).toBe(false)
+ })
+
+ it('should reject tomorrow-ish dates', () => {
+ // Salt window for 2025-11-08: Nov 7 12:00 to Nov 11 14:00
+ // NOW (Nov 13 12:00) is well after window ended
+ expect(isCalendarDateValid('2025-11-15')).toBe(false)
+ })
+
+ it('should reject invalid date format', () => {
+ expect(isCalendarDateValid('not-a-date')).toBe(false)
+ expect(isCalendarDateValid('2025/01/01')).toBe(false)
+ expect(isCalendarDateValid('2025-13-01')).toBe(false)
+ })
+ })
+
describe('pipeline step', () => {
let hub: Hub
let organizationId: string
diff --git a/plugin-server/src/ingestion/cookieless/cookieless-manager.ts b/plugin-server/src/ingestion/cookieless/cookieless-manager.ts
index 2bdc1bdbe5..f55f7e3bae 100644
--- a/plugin-server/src/ingestion/cookieless/cookieless-manager.ts
+++ b/plugin-server/src/ingestion/cookieless/cookieless-manager.ts
@@ -83,6 +83,7 @@ export const COOKIELESS_MODE_FLAG_PROPERTY = '$cookieless_mode'
export const COOKIELESS_EXTRA_HASH_CONTENTS_PROPERTY = '$cookieless_extra'
const MAX_NEGATIVE_TIMEZONE_HOURS = 12
const MAX_POSITIVE_TIMEZONE_HOURS = 14
+const MAX_SUPPORTED_INGESTION_LAG_HOURS = 72 // if changing this, you will also need to change the TTLs
interface CookielessConfig {
disabled: boolean
@@ -683,7 +684,7 @@ export function isCalendarDateValid(yyyymmdd: string): boolean {
startOfDayMinus12.setUTCHours(-MAX_NEGATIVE_TIMEZONE_HOURS) // Start at UTC−12
const endOfDayPlus14 = new Date(utcDate)
- endOfDayPlus14.setUTCHours(MAX_POSITIVE_TIMEZONE_HOURS + 24) // End at UTC+14
+ endOfDayPlus14.setUTCHours(MAX_POSITIVE_TIMEZONE_HOURS + MAX_SUPPORTED_INGESTION_LAG_HOURS) // End at UTC+14 (72h ingestion lag buffer)
const isGteMinimum = nowUTC >= startOfDayMinus12
const isLtMaximum = nowUTC < endOfDayPlus14
diff --git a/plugin-server/src/ingestion/ingestion-e2e.test.ts b/plugin-server/src/ingestion/ingestion-e2e.test.ts
index 92d42b6c39..fcad86a4b6 100644
--- a/plugin-server/src/ingestion/ingestion-e2e.test.ts
+++ b/plugin-server/src/ingestion/ingestion-e2e.test.ts
@@ -242,7 +242,7 @@ describe('Event Pipeline E2E tests', () => {
expect(warnings).toEqual([
expect.objectContaining({
type: 'client_ingestion_warning',
- team_id: team.id.toString(),
+ team_id: team.id,
details: expect.objectContaining({ message: 'test message' }),
}),
])
@@ -259,7 +259,7 @@ describe('Event Pipeline E2E tests', () => {
await waitForExpect(async () => {
const events = await fetchEvents(hub, team.id)
expect(events.length).toBe(1)
- expect(events[0].team_id).toBe(team.id.toString())
+ expect(events[0].team_id).toBe(team.id)
})
})
@@ -1239,350 +1239,360 @@ describe('Event Pipeline E2E tests', () => {
return queryResult.map((warning: any) => ({ ...warning, details: parseJSON(warning.details) }))
}
- testWithTeamIngester('alias events ordering scenario 1: original order', {}, async (ingester, hub, team) => {
- const testName = DateTime.now().toFormat('yyyy-MM-dd-HH-mm-ss')
- const user1DistinctId = 'user1-distinct-id'
- const user2DistinctId = 'user2-distinct-id'
- const user3DistinctId = 'user3-distinct-id'
+ // TODO: Re-enable after table cutover is complete and FK constraints are restored
+ // Temporarily skipped because we removed the FK constraint from posthog_persondistinctid -> posthog_person
+ // to allow writes to both old and new tables during migration
+ test.skip('alias events ordering scenario 1: original order', () => {})
+ // testWithTeamIngester('alias events ordering scenario 1: original order', {}, async (ingester, hub, team) => {
+ // const testName = DateTime.now().toFormat('yyyy-MM-dd-HH-mm-ss')
+ // const user1DistinctId = 'user1-distinct-id'
+ // const user2DistinctId = 'user2-distinct-id'
+ // const user3DistinctId = 'user3-distinct-id'
- const events = [
- // User 1 creation
- new EventBuilder(team, user1DistinctId)
- .withEvent('$identify')
- .withProperties({
- $set: {
- name: 'User 1',
- email: `user1-${user1DistinctId}@example.com`,
- age: 30,
- test_name: testName,
- },
- })
- .build(),
- new EventBuilder(team, user1DistinctId)
- .withEvent('$identify')
- .withProperties({
- $set: {
- new_name: 'User 1 - Updated',
- },
- })
- .build(),
- // User 2 creation
- new EventBuilder(team, user2DistinctId)
- .withEvent('$identify')
- .withProperties({
- $set: {
- name: 'User 2',
- email: `user2-${user2DistinctId}@example.com`,
- age: 30,
- test_name: testName,
- },
- })
- .build(),
- new EventBuilder(team, user2DistinctId)
- .withEvent('$identify')
- .withProperties({
- $set: {
- new_name: 'User 2 - Updated',
- },
- })
- .build(),
- // Merge users: alias user1 -> user2
- new EventBuilder(team, user1DistinctId)
- .withEvent('$create_alias')
- .withProperties({
- distinct_id: user1DistinctId,
- alias: user2DistinctId,
- })
- .build(),
+ // const events = [
+ // // User 1 creation
+ // new EventBuilder(team, user1DistinctId)
+ // .withEvent('$identify')
+ // .withProperties({
+ // $set: {
+ // name: 'User 1',
+ // email: `user1-${user1DistinctId}@example.com`,
+ // age: 30,
+ // test_name: testName,
+ // },
+ // })
+ // .build(),
+ // new EventBuilder(team, user1DistinctId)
+ // .withEvent('$identify')
+ // .withProperties({
+ // $set: {
+ // new_name: 'User 1 - Updated',
+ // },
+ // })
+ // .build(),
+ // // User 2 creation
+ // new EventBuilder(team, user2DistinctId)
+ // .withEvent('$identify')
+ // .withProperties({
+ // $set: {
+ // name: 'User 2',
+ // email: `user2-${user2DistinctId}@example.com`,
+ // age: 30,
+ // test_name: testName,
+ // },
+ // })
+ // .build(),
+ // new EventBuilder(team, user2DistinctId)
+ // .withEvent('$identify')
+ // .withProperties({
+ // $set: {
+ // new_name: 'User 2 - Updated',
+ // },
+ // })
+ // .build(),
+ // // Merge users: alias user1 -> user2
+ // new EventBuilder(team, user1DistinctId)
+ // .withEvent('$create_alias')
+ // .withProperties({
+ // distinct_id: user1DistinctId,
+ // alias: user2DistinctId,
+ // })
+ // .build(),
- // Create alias for user2 -> user3
- new EventBuilder(team, user2DistinctId)
- .withEvent('$create_alias')
- .withProperties({
- distinct_id: user2DistinctId,
- alias: user3DistinctId,
- })
- .build(),
- ]
+ // // Create alias for user2 -> user3
+ // new EventBuilder(team, user2DistinctId)
+ // .withEvent('$create_alias')
+ // .withProperties({
+ // distinct_id: user2DistinctId,
+ // alias: user3DistinctId,
+ // })
+ // .build(),
+ // ]
- await ingester.handleKafkaBatch(createKafkaMessages(events))
- await waitForKafkaMessages(hub)
+ // await ingester.handleKafkaBatch(createKafkaMessages(events))
+ // await waitForKafkaMessages(hub)
- await waitForExpect(async () => {
- const events = await fetchEvents(hub, team.id)
- expect(events.length).toBe(6)
+ // await waitForExpect(async () => {
+ // const events = await fetchEvents(hub, team.id)
+ // expect(events.length).toBe(6)
- // TODO: Add specific assertions based on expected behavior
- // All events should be processed without errors
- expect(events).toBeDefined()
- })
+ // // TODO: Add specific assertions based on expected behavior
+ // // All events should be processed without errors
+ // expect(events).toBeDefined()
+ // })
- // fetch the person properties
- await waitForExpect(async () => {
- const persons = await fetchPostgresPersons(hub.db, team.id)
- expect(persons.length).toBe(1)
- const personsClickhouse = await fetchPersons(hub, team.id)
- expect(personsClickhouse.length).toBe(1)
- expect(persons[0].properties).toMatchObject(
- expect.objectContaining({
- name: 'User 1',
- new_name: 'User 1 - Updated',
- email: `user1-${user1DistinctId}@example.com`,
- age: 30,
- test_name: testName,
- })
- )
- expect(personsClickhouse[0].properties).toMatchObject(
- expect.objectContaining({
- name: 'User 1',
- new_name: 'User 1 - Updated',
- email: `user1-${user1DistinctId}@example.com`,
- age: 30,
- test_name: testName,
- })
- )
- const distinctIdsPersons = await fetchDistinctIds(hub.db.postgres, {
- id: persons[0].id,
- team_id: team.id,
- } as InternalPerson)
- expect(distinctIdsPersons.length).toBe(3)
- // Except distinctids to match the ids, in any order
- expect(distinctIdsPersons.map((distinctId) => distinctId.distinct_id)).toEqual(
- expect.arrayContaining([user1DistinctId, user2DistinctId, user3DistinctId])
- )
- })
- })
+ // // fetch the person properties
+ // await waitForExpect(async () => {
+ // const persons = await fetchPostgresPersons(hub.db, team.id)
+ // expect(persons.length).toBe(1)
+ // const personsClickhouse = await fetchPersons(hub, team.id)
+ // expect(personsClickhouse.length).toBe(1)
+ // expect(persons[0].properties).toMatchObject(
+ // expect.objectContaining({
+ // name: 'User 1',
+ // new_name: 'User 1 - Updated',
+ // email: `user1-${user1DistinctId}@example.com`,
+ // age: 30,
+ // test_name: testName,
+ // })
+ // )
+ // expect(personsClickhouse[0].properties).toMatchObject(
+ // expect.objectContaining({
+ // name: 'User 1',
+ // new_name: 'User 1 - Updated',
+ // email: `user1-${user1DistinctId}@example.com`,
+ // age: 30,
+ // test_name: testName,
+ // })
+ // )
+ // const distinctIdsPersons = await fetchDistinctIds(hub.db.postgres, {
+ // id: persons[0].id,
+ // team_id: team.id,
+ // } as InternalPerson)
+ // expect(distinctIdsPersons.length).toBe(3)
+ // // Except distinctids to match the ids, in any order
+ // expect(distinctIdsPersons.map((distinctId) => distinctId.distinct_id)).toEqual(
+ // expect.arrayContaining([user1DistinctId, user2DistinctId, user3DistinctId])
+ // )
+ // })
+ // })
- testWithTeamIngester('alias events ordering scenario 2: alias first', {}, async (ingester, hub, team) => {
- const testName = DateTime.now().toFormat('yyyy-MM-dd-HH-mm-ss')
- const user1DistinctId = 'user1-distinct-id'
- const user2DistinctId = 'user2-distinct-id'
- const user3DistinctId = 'user3-distinct-id'
+ // TODO: Re-enable after table cutover is complete and FK constraints are restored
+ // Temporarily skipped because we removed the FK constraint from posthog_persondistinctid -> posthog_person
+ test.skip('alias events ordering scenario 2: alias first', () => {})
+ // testWithTeamIngester('alias events ordering scenario 2: alias first', {}, async (ingester, hub, team) => {
+ // const testName = DateTime.now().toFormat('yyyy-MM-dd-HH-mm-ss')
+ // const user1DistinctId = 'user1-distinct-id'
+ // const user2DistinctId = 'user2-distinct-id'
+ // const user3DistinctId = 'user3-distinct-id'
- const events = [
- // User 1 creation
- new EventBuilder(team, user1DistinctId)
- .withEvent('$identify')
- .withProperties({
- $set: {
- name: 'User 1',
- email: `user1-${user1DistinctId}@example.com`,
- age: 30,
- test_name: testName,
- },
- })
- .build(),
- new EventBuilder(team, user1DistinctId)
- .withEvent('$identify')
- .withProperties({
- $set: {
- new_name: 'User 1 - Updated',
- },
- })
- .build(),
- // User 2 creation
- new EventBuilder(team, user2DistinctId)
- .withProperties({
- anon_distinct_id: user2DistinctId,
- $set: {
- name: 'User 2',
- email: `user2-${user2DistinctId}@example.com`,
- age: 30,
- test_name: testName,
- },
- })
- .build(),
- new EventBuilder(team, user2DistinctId)
- .withEvent('$identify')
- .withProperties({
- $set: {
- new_name: 'User 2 - Updated',
- },
- })
- .build(),
+ // const events = [
+ // // User 1 creation
+ // new EventBuilder(team, user1DistinctId)
+ // .withEvent('$identify')
+ // .withProperties({
+ // $set: {
+ // name: 'User 1',
+ // email: `user1-${user1DistinctId}@example.com`,
+ // age: 30,
+ // test_name: testName,
+ // },
+ // })
+ // .build(),
+ // new EventBuilder(team, user1DistinctId)
+ // .withEvent('$identify')
+ // .withProperties({
+ // $set: {
+ // new_name: 'User 1 - Updated',
+ // },
+ // })
+ // .build(),
+ // // User 2 creation
+ // new EventBuilder(team, user2DistinctId)
+ // .withProperties({
+ // anon_distinct_id: user2DistinctId,
+ // $set: {
+ // name: 'User 2',
+ // email: `user2-${user2DistinctId}@example.com`,
+ // age: 30,
+ // test_name: testName,
+ // },
+ // })
+ // .build(),
+ // new EventBuilder(team, user2DistinctId)
+ // .withEvent('$identify')
+ // .withProperties({
+ // $set: {
+ // new_name: 'User 2 - Updated',
+ // },
+ // })
+ // .build(),
- // Create alias for user2 -> user3
- new EventBuilder(team, user2DistinctId)
- .withEvent('$create_alias')
- .withProperties({
- distinct_id: user2DistinctId,
- alias: user3DistinctId,
- })
- .build(),
+ // // Create alias for user2 -> user3
+ // new EventBuilder(team, user2DistinctId)
+ // .withEvent('$create_alias')
+ // .withProperties({
+ // distinct_id: user2DistinctId,
+ // alias: user3DistinctId,
+ // })
+ // .build(),
- // Merge users: alias user1 -> user2
- new EventBuilder(team, user1DistinctId)
- .withEvent('$create_alias')
- .withProperties({
- distinct_id: user1DistinctId,
- alias: user2DistinctId,
- })
- .build(),
- ]
+ // // Merge users: alias user1 -> user2
+ // new EventBuilder(team, user1DistinctId)
+ // .withEvent('$create_alias')
+ // .withProperties({
+ // distinct_id: user1DistinctId,
+ // alias: user2DistinctId,
+ // })
+ // .build(),
+ // ]
- await ingester.handleKafkaBatch(createKafkaMessages(events))
- await waitForKafkaMessages(hub)
+ // await ingester.handleKafkaBatch(createKafkaMessages(events))
+ // await waitForKafkaMessages(hub)
- await waitForExpect(async () => {
- const events = await fetchEvents(hub, team.id)
- expect(events.length).toBe(6)
+ // await waitForExpect(async () => {
+ // const events = await fetchEvents(hub, team.id)
+ // expect(events.length).toBe(6)
- // TODO: Add specific assertions based on expected behavior
- // All events should be processed without errors
- expect(events).toBeDefined()
- })
+ // // TODO: Add specific assertions based on expected behavior
+ // // All events should be processed without errors
+ // expect(events).toBeDefined()
+ // })
- // fetch the person properties
- await waitForExpect(async () => {
- const persons = await fetchPostgresPersons(hub.db, team.id)
- expect(persons.length).toBe(1)
- const personsClickhouse = await fetchPersons(hub, team.id)
- expect(personsClickhouse.length).toBe(1)
- expect(persons[0].properties).toMatchObject(
- expect.objectContaining({
- name: 'User 1',
- new_name: 'User 1 - Updated',
- email: `user1-${user1DistinctId}@example.com`,
- age: 30,
- test_name: testName,
- })
- )
- expect(personsClickhouse[0].properties).toMatchObject(
- expect.objectContaining({
- name: 'User 1',
- new_name: 'User 1 - Updated',
- email: `user1-${user1DistinctId}@example.com`,
- age: 30,
- test_name: testName,
- })
- )
- const distinctIdsPersons = await fetchDistinctIds(hub.db.postgres, {
- id: persons[0].id,
- team_id: team.id,
- } as InternalPerson)
- expect(distinctIdsPersons.length).toBe(3)
- // Except distinctids to match the ids, in any order
- expect(distinctIdsPersons.map((distinctId) => distinctId.distinct_id)).toEqual(
- expect.arrayContaining([user1DistinctId, user2DistinctId, user3DistinctId])
- )
- })
- })
+ // // fetch the person properties
+ // await waitForExpect(async () => {
+ // const persons = await fetchPostgresPersons(hub.db, team.id)
+ // expect(persons.length).toBe(1)
+ // const personsClickhouse = await fetchPersons(hub, team.id)
+ // expect(personsClickhouse.length).toBe(1)
+ // expect(persons[0].properties).toMatchObject(
+ // expect.objectContaining({
+ // name: 'User 1',
+ // new_name: 'User 1 - Updated',
+ // email: `user1-${user1DistinctId}@example.com`,
+ // age: 30,
+ // test_name: testName,
+ // })
+ // )
+ // expect(personsClickhouse[0].properties).toMatchObject(
+ // expect.objectContaining({
+ // name: 'User 1',
+ // new_name: 'User 1 - Updated',
+ // email: `user1-${user1DistinctId}@example.com`,
+ // age: 30,
+ // test_name: testName,
+ // })
+ // )
+ // const distinctIdsPersons = await fetchDistinctIds(hub.db.postgres, {
+ // id: persons[0].id,
+ // team_id: team.id,
+ // } as InternalPerson)
+ // expect(distinctIdsPersons.length).toBe(3)
+ // // Except distinctids to match the ids, in any order
+ // expect(distinctIdsPersons.map((distinctId) => distinctId.distinct_id)).toEqual(
+ // expect.arrayContaining([user1DistinctId, user2DistinctId, user3DistinctId])
+ // )
+ // })
+ // })
- testWithTeamIngester('alias events ordering scenario 2: user 2 first', {}, async (ingester, hub, team) => {
- const testName = DateTime.now().toFormat('yyyy-MM-dd-HH-mm-ss')
- const user1DistinctId = 'user1-distinct-id'
- const user2DistinctId = 'user2-distinct-id'
- const user3DistinctId = 'user3-distinct-id'
+ // TODO: Re-enable after table cutover is complete and FK constraints are restored
+ // Temporarily skipped because we removed the FK constraint from posthog_persondistinctid -> posthog_person
+ test.skip('alias events ordering scenario 2: user 2 first', () => {})
+ // testWithTeamIngester('alias events ordering scenario 2: user 2 first', {}, async (ingester, hub, team) => {
+ // const testName = DateTime.now().toFormat('yyyy-MM-dd-HH-mm-ss')
+ // const user1DistinctId = 'user1-distinct-id'
+ // const user2DistinctId = 'user2-distinct-id'
+ // const user3DistinctId = 'user3-distinct-id'
- const events = [
- // User 2 creation
- new EventBuilder(team, user2DistinctId)
- .withProperties({
- anon_distinct_id: user2DistinctId,
- $set: {
- name: 'User 2',
- email: `user2-${user2DistinctId}@example.com`,
- age: 30,
- test_name: testName,
- },
- })
- .build(),
- new EventBuilder(team, user2DistinctId)
- .withEvent('$identify')
- .withProperties({
- $set: {
- new_name: 'User 2 - Updated',
- },
- })
- .build(),
+ // const events = [
+ // // User 2 creation
+ // new EventBuilder(team, user2DistinctId)
+ // .withProperties({
+ // anon_distinct_id: user2DistinctId,
+ // $set: {
+ // name: 'User 2',
+ // email: `user2-${user2DistinctId}@example.com`,
+ // age: 30,
+ // test_name: testName,
+ // },
+ // })
+ // .build(),
+ // new EventBuilder(team, user2DistinctId)
+ // .withEvent('$identify')
+ // .withProperties({
+ // $set: {
+ // new_name: 'User 2 - Updated',
+ // },
+ // })
+ // .build(),
- // Create alias for user2 -> user3
- new EventBuilder(team, user2DistinctId)
- .withEvent('$create_alias')
- .withProperties({
- distinct_id: user2DistinctId,
- alias: user3DistinctId,
- })
- .build(),
+ // // Create alias for user2 -> user3
+ // new EventBuilder(team, user2DistinctId)
+ // .withEvent('$create_alias')
+ // .withProperties({
+ // distinct_id: user2DistinctId,
+ // alias: user3DistinctId,
+ // })
+ // .build(),
- // User 1 creation
- new EventBuilder(team, user1DistinctId)
- .withEvent('$identify')
- .withProperties({
- $set: {
- name: 'User 1',
- email: `user1-${user1DistinctId}@example.com`,
- age: 30,
- test_name: testName,
- },
- })
- .build(),
- new EventBuilder(team, user1DistinctId)
- .withEvent('$identify')
- .withProperties({
- $set: {
- new_name: 'User 1 - Updated',
- },
- })
- .build(),
+ // // User 1 creation
+ // new EventBuilder(team, user1DistinctId)
+ // .withEvent('$identify')
+ // .withProperties({
+ // $set: {
+ // name: 'User 1',
+ // email: `user1-${user1DistinctId}@example.com`,
+ // age: 30,
+ // test_name: testName,
+ // },
+ // })
+ // .build(),
+ // new EventBuilder(team, user1DistinctId)
+ // .withEvent('$identify')
+ // .withProperties({
+ // $set: {
+ // new_name: 'User 1 - Updated',
+ // },
+ // })
+ // .build(),
- // Merge users: alias user1 -> user2
- new EventBuilder(team, user1DistinctId)
- .withEvent('$create_alias')
- .withProperties({
- distinct_id: user1DistinctId,
- alias: user2DistinctId,
- })
- .build(),
- ]
+ // // Merge users: alias user1 -> user2
+ // new EventBuilder(team, user1DistinctId)
+ // .withEvent('$create_alias')
+ // .withProperties({
+ // distinct_id: user1DistinctId,
+ // alias: user2DistinctId,
+ // })
+ // .build(),
+ // ]
- await ingester.handleKafkaBatch(createKafkaMessages(events))
- await waitForKafkaMessages(hub)
+ // await ingester.handleKafkaBatch(createKafkaMessages(events))
+ // await waitForKafkaMessages(hub)
- await waitForExpect(async () => {
- const events = await fetchEvents(hub, team.id)
- expect(events.length).toBe(6)
+ // await waitForExpect(async () => {
+ // const events = await fetchEvents(hub, team.id)
+ // expect(events.length).toBe(6)
- // TODO: Add specific assertions based on expected behavior
- // All events should be processed without errors
- expect(events).toBeDefined()
- })
+ // // TODO: Add specific assertions based on expected behavior
+ // // All events should be processed without errors
+ // expect(events).toBeDefined()
+ // })
- // fetch the person properties
- await waitForExpect(async () => {
- const persons = await fetchPostgresPersons(hub.db, team.id)
- expect(persons.length).toBe(1)
- const personsClickhouse = await fetchPersons(hub, team.id)
- expect(personsClickhouse.length).toBe(1)
- expect(persons[0].properties).toMatchObject(
- expect.objectContaining({
- name: 'User 1',
- new_name: 'User 1 - Updated',
- email: `user1-${user1DistinctId}@example.com`,
- age: 30,
- test_name: testName,
- })
- )
- expect(personsClickhouse[0].properties).toMatchObject(
- expect.objectContaining({
- name: 'User 1',
- new_name: 'User 1 - Updated',
- email: `user1-${user1DistinctId}@example.com`,
- age: 30,
- test_name: testName,
- })
- )
- const distinctIdsPersons = await fetchDistinctIds(hub.db.postgres, {
- id: persons[0].id,
- team_id: team.id,
- } as InternalPerson)
- expect(distinctIdsPersons.length).toBe(3)
- // Except distinctids to match the ids, in any order
- expect(distinctIdsPersons.map((distinctId) => distinctId.distinct_id)).toEqual(
- expect.arrayContaining([user1DistinctId, user2DistinctId, user3DistinctId])
- )
- })
- })
+ // // fetch the person properties
+ // await waitForExpect(async () => {
+ // const persons = await fetchPostgresPersons(hub.db, team.id)
+ // expect(persons.length).toBe(1)
+ // const personsClickhouse = await fetchPersons(hub, team.id)
+ // expect(personsClickhouse.length).toBe(1)
+ // expect(persons[0].properties).toMatchObject(
+ // expect.objectContaining({
+ // name: 'User 1',
+ // new_name: 'User 1 - Updated',
+ // email: `user1-${user1DistinctId}@example.com`,
+ // age: 30,
+ // test_name: testName,
+ // })
+ // )
+ // expect(personsClickhouse[0].properties).toMatchObject(
+ // expect.objectContaining({
+ // name: 'User 1',
+ // new_name: 'User 1 - Updated',
+ // email: `user1-${user1DistinctId}@example.com`,
+ // age: 30,
+ // test_name: testName,
+ // })
+ // )
+ // const distinctIdsPersons = await fetchDistinctIds(hub.db.postgres, {
+ // id: persons[0].id,
+ // team_id: team.id,
+ // } as InternalPerson)
+ // expect(distinctIdsPersons.length).toBe(3)
+ // // Except distinctids to match the ids, in any order
+ // expect(distinctIdsPersons.map((distinctId) => distinctId.distinct_id)).toEqual(
+ // expect.arrayContaining([user1DistinctId, user2DistinctId, user3DistinctId])
+ // )
+ // })
+ // })
testWithTeamIngester(
'alias events ordering scenario 2: user 2 first, separate batch',
diff --git a/plugin-server/src/llm-analytics/services/temporal.service.test.ts b/plugin-server/src/llm-analytics/services/temporal.service.test.ts
index 8c95bcbfb6..f2751ffd4b 100644
--- a/plugin-server/src/llm-analytics/services/temporal.service.test.ts
+++ b/plugin-server/src/llm-analytics/services/temporal.service.test.ts
@@ -1,12 +1,27 @@
import { Client, Connection } from '@temporalio/client'
-import { Hub } from '~/types'
+import { Hub, RawKafkaEvent } from '~/types'
import { closeHub, createHub } from '~/utils/db/hub'
import { TemporalService } from './temporal.service'
jest.mock('@temporalio/client')
+const createMockEvent = (overrides: Partial = {}): RawKafkaEvent => {
+ return {
+ uuid: 'event-456',
+ event: '$ai_generation',
+ properties: '{}',
+ timestamp: '2024-01-01T00:00:00Z',
+ team_id: 1,
+ distinct_id: 'test-user',
+ elements_chain: '',
+ created_at: '2024-01-01T00:00:00Z',
+ project_id: 1,
+ ...overrides,
+ } as RawKafkaEvent
+}
+
describe('TemporalService', () => {
let hub: Hub
let service: TemporalService
@@ -47,7 +62,7 @@ describe('TemporalService', () => {
describe('connection management', () => {
it('creates client with correct config', async () => {
- await service.startEvaluationRunWorkflow('test', 'test', '2024-01-01T00:00:00Z')
+ await service.startEvaluationRunWorkflow('test', createMockEvent({ uuid: 'test-uuid' }))
expect(Connection.connect).toHaveBeenCalledWith({
address: 'localhost:7233',
@@ -61,7 +76,7 @@ describe('TemporalService', () => {
hub.TEMPORAL_CLIENT_KEY = 'client-key'
const newService = new TemporalService(hub)
- await newService.startEvaluationRunWorkflow('test', 'test', '2024-01-01T00:00:00Z')
+ await newService.startEvaluationRunWorkflow('test', createMockEvent({ uuid: 'test-uuid' }))
expect(Connection.connect).toHaveBeenCalledWith({
address: 'localhost:7233',
@@ -76,7 +91,7 @@ describe('TemporalService', () => {
})
it('disconnects client properly', async () => {
- await service.startEvaluationRunWorkflow('test', 'test', '2024-01-01T00:00:00Z')
+ await service.startEvaluationRunWorkflow('test', createMockEvent({ uuid: 'test-uuid' }))
await service.disconnect()
expect(mockConnection.close).toHaveBeenCalled()
@@ -85,7 +100,9 @@ describe('TemporalService', () => {
describe('workflow triggering', () => {
it('starts evaluation run workflow with correct parameters', async () => {
- await service.startEvaluationRunWorkflow('eval-123', 'event-456', '2024-01-01T00:00:00Z')
+ const mockEvent = createMockEvent({ properties: { $ai_input: 'test', $ai_output: 'response' } as any })
+
+ await service.startEvaluationRunWorkflow('eval-123', mockEvent)
expect(mockClient.workflow.start).toHaveBeenCalledWith('run-evaluation', {
taskQueue: 'general-purpose-task-queue',
@@ -94,16 +111,17 @@ describe('TemporalService', () => {
args: [
{
evaluation_id: 'eval-123',
- target_event_id: 'event-456',
- timestamp: '2024-01-01T00:00:00Z',
+ event_data: mockEvent,
},
],
})
})
it('generates deterministic workflow IDs', async () => {
- await service.startEvaluationRunWorkflow('eval-123', 'event-456', '2024-01-01T00:00:00Z')
- await service.startEvaluationRunWorkflow('eval-123', 'event-456', '2024-01-01T00:00:00Z')
+ const mockEvent = createMockEvent()
+
+ await service.startEvaluationRunWorkflow('eval-123', mockEvent)
+ await service.startEvaluationRunWorkflow('eval-123', mockEvent)
const calls = (mockClient.workflow.start as jest.Mock).mock.calls
const workflowId1 = calls[0][1].workflowId
@@ -114,8 +132,11 @@ describe('TemporalService', () => {
})
it('generates different workflow IDs for different events', async () => {
- await service.startEvaluationRunWorkflow('eval-123', 'event-1', '2024-01-01T00:00:00Z')
- await service.startEvaluationRunWorkflow('eval-123', 'event-2', '2024-01-01T00:00:00Z')
+ const mockEvent1 = createMockEvent({ uuid: 'event-1' })
+ const mockEvent2 = createMockEvent({ uuid: 'event-2' })
+
+ await service.startEvaluationRunWorkflow('eval-123', mockEvent1)
+ await service.startEvaluationRunWorkflow('eval-123', mockEvent2)
const calls = (mockClient.workflow.start as jest.Mock).mock.calls
const workflowId1 = calls[0][1].workflowId
@@ -127,7 +148,7 @@ describe('TemporalService', () => {
})
it('returns workflow handle on success', async () => {
- const handle = await service.startEvaluationRunWorkflow('eval-123', 'event-456', '2024-01-01T00:00:00Z')
+ const handle = await service.startEvaluationRunWorkflow('eval-123', createMockEvent())
expect(handle).toBeDefined()
expect(handle).toBe(mockWorkflowHandle)
@@ -136,9 +157,9 @@ describe('TemporalService', () => {
it('throws on workflow start failure', async () => {
;(mockClient.workflow.start as jest.Mock).mockRejectedValue(new Error('Temporal unavailable'))
- await expect(
- service.startEvaluationRunWorkflow('eval-123', 'event-456', '2024-01-01T00:00:00Z')
- ).rejects.toThrow('Temporal unavailable')
+ await expect(service.startEvaluationRunWorkflow('eval-123', createMockEvent())).rejects.toThrow(
+ 'Temporal unavailable'
+ )
})
})
})
diff --git a/plugin-server/src/llm-analytics/services/temporal.service.ts b/plugin-server/src/llm-analytics/services/temporal.service.ts
index 64558e7be3..74dcb04064 100644
--- a/plugin-server/src/llm-analytics/services/temporal.service.ts
+++ b/plugin-server/src/llm-analytics/services/temporal.service.ts
@@ -2,7 +2,7 @@ import { Client, Connection, TLSConfig, WorkflowHandle } from '@temporalio/clien
import fs from 'fs/promises'
import { Counter } from 'prom-client'
-import { Hub } from '../../types'
+import { Hub, RawKafkaEvent } from '../../types'
import { isDevEnv } from '../../utils/env-utils'
import { logger } from '../../utils/logger'
@@ -93,21 +93,16 @@ export class TemporalService {
return client
}
- async startEvaluationRunWorkflow(
- evaluationId: string,
- targetEventId: string,
- timestamp: string
- ): Promise {
+ async startEvaluationRunWorkflow(evaluationId: string, event: RawKafkaEvent): Promise {
const client = await this.ensureConnected()
- const workflowId = `${evaluationId}-${targetEventId}-ingestion`
+ const workflowId = `${evaluationId}-${event.uuid}-ingestion`
const handle = await client.workflow.start('run-evaluation', {
args: [
{
evaluation_id: evaluationId,
- target_event_id: targetEventId,
- timestamp: timestamp,
+ event_data: event,
},
],
taskQueue: EVALUATION_TASK_QUEUE,
@@ -120,8 +115,8 @@ export class TemporalService {
logger.debug('Started evaluation run workflow', {
workflowId,
evaluationId,
- targetEventId,
- timestamp,
+ targetEventId: event.uuid,
+ timestamp: event.timestamp,
})
return handle
diff --git a/plugin-server/src/logs-ingestion/logs-ingestion-consumer.ts b/plugin-server/src/logs-ingestion/logs-ingestion-consumer.ts
index 311ef706e6..233a0681f4 100644
--- a/plugin-server/src/logs-ingestion/logs-ingestion-consumer.ts
+++ b/plugin-server/src/logs-ingestion/logs-ingestion-consumer.ts
@@ -114,7 +114,7 @@ export class LogsIngestionConsumer {
if (!team) {
// Write to DLQ topic maybe?
- logger.error('team_not_found')
+ logger.error('team_not_found', { token_with_no_team: token })
logMessageDroppedCounter.inc({ reason: 'team_not_found' })
return
}
diff --git a/plugin-server/src/main/ingestion-queues/evaluation-scheduler.ts b/plugin-server/src/main/ingestion-queues/evaluation-scheduler.ts
index 58b8645392..5d942d451b 100644
--- a/plugin-server/src/main/ingestion-queues/evaluation-scheduler.ts
+++ b/plugin-server/src/main/ingestion-queues/evaluation-scheduler.ts
@@ -289,6 +289,6 @@ async function processEventEvaluationMatch(
evaluationMatchesCounter.labels({ outcome: 'matched' }).inc()
- await temporalService.startEvaluationRunWorkflow(evaluationDefinition.id, event.uuid, event.timestamp)
+ await temporalService.startEvaluationRunWorkflow(evaluationDefinition.id, event)
evaluationSchedulerEventsProcessed.labels({ status: 'success' }).inc()
}
diff --git a/plugin-server/src/types.ts b/plugin-server/src/types.ts
index 3c2131e756..7e8b0cc6c8 100644
--- a/plugin-server/src/types.ts
+++ b/plugin-server/src/types.ts
@@ -274,6 +274,12 @@ export interface PluginsServerConfig extends CdpConfig, IngestionConsumerConfig,
PERSON_MERGE_ASYNC_ENABLED: boolean
// Batch size for sync person merge processing (0 = unlimited)
PERSON_MERGE_SYNC_BATCH_SIZE: number
+ // Enable person table cutover migration
+ PERSON_TABLE_CUTOVER_ENABLED: boolean
+ // New person table name for cutover migration
+ PERSON_NEW_TABLE_NAME: string
+ // Person ID offset threshold - person IDs >= this value route to new table
+ PERSON_NEW_TABLE_ID_OFFSET: number
GROUP_BATCH_WRITING_MAX_CONCURRENT_UPDATES: number // maximum number of concurrent updates to groups table per batch
GROUP_BATCH_WRITING_MAX_OPTIMISTIC_UPDATE_RETRIES: number // maximum number of retries for optimistic update
GROUP_BATCH_WRITING_OPTIMISTIC_UPDATE_RETRY_INTERVAL_MS: number // starting interval for exponential backoff between retries for optimistic update
@@ -1006,6 +1012,18 @@ export interface RawPerson extends BasePerson {
export interface InternalPerson extends BasePerson {
created_at: DateTime
version: number
+ /** Internal flag to track which table this person exists in during cutover migration */
+ __useNewTable?: boolean
+}
+
+/** Mutable fields that can be updated on a Person via updatePerson. */
+export interface PersonUpdateFields {
+ properties: Properties
+ properties_last_updated_at: PropertiesLastUpdatedAt
+ properties_last_operation: PropertiesLastOperation | null
+ is_identified: boolean
+ created_at: DateTime
+ version?: number // Optional: allows forcing a specific version (used for dual-write sync)
}
/** Person model exposed outside of person-specific DB logic. */
diff --git a/plugin-server/src/utils/db/elements-chain.ts b/plugin-server/src/utils/db/elements-chain.ts
index 9f1dace6f8..05b1e5b891 100644
--- a/plugin-server/src/utils/db/elements-chain.ts
+++ b/plugin-server/src/utils/db/elements-chain.ts
@@ -8,7 +8,11 @@ const splitChainRegex = createTrackedRE2(/(?:[^\s;"]|"(?:[^"\\]|\\.)*")+/g, unde
// Below splits the tag/classes from attributes
// Needs a regex because classes can have : too
const splitClassAttributes = createTrackedRE2(/(.*?)($|:([a-zA-Z\-_0-9]*=.*))/g, undefined, 'elements-chain:splitClass')
-const parseAttributesRegex = createTrackedRE2(/((.*?)="(.*?[^\\])")/gm, undefined, 'elements-chain:parseAttributes')
+const parseAttributesRegex = createTrackedRE2(
+ /((.*?)="((?:\\"|[^"])*)")/gm,
+ undefined,
+ 'elements-chain:parseAttributes'
+)
const newLine = createTrackedRE2(/\\n/g, undefined, 'elements-chain:newLine')
export function elementsToString(elements: Element[]): string {
diff --git a/plugin-server/src/utils/db/hub.ts b/plugin-server/src/utils/db/hub.ts
index c0e3365763..f1eeec84c8 100644
--- a/plugin-server/src/utils/db/hub.ts
+++ b/plugin-server/src/utils/db/hub.ts
@@ -145,6 +145,9 @@ export async function createHub(
const personRepositoryOptions = {
calculatePropertiesSize: serverConfig.PERSON_UPDATE_CALCULATE_PROPERTIES_SIZE,
comparisonEnabled: serverConfig.PERSONS_DUAL_WRITE_COMPARISON_ENABLED,
+ tableCutoverEnabled: serverConfig.PERSON_TABLE_CUTOVER_ENABLED,
+ newTableName: serverConfig.PERSON_NEW_TABLE_NAME,
+ newTableIdOffset: serverConfig.PERSON_NEW_TABLE_ID_OFFSET,
}
const personRepository = serverConfig.PERSONS_DUAL_WRITE_ENABLED
? new PostgresDualWritePersonRepository(postgres, postgresPersonMigration, personRepositoryOptions)
diff --git a/plugin-server/src/worker/ingestion/persons/batch-writing-person-store.test.ts b/plugin-server/src/worker/ingestion/persons/batch-writing-person-store.test.ts
index 9b78a3676d..07632a6f28 100644
--- a/plugin-server/src/worker/ingestion/persons/batch-writing-person-store.test.ts
+++ b/plugin-server/src/worker/ingestion/persons/batch-writing-person-store.test.ts
@@ -942,13 +942,14 @@ describe('BatchWritingPersonStore', () => {
prop_from_distinctId2: 'value2',
},
}),
+ // Only mutable fields should be in the update object
expect.objectContaining({
- id: sharedPerson.id,
properties: {
initial_prop: 'initial_value',
prop_from_distinctId1: 'value1',
prop_from_distinctId2: 'value2',
},
+ is_identified: expect.any(Boolean),
}),
'updatePersonNoAssert'
)
@@ -1275,10 +1276,11 @@ describe('BatchWritingPersonStore', () => {
const testPersonStore = new BatchWritingPersonsStore(mockRepo, db.kafkaProducer)
const personStoreForBatch = testPersonStore.forBatch() as BatchWritingPersonsStoreForBatch
const personId = 'test-person-id'
+ const teamId = 1
- const result = await personStoreForBatch.personPropertiesSize(personId)
+ const result = await personStoreForBatch.personPropertiesSize(personId, teamId)
- expect(mockRepo.personPropertiesSize).toHaveBeenCalledWith(personId)
+ expect(mockRepo.personPropertiesSize).toHaveBeenCalledWith(personId, teamId)
expect(result).toBe(1024)
})
@@ -1288,10 +1290,11 @@ describe('BatchWritingPersonStore', () => {
const testPersonStore = new BatchWritingPersonsStore(mockRepo, db.kafkaProducer)
const personStoreForBatch = testPersonStore.forBatch() as BatchWritingPersonsStoreForBatch
const personId = 'test-person-id'
+ const teamId = 1
- const result = await personStoreForBatch.personPropertiesSize(personId)
+ const result = await personStoreForBatch.personPropertiesSize(personId, teamId)
- expect(mockRepo.personPropertiesSize).toHaveBeenCalledWith(personId)
+ expect(mockRepo.personPropertiesSize).toHaveBeenCalledWith(personId, teamId)
expect(result).toBe(0)
})
})
diff --git a/plugin-server/src/worker/ingestion/persons/batch-writing-person-store.ts b/plugin-server/src/worker/ingestion/persons/batch-writing-person-store.ts
index 7c697511b3..8072f8339b 100644
--- a/plugin-server/src/worker/ingestion/persons/batch-writing-person-store.ts
+++ b/plugin-server/src/worker/ingestion/persons/batch-writing-person-store.ts
@@ -668,8 +668,8 @@ export class BatchWritingPersonsStoreForBatch implements PersonsStoreForBatch, B
return await (tx || this.personRepository).addPersonlessDistinctIdForMerge(teamId, distinctId)
}
- async personPropertiesSize(personId: string): Promise {
- return await this.personRepository.personPropertiesSize(personId)
+ async personPropertiesSize(personId: string, teamId: number): Promise {
+ return await this.personRepository.personPropertiesSize(personId, teamId)
}
reportBatch(): void {
@@ -1045,8 +1045,14 @@ export class BatchWritingPersonsStoreForBatch implements PersonsStoreForBatch, B
this.incrementDatabaseOperation(operation as MethodName, personUpdate.distinct_id)
// Convert PersonUpdate back to InternalPerson for database call
const person = toInternalPerson(personUpdate)
- // Create update object without version field (updatePerson handles version internally)
- const { version, ...updateFields } = person
+ // Always pass all mutable fields for consistent query plans
+ const updateFields = {
+ properties: person.properties,
+ properties_last_updated_at: person.properties_last_updated_at,
+ properties_last_operation: person.properties_last_operation,
+ is_identified: person.is_identified,
+ created_at: person.created_at,
+ }
this.incrementCount('updatePersonNoAssert', personUpdate.distinct_id)
this.incrementDatabaseOperation('updatePersonNoAssert', personUpdate.distinct_id)
diff --git a/plugin-server/src/worker/ingestion/persons/metrics.ts b/plugin-server/src/worker/ingestion/persons/metrics.ts
index f75fd89e8e..91ee8d22a3 100644
--- a/plugin-server/src/worker/ingestion/persons/metrics.ts
+++ b/plugin-server/src/worker/ingestion/persons/metrics.ts
@@ -207,3 +207,10 @@ export const personProfileBatchIgnoredPropertiesCounter = new Counter({
help: 'Count of specific properties that were ignored during person profile updates at batch level',
labelNames: ['property'],
})
+
+export const personJsonFieldSizeHistogram = new Histogram({
+ name: 'person_json_field_size_bytes',
+ help: 'Approximate size in bytes of serialized JSON fields (using string length as proxy for performance)',
+ labelNames: ['operation', 'field'], // operation: createPerson, updatePerson; field: properties, properties_last_updated_at, properties_last_operation
+ buckets: [100, 500, 1024, 4096, 8192, 16384, 32768, 65536, 131072, 262144, 524288, 1048576], // 100B, 500B, 1KB, 4KB, 8KB, 16KB, 32KB, 64KB, 128KB, 256KB, 512KB, 1MB
+})
diff --git a/plugin-server/src/worker/ingestion/persons/person-update-batch.ts b/plugin-server/src/worker/ingestion/persons/person-update-batch.ts
index 83755a60a6..f186bdce59 100644
--- a/plugin-server/src/worker/ingestion/persons/person-update-batch.ts
+++ b/plugin-server/src/worker/ingestion/persons/person-update-batch.ts
@@ -22,6 +22,8 @@ export interface PersonUpdate {
properties_to_unset: string[] // Property keys to unset
original_is_identified: boolean
original_created_at: DateTime
+ /** Internal flag to track which table this person exists in during cutover migration */
+ __useNewTable?: boolean
}
export interface PersonPropertyUpdate {
@@ -49,6 +51,7 @@ export function fromInternalPerson(person: InternalPerson, distinctId: string):
properties_to_unset: [],
original_is_identified: person.is_identified,
original_created_at: person.created_at,
+ __useNewTable: person.__useNewTable,
}
}
@@ -77,5 +80,6 @@ export function toInternalPerson(personUpdate: PersonUpdate): InternalPerson {
version: personUpdate.version,
is_identified: personUpdate.is_identified,
is_user_id: personUpdate.is_user_id,
+ __useNewTable: personUpdate.__useNewTable,
}
}
diff --git a/plugin-server/src/worker/ingestion/persons/persons-store-for-batch.ts b/plugin-server/src/worker/ingestion/persons/persons-store-for-batch.ts
index 030f0c1d21..c546b44b6c 100644
--- a/plugin-server/src/worker/ingestion/persons/persons-store-for-batch.ts
+++ b/plugin-server/src/worker/ingestion/persons/persons-store-for-batch.ts
@@ -128,7 +128,7 @@ export interface PersonsStoreForBatch extends BatchWritingStore {
/**
* Returns the size of the person properties
*/
- personPropertiesSize(personId: string): Promise
+ personPropertiesSize(personId: string, teamId: number): Promise
/**
* Fetch distinct ids for a person inside a transaction-aware wrapper
diff --git a/plugin-server/src/worker/ingestion/persons/repositories/dualwrite-person-repository-transaction.ts b/plugin-server/src/worker/ingestion/persons/repositories/dualwrite-person-repository-transaction.ts
index a2bb839f41..562d5291f1 100644
--- a/plugin-server/src/worker/ingestion/persons/repositories/dualwrite-person-repository-transaction.ts
+++ b/plugin-server/src/worker/ingestion/persons/repositories/dualwrite-person-repository-transaction.ts
@@ -3,7 +3,7 @@ import { DateTime } from 'luxon'
import { Properties } from '@posthog/plugin-scaffold'
import { TopicMessage } from '~/kafka/producer'
-import { InternalPerson, PropertiesLastOperation, PropertiesLastUpdatedAt, Team } from '~/types'
+import { InternalPerson, PersonUpdateFields, PropertiesLastOperation, PropertiesLastUpdatedAt, Team } from '~/types'
import { CreatePersonResult, MoveDistinctIdsResult } from '~/utils/db/db'
import { TransactionClient } from '~/utils/db/postgres'
@@ -81,7 +81,7 @@ export class DualWritePersonRepositoryTransaction implements PersonRepositoryTra
async updatePerson(
person: InternalPerson,
- update: Partial,
+ update: PersonUpdateFields,
tag?: string
): Promise<[InternalPerson, TopicMessage[], boolean]> {
// Enforce version parity across primary/secondary: run primary first, then set secondary to primary's new version
diff --git a/plugin-server/src/worker/ingestion/persons/repositories/person-repository-transaction.ts b/plugin-server/src/worker/ingestion/persons/repositories/person-repository-transaction.ts
index 6c1d1b1ba1..c887f24605 100644
--- a/plugin-server/src/worker/ingestion/persons/repositories/person-repository-transaction.ts
+++ b/plugin-server/src/worker/ingestion/persons/repositories/person-repository-transaction.ts
@@ -3,7 +3,13 @@ import { DateTime } from 'luxon'
import { Properties } from '@posthog/plugin-scaffold'
import { TopicMessage } from '../../../../kafka/producer'
-import { InternalPerson, PropertiesLastOperation, PropertiesLastUpdatedAt, Team } from '../../../../types'
+import {
+ InternalPerson,
+ PersonUpdateFields,
+ PropertiesLastOperation,
+ PropertiesLastUpdatedAt,
+ Team,
+} from '../../../../types'
import { CreatePersonResult, MoveDistinctIdsResult } from '../../../../utils/db/db'
export interface PersonRepositoryTransaction {
@@ -21,7 +27,7 @@ export interface PersonRepositoryTransaction {
updatePerson(
person: InternalPerson,
- update: Partial,
+ update: PersonUpdateFields,
tag?: string
): Promise<[InternalPerson, TopicMessage[], boolean]>
diff --git a/plugin-server/src/worker/ingestion/persons/repositories/person-repository.ts b/plugin-server/src/worker/ingestion/persons/repositories/person-repository.ts
index 8640664788..c62ed20615 100644
--- a/plugin-server/src/worker/ingestion/persons/repositories/person-repository.ts
+++ b/plugin-server/src/worker/ingestion/persons/repositories/person-repository.ts
@@ -3,7 +3,14 @@ import { DateTime } from 'luxon'
import { Properties } from '@posthog/plugin-scaffold'
import { TopicMessage } from '../../../../kafka/producer'
-import { InternalPerson, PropertiesLastOperation, PropertiesLastUpdatedAt, Team, TeamId } from '../../../../types'
+import {
+ InternalPerson,
+ PersonUpdateFields,
+ PropertiesLastOperation,
+ PropertiesLastUpdatedAt,
+ Team,
+ TeamId,
+} from '../../../../types'
import { CreatePersonResult } from '../../../../utils/db/db'
import { PersonUpdate } from '../person-update-batch'
import { PersonRepositoryTransaction } from './person-repository-transaction'
@@ -50,7 +57,7 @@ export interface PersonRepository {
updatePerson(
person: InternalPerson,
- update: Partial,
+ update: PersonUpdateFields,
tag?: string
): Promise<[InternalPerson, TopicMessage[], boolean]>
@@ -63,7 +70,7 @@ export interface PersonRepository {
addPersonlessDistinctId(teamId: Team['id'], distinctId: string): Promise
addPersonlessDistinctIdForMerge(teamId: Team['id'], distinctId: string): Promise
- personPropertiesSize(personId: string): Promise
+ personPropertiesSize(personId: string, teamId: number): Promise
updateCohortsAndFeatureFlagsForMerge(
teamID: Team['id'],
diff --git a/plugin-server/src/worker/ingestion/persons/repositories/postgres-dualwrite-person-repository-2pc.test.ts b/plugin-server/src/worker/ingestion/persons/repositories/postgres-dualwrite-person-repository-2pc.test.ts
index 4de2a4c851..b69f9b3939 100644
--- a/plugin-server/src/worker/ingestion/persons/repositories/postgres-dualwrite-person-repository-2pc.test.ts
+++ b/plugin-server/src/worker/ingestion/persons/repositories/postgres-dualwrite-person-repository-2pc.test.ts
@@ -9,6 +9,7 @@ import { PostgresDualWritePersonRepository } from './postgres-dualwrite-person-r
import {
assertConsistencyAcrossDatabases,
cleanupPrepared,
+ createPersonUpdateFields,
getFirstTeam,
mockDatabaseError,
setupMigrationDb,
@@ -278,7 +279,10 @@ describe('PostgresDualWritePersonRepository 2PC Dual-Write Tests', () => {
[{ distinctId: 'dw-2' }]
)) as any
- const [updated] = await repository.updatePerson(person, { properties: { name: 'B' } })
+ const [updated] = await repository.updatePerson(
+ person,
+ createPersonUpdateFields(person, { properties: { name: 'B' } })
+ )
const primary = await postgres.query(
PostgresUse.PERSONS_READ,
@@ -308,9 +312,9 @@ describe('PostgresDualWritePersonRepository 2PC Dual-Write Tests', () => {
.spyOn((repository as any).secondaryRepo, 'updatePerson')
.mockRejectedValue(new Error('simulated secondary failure'))
- await expect(repository.updatePerson(person, { properties: { y: 2 } }, 'test-fail')).rejects.toThrow(
- 'simulated secondary failure'
- )
+ await expect(
+ repository.updatePerson(person, createPersonUpdateFields(person, { properties: { y: 2 } }), 'test-fail')
+ ).rejects.toThrow('simulated secondary failure')
spy.mockRestore()
@@ -349,9 +353,9 @@ describe('PostgresDualWritePersonRepository 2PC Dual-Write Tests', () => {
const mockSpy = mockDatabaseError(postgres, new Error('primary update failed'), 'updatePerson')
- await expect(repository.updatePerson(person, { properties: { name: 'Updated' } })).rejects.toThrow(
- 'primary update failed'
- )
+ await expect(
+ repository.updatePerson(person, createPersonUpdateFields(person, { properties: { name: 'Updated' } }))
+ ).rejects.toThrow('primary update failed')
mockSpy.mockRestore()
@@ -392,9 +396,9 @@ describe('PostgresDualWritePersonRepository 2PC Dual-Write Tests', () => {
const mockSpy = mockDatabaseError(migrationPostgres, new Error('secondary update failed'), 'updatePerson')
- await expect(repository.updatePerson(person, { properties: { name: 'Updated' } })).rejects.toThrow(
- 'secondary update failed'
- )
+ await expect(
+ repository.updatePerson(person, createPersonUpdateFields(person, { properties: { name: 'Updated' } }))
+ ).rejects.toThrow('secondary update failed')
mockSpy.mockRestore()
@@ -1393,9 +1397,12 @@ describe('PostgresDualWritePersonRepository 2PC Dual-Write Tests', () => {
await tx.addDistinctId(createResult.person, 'tx-did-2', 1)
// Update the person
- const [updatedPerson] = await tx.updatePerson(createResult.person, {
- properties: { name: 'Updated Name', age: 26 },
- })
+ const [updatedPerson] = await tx.updatePerson(
+ createResult.person,
+ createPersonUpdateFields(createResult.person, {
+ properties: { name: 'Updated Name', age: 26 },
+ })
+ )
return updatedPerson
})
@@ -1728,7 +1735,10 @@ describe('PostgresDualWritePersonRepository 2PC Dual-Write Tests', () => {
throw new Error('Failed to create person')
}
- await tx.updatePerson(createResult.person, { properties: { updated: true } })
+ await tx.updatePerson(
+ createResult.person,
+ createPersonUpdateFields(createResult.person, { properties: { updated: true } })
+ )
return createResult.person
})
@@ -2087,9 +2097,12 @@ describe('PostgresDualWritePersonRepository 2PC Dual-Write Tests', () => {
// Now use it within a transaction
const txResult = await repository.inTransaction('test-mixed-calls', async (tx) => {
// Update the person created outside
- const [updated] = await tx.updatePerson(outsidePerson, {
- properties: { location: 'updated-inside', new_prop: 'added' },
- })
+ const [updated] = await tx.updatePerson(
+ outsidePerson,
+ createPersonUpdateFields(outsidePerson, {
+ properties: { location: 'updated-inside', new_prop: 'added' },
+ })
+ )
// Add a distinct ID
await tx.addDistinctId(updated, 'added-in-tx', 1)
@@ -2141,7 +2154,10 @@ describe('PostgresDualWritePersonRepository 2PC Dual-Write Tests', () => {
)) as any
const result = await repository.inTransaction('test-version-sync', async (tx) => {
- const [updatedPerson] = await tx.updatePerson(person, { properties: { updated: 'value' } })
+ const [updatedPerson] = await tx.updatePerson(
+ person,
+ createPersonUpdateFields(person, { properties: { updated: 'value' } })
+ )
return updatedPerson
})
diff --git a/plugin-server/src/worker/ingestion/persons/repositories/postgres-dualwrite-person-repository.ts b/plugin-server/src/worker/ingestion/persons/repositories/postgres-dualwrite-person-repository.ts
index dac85eb6fb..a4482a9b28 100644
--- a/plugin-server/src/worker/ingestion/persons/repositories/postgres-dualwrite-person-repository.ts
+++ b/plugin-server/src/worker/ingestion/persons/repositories/postgres-dualwrite-person-repository.ts
@@ -3,7 +3,14 @@ import { DateTime } from 'luxon'
import { Properties } from '@posthog/plugin-scaffold'
import { TopicMessage } from '../../../../kafka/producer'
-import { InternalPerson, PropertiesLastOperation, PropertiesLastUpdatedAt, Team, TeamId } from '../../../../types'
+import {
+ InternalPerson,
+ PersonUpdateFields,
+ PropertiesLastOperation,
+ PropertiesLastUpdatedAt,
+ Team,
+ TeamId,
+} from '../../../../types'
import { CreatePersonResult, MoveDistinctIdsResult } from '../../../../utils/db/db'
import { PostgresRouter, PostgresUse } from '../../../../utils/db/postgres'
import { TwoPhaseCommitCoordinator } from '../../../../utils/db/two-phase'
@@ -132,7 +139,7 @@ export class PostgresDualWritePersonRepository implements PersonRepository {
async updatePerson(
person: InternalPerson,
- update: Partial,
+ update: PersonUpdateFields,
tag?: string
): Promise<[InternalPerson, TopicMessage[], boolean]> {
// Enforce version parity across primary/secondary: run primary first, then set secondary to primary's new version
@@ -142,11 +149,12 @@ export class PostgresDualWritePersonRepository implements PersonRepository {
primaryOut = p
const primaryUpdated = p[0]
- const secondaryUpdate: Partial = {
+ const secondaryUpdate: PersonUpdateFields = {
properties: primaryUpdated.properties,
properties_last_updated_at: primaryUpdated.properties_last_updated_at,
properties_last_operation: primaryUpdated.properties_last_operation,
is_identified: primaryUpdated.is_identified,
+ created_at: primaryUpdated.created_at,
version: primaryUpdated.version,
}
@@ -342,8 +350,8 @@ export class PostgresDualWritePersonRepository implements PersonRepository {
return isMerged
}
- async personPropertiesSize(personId: string): Promise {
- return await this.primaryRepo.personPropertiesSize(personId)
+ async personPropertiesSize(personId: string, teamId: number): Promise {
+ return await this.primaryRepo.personPropertiesSize(personId, teamId)
}
async updateCohortsAndFeatureFlagsForMerge(
diff --git a/plugin-server/src/worker/ingestion/persons/repositories/postgres-person-repository-transaction.ts b/plugin-server/src/worker/ingestion/persons/repositories/postgres-person-repository-transaction.ts
index 5bfd05d617..679c35db93 100644
--- a/plugin-server/src/worker/ingestion/persons/repositories/postgres-person-repository-transaction.ts
+++ b/plugin-server/src/worker/ingestion/persons/repositories/postgres-person-repository-transaction.ts
@@ -3,7 +3,13 @@ import { DateTime } from 'luxon'
import { Properties } from '@posthog/plugin-scaffold'
import { TopicMessage } from '../../../../kafka/producer'
-import { InternalPerson, PropertiesLastOperation, PropertiesLastUpdatedAt, Team } from '../../../../types'
+import {
+ InternalPerson,
+ PersonUpdateFields,
+ PropertiesLastOperation,
+ PropertiesLastUpdatedAt,
+ Team,
+} from '../../../../types'
import { CreatePersonResult, MoveDistinctIdsResult } from '../../../../utils/db/db'
import { TransactionClient } from '../../../../utils/db/postgres'
import { PersonRepositoryTransaction } from './person-repository-transaction'
@@ -42,7 +48,7 @@ export class PostgresPersonRepositoryTransaction implements PersonRepositoryTran
async updatePerson(
person: InternalPerson,
- update: Partial,
+ update: PersonUpdateFields,
tag?: string
): Promise<[InternalPerson, TopicMessage[], boolean]> {
return await this.repository.updatePerson(person, update, tag, this.transaction)
diff --git a/plugin-server/src/worker/ingestion/persons/repositories/postgres-person-repository.test.ts b/plugin-server/src/worker/ingestion/persons/repositories/postgres-person-repository.test.ts
index c1c7c8874f..7b4ec5bd7d 100644
--- a/plugin-server/src/worker/ingestion/persons/repositories/postgres-person-repository.test.ts
+++ b/plugin-server/src/worker/ingestion/persons/repositories/postgres-person-repository.test.ts
@@ -1,14 +1,14 @@
import { DateTime } from 'luxon'
import { createTeam, insertRow, resetTestDatabase } from '../../../../../tests/helpers/sql'
-import { Hub, InternalPerson, Team } from '../../../../types'
+import { Hub, InternalPerson, PropertyUpdateOperation, Team } from '../../../../types'
import { closeHub, createHub } from '../../../../utils/db/hub'
import { PostgresRouter, PostgresUse } from '../../../../utils/db/postgres'
import { parseJSON } from '../../../../utils/json-parse'
import { NoRowsUpdatedError, UUIDT } from '../../../../utils/utils'
import { PersonPropertiesSizeViolationError } from './person-repository'
import { PostgresPersonRepository } from './postgres-person-repository'
-import { fetchDistinctIdValues, fetchDistinctIds } from './test-helpers'
+import { createPersonUpdateFields, fetchDistinctIdValues, fetchDistinctIds } from './test-helpers'
jest.mock('../../../../utils/logger')
@@ -491,10 +491,12 @@ describe('PostgresPersonRepository', () => {
const result = await repository.moveDistinctIds(sourcePerson, nonExistentTargetPerson, undefined)
- expect(result.success).toBe(false)
- if (!result.success) {
- expect(result.error).toBe('TargetNotFound')
- }
+ // TODO: This should be false, but we need to allow this to happen since we remove the
+ // foreign key constraint on the distinct ID table.
+ expect(result.success).toBe(true)
+ // if (!result.success) {
+ // expect(result.error).toBe('TargetNotFound')
+ // }
})
it('should handle source person not found', async () => {
@@ -1093,15 +1095,16 @@ describe('PostgresPersonRepository', () => {
},
})
- const size = await repository.personPropertiesSize(person.id)
+ const size = await repository.personPropertiesSize(person.id, team.id)
expect(size).toBeGreaterThan(0)
expect(typeof size).toBe('number')
})
it('should return 0 for non-existent person', async () => {
+ const team = await getFirstTeam(hub)
const fakePersonId = '999999' // Use a numeric ID instead of UUID
- const size = await repository.personPropertiesSize(fakePersonId)
+ const size = await repository.personPropertiesSize(fakePersonId, team.id)
expect(size).toBe(0)
})
@@ -1117,11 +1120,11 @@ describe('PostgresPersonRepository', () => {
const person2 = await createTestPerson(team2Id, 'different-distinct', { name: 'Team 2 Person' })
// Check size for person 1
- const size1 = await repository.personPropertiesSize(person1.id)
+ const size1 = await repository.personPropertiesSize(person1.id, team1.id)
expect(size1).toBeGreaterThan(0)
// Check size for person 2
- const size2 = await repository.personPropertiesSize(person2.id)
+ const size2 = await repository.personPropertiesSize(person2.id, team2Id)
expect(size2).toBeGreaterThan(0)
})
@@ -1130,7 +1133,7 @@ describe('PostgresPersonRepository', () => {
// Create person with minimal properties
const minimalPerson = await createTestPerson(team.id, 'minimal-person', { name: 'Minimal' })
- const minimalSize = await repository.personPropertiesSize(minimalPerson.id)
+ const minimalSize = await repository.personPropertiesSize(minimalPerson.id, team.id)
// Create person with extensive properties
const extensiveProperties = {
@@ -1159,7 +1162,7 @@ describe('PostgresPersonRepository', () => {
},
}
const extensivePerson = await createTestPerson(team.id, 'extensive-person', extensiveProperties)
- const extensiveSize = await repository.personPropertiesSize(extensivePerson.id)
+ const extensiveSize = await repository.personPropertiesSize(extensivePerson.id, team.id)
expect(extensiveSize).toBeGreaterThan(minimalSize)
})
@@ -1171,7 +1174,10 @@ describe('PostgresPersonRepository', () => {
const person = await createTestPerson(team.id, 'test-distinct', { name: 'John', age: 25 })
const update = { properties: { name: 'Jane', age: 30, city: 'New York' } }
- const [updatedPerson, messages, versionDisparity] = await repository.updatePerson(person, update)
+ const [updatedPerson, messages, versionDisparity] = await repository.updatePerson(
+ person,
+ createPersonUpdateFields(person, update)
+ )
expect(updatedPerson.properties).toEqual({ name: 'Jane', age: 30, city: 'New York' })
expect(updatedPerson.version).toBe(person.version + 1)
@@ -1184,23 +1190,15 @@ describe('PostgresPersonRepository', () => {
expect(fetchedPerson?.version).toBe(person.version + 1)
})
- it('should handle empty update gracefully', async () => {
- const team = await getFirstTeam(hub)
- const person = await createTestPerson(team.id, 'test-distinct', { name: 'John' })
-
- const [updatedPerson, messages, versionDisparity] = await repository.updatePerson(person, {})
-
- expect(updatedPerson).toEqual(person)
- expect(messages).toHaveLength(0)
- expect(versionDisparity).toBe(false)
- })
-
it('should update is_identified field', async () => {
const team = await getFirstTeam(hub)
const person = await createTestPerson(team.id, 'test-distinct', { name: 'John' })
const update = { is_identified: true }
- const [updatedPerson, messages] = await repository.updatePerson(person, update)
+ const [updatedPerson, messages] = await repository.updatePerson(
+ person,
+ createPersonUpdateFields(person, update)
+ )
expect(updatedPerson.is_identified).toBe(true)
expect(updatedPerson.version).toBe(person.version + 1)
@@ -1213,11 +1211,17 @@ describe('PostgresPersonRepository', () => {
// First update
const update1 = { properties: { name: 'Jane' } }
- const [updatedPerson1, _messages1] = await repository.updatePerson(person, update1)
+ const [updatedPerson1, _messages1] = await repository.updatePerson(
+ person,
+ createPersonUpdateFields(person, update1)
+ )
// Second update with the updated person (should succeed since we're using the latest version)
const update2 = { properties: { age: 30 } }
- const [updatedPerson2, messages2] = await repository.updatePerson(updatedPerson1, update2)
+ const [updatedPerson2, messages2] = await repository.updatePerson(
+ updatedPerson1,
+ createPersonUpdateFields(updatedPerson1, update2)
+ )
// updatePerson replaces properties entirely, so we expect only the age property
expect(updatedPerson2.properties).toEqual({ age: 30 })
@@ -1231,7 +1235,12 @@ describe('PostgresPersonRepository', () => {
await postgres.transaction(PostgresUse.PERSONS_WRITE, 'test-transaction', async (tx) => {
const update = { properties: { name: 'Jane' } }
- const [updatedPerson, messages] = await repository.updatePerson(person, update, 'tx', tx)
+ const [updatedPerson, messages] = await repository.updatePerson(
+ person,
+ createPersonUpdateFields(person, update),
+ 'tx',
+ tx
+ )
expect(updatedPerson.properties).toEqual({ name: 'Jane' })
expect(messages).toHaveLength(1)
@@ -1247,7 +1256,11 @@ describe('PostgresPersonRepository', () => {
const person = await createTestPerson(team.id, 'test-distinct', { name: 'John' })
const update = { properties: { name: 'Jane' } }
- const [updatedPerson, messages] = await repository.updatePerson(person, update, 'test-tag')
+ const [updatedPerson, messages] = await repository.updatePerson(
+ person,
+ createPersonUpdateFields(person, update),
+ 'test-tag'
+ )
expect(updatedPerson.properties).toEqual({ name: 'Jane' })
expect(messages).toHaveLength(1)
@@ -1269,7 +1282,9 @@ describe('PostgresPersonRepository', () => {
}
const update = { properties: { name: 'Jane' } }
- await expect(repository.updatePerson(nonExistentPerson, update)).rejects.toThrow(NoRowsUpdatedError)
+ await expect(
+ repository.updatePerson(nonExistentPerson, createPersonUpdateFields(nonExistentPerson, update))
+ ).rejects.toThrow(NoRowsUpdatedError)
})
it('should handle updatePersonAssertVersion with optimistic concurrency control', async () => {
@@ -1734,7 +1749,10 @@ describe('PostgresPersonRepository', () => {
},
}
- const [updatedPerson, messages] = await oversizedRepository.updatePerson(normalPerson, oversizedUpdate)
+ const [updatedPerson, messages] = await oversizedRepository.updatePerson(
+ normalPerson,
+ createPersonUpdateFields(normalPerson, oversizedUpdate)
+ )
expect(updatedPerson).toBeDefined()
expect(updatedPerson.version).toBe(normalPerson.version + 1)
@@ -1769,12 +1787,18 @@ describe('PostgresPersonRepository', () => {
},
}
- await expect(oversizedRepository.updatePerson(normalPerson, oversizedUpdate)).rejects.toThrow(
- PersonPropertiesSizeViolationError
- )
- await expect(oversizedRepository.updatePerson(normalPerson, oversizedUpdate)).rejects.toThrow(
- 'Person properties update would exceed size limit'
- )
+ await expect(
+ oversizedRepository.updatePerson(
+ normalPerson,
+ createPersonUpdateFields(normalPerson, oversizedUpdate)
+ )
+ ).rejects.toThrow(PersonPropertiesSizeViolationError)
+ await expect(
+ oversizedRepository.updatePerson(
+ normalPerson,
+ createPersonUpdateFields(normalPerson, oversizedUpdate)
+ )
+ ).rejects.toThrow('Person properties update would exceed size limit')
mockPersonPropertiesSize.mockRestore()
mockQuery.mockRestore()
@@ -1816,14 +1840,20 @@ describe('PostgresPersonRepository', () => {
},
}
- await expect(oversizedRepository.updatePerson(normalPerson, oversizedUpdate)).rejects.toThrow(
- PersonPropertiesSizeViolationError
- )
+ await expect(
+ oversizedRepository.updatePerson(
+ normalPerson,
+ createPersonUpdateFields(normalPerson, oversizedUpdate)
+ )
+ ).rejects.toThrow(PersonPropertiesSizeViolationError)
updateCallCount = 0
- await expect(oversizedRepository.updatePerson(normalPerson, oversizedUpdate)).rejects.toThrow(
- 'Person properties update failed after trying to trim oversized properties'
- )
+ await expect(
+ oversizedRepository.updatePerson(
+ normalPerson,
+ createPersonUpdateFields(normalPerson, oversizedUpdate)
+ )
+ ).rejects.toThrow('Person properties update failed after trying to trim oversized properties')
mockPersonPropertiesSize.mockRestore()
mockQuery.mockRestore()
@@ -1883,16 +1913,16 @@ describe('PostgresPersonRepository', () => {
},
}
- await expect(oversizedRepository.updatePerson(oversizedPerson, update)).rejects.toThrow(
- PersonPropertiesSizeViolationError
- )
+ await expect(
+ oversizedRepository.updatePerson(oversizedPerson, createPersonUpdateFields(oversizedPerson, update))
+ ).rejects.toThrow(PersonPropertiesSizeViolationError)
expect(updateCallCount).toBe(2)
updateCallCount = 0
- await expect(oversizedRepository.updatePerson(oversizedPerson, update)).rejects.toThrow(
- 'Person properties update failed after trying to trim oversized properties'
- )
+ await expect(
+ oversizedRepository.updatePerson(oversizedPerson, createPersonUpdateFields(oversizedPerson, update))
+ ).rejects.toThrow('Person properties update failed after trying to trim oversized properties')
expect(updateCallCount).toBe(2)
@@ -2050,7 +2080,7 @@ describe('PostgresPersonRepository', () => {
}
try {
- await oversizedRepository.updatePerson(person, oversizedUpdate)
+ await oversizedRepository.updatePerson(person, createPersonUpdateFields(person, oversizedUpdate))
expect(mockInc).toHaveBeenCalledWith({ result: 'success' })
} catch (error) {}
@@ -2164,12 +2194,12 @@ describe('PostgresPersonRepository', () => {
const [updatedPerson1, messages1, versionDisparity1] = await repositoryWithCalculation.updatePerson(
person1,
- update,
+ createPersonUpdateFields(person1, update),
'test-with-logging'
)
const [updatedPerson2, messages2, versionDisparity2] = await repositoryWithoutCalculation.updatePerson(
person2,
- update,
+ createPersonUpdateFields(person2, update),
'test-without-logging'
)
@@ -2253,7 +2283,10 @@ describe('PostgresPersonRepository', () => {
const person = await createTestPerson(team.id, 'test-default', { name: 'John' })
const update = { properties: { name: 'Jane', city: 'Boston' } }
- const [updatedPerson, messages, versionDisparity] = await defaultRepository.updatePerson(person, update)
+ const [updatedPerson, messages, versionDisparity] = await defaultRepository.updatePerson(
+ person,
+ createPersonUpdateFields(person, update)
+ )
expect(updatedPerson.properties).toEqual({ name: 'Jane', city: 'Boston' })
expect(updatedPerson.version).toBe(person.version + 1)
@@ -2261,6 +2294,2091 @@ describe('PostgresPersonRepository', () => {
expect(versionDisparity).toBe(false)
})
})
+
+ describe('JSON field size metrics', () => {
+ let personJsonFieldSizeHistogram: any
+ let labelsSpy: jest.SpyInstance
+ let observeCalls: any[]
+
+ beforeEach(() => {
+ // Import the histogram
+ const metricsModule = require('../metrics')
+ personJsonFieldSizeHistogram = metricsModule.personJsonFieldSizeHistogram
+
+ // Track observe calls
+ observeCalls = []
+
+ // Spy on labels to intercept and spy on observe
+ const originalLabels = personJsonFieldSizeHistogram.labels.bind(personJsonFieldSizeHistogram)
+ labelsSpy = jest.spyOn(personJsonFieldSizeHistogram, 'labels').mockImplementation(function (labels: any) {
+ const labeledInstance = originalLabels(labels)
+ const originalObserve = labeledInstance.observe.bind(labeledInstance)
+ labeledInstance.observe = jest.fn((value: number) => {
+ observeCalls.push({ labels, value })
+ return originalObserve(value)
+ })
+ return labeledInstance
+ })
+ })
+
+ afterEach(() => {
+ if (labelsSpy) {
+ labelsSpy.mockRestore()
+ }
+ observeCalls = []
+ })
+
+ it('should track JSON field sizes on createPerson', async () => {
+ const team = await getFirstTeam(hub)
+ const properties = { name: 'Alice', email: 'alice@example.com', age: 25 }
+ const propertiesLastUpdatedAt = { name: '2024-01-15T10:30:00.000Z', email: '2024-01-15T10:30:00.000Z' }
+ const propertiesLastOperation = { name: PropertyUpdateOperation.Set, email: PropertyUpdateOperation.Set }
+
+ // Pre-serialize to calculate expected sizes
+ const expectedPropertiesSize = JSON.stringify(properties).length
+ const expectedPropertiesLastUpdatedAtSize = JSON.stringify(propertiesLastUpdatedAt).length
+ const expectedPropertiesLastOperationSize = JSON.stringify(propertiesLastOperation).length
+
+ await repository.createPerson(
+ TIMESTAMP,
+ properties,
+ propertiesLastUpdatedAt,
+ propertiesLastOperation,
+ team.id,
+ null,
+ true,
+ new UUIDT().toString(),
+ [{ distinctId: 'test-metrics-create' }]
+ )
+
+ // Verify metrics were recorded for all three fields (3 calls total)
+ expect(observeCalls).toHaveLength(3)
+
+ // Verify each field was recorded with operation='createPerson' and exact size
+ const propertiesCall = observeCalls.find((c) => c.labels.field === 'properties')
+ const propertiesLastUpdatedAtCall = observeCalls.find(
+ (c) => c.labels.field === 'properties_last_updated_at'
+ )
+ const propertiesLastOperationCall = observeCalls.find((c) => c.labels.field === 'properties_last_operation')
+
+ expect(propertiesCall).toBeDefined()
+ expect(propertiesCall!.labels.operation).toBe('createPerson')
+ expect(propertiesCall!.value).toBe(expectedPropertiesSize)
+
+ expect(propertiesLastUpdatedAtCall).toBeDefined()
+ expect(propertiesLastUpdatedAtCall!.labels.operation).toBe('createPerson')
+ expect(propertiesLastUpdatedAtCall!.value).toBe(expectedPropertiesLastUpdatedAtSize)
+
+ expect(propertiesLastOperationCall).toBeDefined()
+ expect(propertiesLastOperationCall!.labels.operation).toBe('createPerson')
+ expect(propertiesLastOperationCall!.value).toBe(expectedPropertiesLastOperationSize)
+ })
+
+ it('should track JSON field sizes on updatePerson with properties', async () => {
+ const team = await getFirstTeam(hub)
+ const person = await createTestPerson(team.id, 'test-metrics-update', { name: 'Bob' })
+
+ // Clear observe calls from createTestPerson
+ observeCalls = []
+
+ const update = {
+ properties: { name: 'Bob Updated', city: 'San Francisco', data: 'x'.repeat(1000) },
+ properties_last_updated_at: { name: '2024-01-16T10:30:00.000Z', city: '2024-01-16T10:30:00.000Z' },
+ properties_last_operation: { name: PropertyUpdateOperation.Set, city: PropertyUpdateOperation.Set },
+ }
+
+ // Pre-serialize to calculate expected sizes
+ const expectedPropertiesSize = JSON.stringify(update.properties).length
+ const expectedPropertiesLastUpdatedAtSize = JSON.stringify(update.properties_last_updated_at).length
+ const expectedPropertiesLastOperationSize = JSON.stringify(update.properties_last_operation).length
+
+ await repository.updatePerson(person, createPersonUpdateFields(person, update))
+
+ // Verify metrics were recorded for all updated fields (3 calls total)
+ expect(observeCalls).toHaveLength(3)
+
+ // Verify each field was recorded with exact size
+ const propertiesCall = observeCalls.find((c) => c.labels.field === 'properties')
+ const propertiesLastUpdatedAtCall = observeCalls.find(
+ (c) => c.labels.field === 'properties_last_updated_at'
+ )
+ const propertiesLastOperationCall = observeCalls.find((c) => c.labels.field === 'properties_last_operation')
+
+ expect(propertiesCall).toBeDefined()
+ expect(propertiesCall!.labels.operation).toBe('updatePerson')
+ expect(propertiesCall!.value).toBe(expectedPropertiesSize)
+
+ expect(propertiesLastUpdatedAtCall).toBeDefined()
+ expect(propertiesLastUpdatedAtCall!.labels.operation).toBe('updatePerson')
+ expect(propertiesLastUpdatedAtCall!.value).toBe(expectedPropertiesLastUpdatedAtSize)
+
+ expect(propertiesLastOperationCall).toBeDefined()
+ expect(propertiesLastOperationCall!.labels.operation).toBe('updatePerson')
+ expect(propertiesLastOperationCall!.value).toBe(expectedPropertiesLastOperationSize)
+ })
+
+ it('should only track metrics for fields being updated', async () => {
+ const team = await getFirstTeam(hub)
+ const person = await createTestPerson(team.id, 'test-metrics-partial', { name: 'Charlie' })
+
+ // Clear observe calls from createTestPerson
+ observeCalls = []
+
+ // Only update properties, not the other fields
+ const update = {
+ properties: { name: 'Charlie Updated' },
+ }
+
+ const expectedPropertiesSize = JSON.stringify(update.properties).length
+
+ await repository.updatePerson(person, createPersonUpdateFields(person, update))
+
+ // Since we always pass all fields for consistent query plans, all 3 JSONB fields are tracked
+ expect(observeCalls).toHaveLength(3)
+
+ const propertiesCall = observeCalls.find((c) => c.labels.field === 'properties')
+ expect(propertiesCall).toBeDefined()
+ expect(propertiesCall!.labels.operation).toBe('updatePerson')
+ expect(propertiesCall!.value).toBe(expectedPropertiesSize)
+ })
+
+ it('should handle large properties correctly', async () => {
+ const team = await getFirstTeam(hub)
+ const largeProperties = {
+ name: 'David',
+ large_field: 'z'.repeat(100000), // 100KB of data
+ }
+
+ // Pre-serialize to calculate expected sizes
+ const expectedPropertiesSize = JSON.stringify(largeProperties).length
+ const expectedPropertiesLastUpdatedAtSize = JSON.stringify({}).length
+ const expectedPropertiesLastOperationSize = JSON.stringify({}).length
+
+ await repository.createPerson(
+ TIMESTAMP,
+ largeProperties,
+ {},
+ {},
+ team.id,
+ null,
+ true,
+ new UUIDT().toString(),
+ [{ distinctId: 'test-metrics-large' }]
+ )
+
+ // Should have 3 calls (properties, properties_last_updated_at, properties_last_operation)
+ expect(observeCalls).toHaveLength(3)
+
+ // Verify exact sizes
+ const propertiesCall = observeCalls.find((c) => c.labels.field === 'properties')
+ const propertiesLastUpdatedAtCall = observeCalls.find(
+ (c) => c.labels.field === 'properties_last_updated_at'
+ )
+ const propertiesLastOperationCall = observeCalls.find((c) => c.labels.field === 'properties_last_operation')
+
+ expect(propertiesCall).toBeDefined()
+ expect(propertiesCall!.labels.operation).toBe('createPerson')
+ expect(propertiesCall!.value).toBe(expectedPropertiesSize)
+ expect(propertiesCall!.value).toBeGreaterThan(100000) // Sanity check
+
+ expect(propertiesLastUpdatedAtCall).toBeDefined()
+ expect(propertiesLastUpdatedAtCall!.value).toBe(expectedPropertiesLastUpdatedAtSize)
+
+ expect(propertiesLastOperationCall).toBeDefined()
+ expect(propertiesLastOperationCall!.value).toBe(expectedPropertiesLastOperationSize)
+ })
+
+ it('should not record metrics when update is empty', async () => {
+ const team = await getFirstTeam(hub)
+ const person = await createTestPerson(team.id, 'test-metrics-empty', { name: 'Eve' })
+
+ // Clear observe calls from createTestPerson
+ observeCalls = []
+
+ // Empty update - but helper fills in all fields from person
+ const update = {}
+
+ await repository.updatePerson(person, createPersonUpdateFields(person, update))
+
+ // Since we always pass all fields for consistent query plans, all 3 JSONB fields are tracked
+ // even though the values haven't changed from the person object
+ expect(observeCalls).toHaveLength(3)
+ })
+ })
+
+ describe('Table Cutover', () => {
+ let cutoverRepository: PostgresPersonRepository
+ const NEW_TABLE_NAME = 'posthog_person_new'
+ const ID_OFFSET = 1000000
+
+ beforeEach(() => {
+ // Create repository with cutover enabled
+ cutoverRepository = new PostgresPersonRepository(postgres, {
+ calculatePropertiesSize: 0,
+ personPropertiesDbConstraintLimitBytes: 1024 * 1024,
+ personPropertiesTrimTargetBytes: 512 * 1024,
+ tableCutoverEnabled: true,
+ newTableName: NEW_TABLE_NAME,
+ newTableIdOffset: ID_OFFSET,
+ })
+ })
+
+ describe('createPerson()', () => {
+ it('should create new persons in the new table when cutover is enabled', async () => {
+ const team = await getFirstTeam(hub)
+ const uuid = new UUIDT().toString()
+
+ const result = await cutoverRepository.createPerson(
+ TIMESTAMP,
+ { name: 'New Table Person' },
+ {},
+ {},
+ team.id,
+ null,
+ true,
+ uuid,
+ [{ distinctId: 'new-table-distinct' }]
+ )
+
+ expect(result.success).toBe(true)
+ if (!result.success) {
+ throw new Error('Failed to create person')
+ }
+
+ // Verify person is in new table
+ const newTableResult = await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `SELECT * FROM ${NEW_TABLE_NAME} WHERE uuid = $1`,
+ [uuid],
+ 'checkNewTable'
+ )
+ expect(newTableResult.rows).toHaveLength(1)
+
+ // Verify person is NOT in old table
+ const oldTableResult = await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `SELECT * FROM posthog_person WHERE uuid = $1`,
+ [uuid],
+ 'checkOldTable'
+ )
+ expect(oldTableResult.rows).toHaveLength(0)
+ })
+ })
+
+ describe('fetchPerson()', () => {
+ it('should fetch person from old table when ID < offset', async () => {
+ const team = await getFirstTeam(hub)
+
+ // Create person in old table with low ID
+ const lowId = 100
+ const oldUuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ lowId,
+ oldUuid,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'Old Table' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertOldTablePerson'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version)
+ VALUES ($1, $2, $3, $4)`,
+ ['test-distinct-old', lowId, team.id, 0],
+ 'insertOldTableDistinctId'
+ )
+
+ // Create a different person in new table with high ID but same distinct_id to verify routing
+ const highId = ID_OFFSET + 100
+ const newUuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO ${NEW_TABLE_NAME} (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ highId,
+ newUuid,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'New Table Wrong' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertNewTablePerson'
+ )
+
+ // Fetch should return the old table person based on person_id from distinct_id mapping
+ const person = await cutoverRepository.fetchPerson(team.id, 'test-distinct-old')
+
+ expect(person).toBeDefined()
+ expect(person!.id).toBe(String(lowId))
+ expect(person!.uuid).toBe(oldUuid)
+ expect(person!.properties.name).toBe('Old Table')
+ })
+
+ it('should fetch person from new table when ID >= offset', async () => {
+ const team = await getFirstTeam(hub)
+
+ // Create person in new table with high ID
+ const highId = ID_OFFSET + 100
+ const newUuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO ${NEW_TABLE_NAME} (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ highId,
+ newUuid,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'New Table' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertNewTablePerson'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version)
+ VALUES ($1, $2, $3, $4)`,
+ ['test-distinct-new', highId, team.id, 0],
+ 'insertNewTableDistinctId'
+ )
+
+ // Create a different person in old table with low ID but same distinct_id to verify routing
+ const lowId = 200
+ const oldUuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ lowId,
+ oldUuid,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'Old Table Wrong' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertOldTablePerson'
+ )
+
+ // Fetch should return the new table person based on person_id from distinct_id mapping
+ const person = await cutoverRepository.fetchPerson(team.id, 'test-distinct-new')
+
+ expect(person).toBeDefined()
+ expect(person!.id).toBe(String(highId))
+ expect(person!.uuid).toBe(newUuid)
+ expect(person!.properties.name).toBe('New Table')
+ })
+
+ it('should return undefined for non-existent distinct ID', async () => {
+ const team = await getFirstTeam(hub)
+ const person = await cutoverRepository.fetchPerson(team.id, 'non-existent')
+ expect(person).toBeUndefined()
+ })
+
+ it('should check new table first and mark __useNewTable=true when person exists there', async () => {
+ const team = await getFirstTeam(hub)
+
+ // Create person with low ID (below threshold) but in new table
+ const lowId = 100
+ const uuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person_new (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ lowId,
+ uuid,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'In New Table' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertNewTablePerson'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version)
+ VALUES ($1, $2, $3, $4)`,
+ ['test-in-new', lowId, team.id, 0],
+ 'insertDistinctId'
+ )
+
+ // Fetch should check new table first and return with __useNewTable=true
+ const person = await cutoverRepository.fetchPerson(team.id, 'test-in-new')
+
+ expect(person).toBeDefined()
+ expect(person!.id).toBe(String(lowId))
+ expect(person!.uuid).toBe(uuid)
+ expect(person!.properties.name).toBe('In New Table')
+ expect(person!.__useNewTable).toBe(true)
+ })
+
+ it('should opportunistically copy person from old table to new table and mark __useNewTable=true', async () => {
+ const team = await getFirstTeam(hub)
+
+ // Create person in old table only
+ const oldId = 100
+ const uuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ oldId,
+ uuid,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'In Old Table' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertOldTablePerson'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version)
+ VALUES ($1, $2, $3, $4)`,
+ ['test-in-old', oldId, team.id, 0],
+ 'insertDistinctId'
+ )
+
+ // Verify person doesn't exist in new table yet
+ const { rows: beforeRows } = await postgres.query(
+ PostgresUse.PERSONS_READ,
+ 'SELECT * FROM posthog_person_new WHERE id = $1 AND team_id = $2',
+ [oldId, team.id],
+ 'checkNewTableBefore'
+ )
+ expect(beforeRows.length).toBe(0)
+
+ // Fetch should check new table first (not found), find in old table, then copy to new table
+ const person = await cutoverRepository.fetchPerson(team.id, 'test-in-old')
+
+ expect(person).toBeDefined()
+ expect(person!.id).toBe(String(oldId))
+ expect(person!.uuid).toBe(uuid)
+ expect(person!.properties.name).toBe('In Old Table')
+ // After opportunistic copy, __useNewTable should be true
+ expect(person!.__useNewTable).toBe(true)
+
+ // Verify person was copied to new table
+ const { rows: afterRows } = await postgres.query(
+ PostgresUse.PERSONS_READ,
+ 'SELECT * FROM posthog_person_new WHERE id = $1 AND team_id = $2',
+ [oldId, team.id],
+ 'checkNewTableAfter'
+ )
+ expect(afterRows.length).toBe(1)
+ expect(afterRows[0].id).toBe(String(oldId))
+ expect(afterRows[0].uuid).toBe(uuid)
+ expect(afterRows[0].properties).toEqual({ name: 'In Old Table' })
+ })
+
+ it('should handle ON CONFLICT gracefully when person already exists in new table', async () => {
+ const team = await getFirstTeam(hub)
+
+ // Create person in BOTH old and new tables (simulating a partial migration state)
+ const personId = 100
+ const uuid = new UUIDT().toString()
+
+ // Insert in old table
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ personId,
+ uuid,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'In Old Table' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 5,
+ ],
+ 'insertOldTablePerson'
+ )
+
+ // Insert in new table with different version (newer)
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person_new (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ personId,
+ uuid,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'Already in New Table' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 10,
+ ],
+ 'insertNewTablePerson'
+ )
+
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version)
+ VALUES ($1, $2, $3, $4)`,
+ ['test-conflict', personId, team.id, 0],
+ 'insertDistinctId'
+ )
+
+ // Fetch should find in new table first (not attempt copy from old)
+ const person = await cutoverRepository.fetchPerson(team.id, 'test-conflict')
+
+ expect(person).toBeDefined()
+ expect(person!.__useNewTable).toBe(true)
+ expect(person!.properties.name).toBe('Already in New Table')
+ expect(person!.version).toBe(10) // New table version, not old table version
+
+ // Verify new table data is unchanged (ON CONFLICT DO NOTHING didn't overwrite)
+ const { rows } = await postgres.query(
+ PostgresUse.PERSONS_READ,
+ 'SELECT properties, version FROM posthog_person_new WHERE id = $1 AND team_id = $2',
+ [personId, team.id],
+ 'checkNewTableUnchanged'
+ )
+ expect(rows.length).toBe(1)
+ expect(rows[0].properties).toEqual({ name: 'Already in New Table' })
+ expect(rows[0].version).toBe('10')
+ })
+
+ it('should route subsequent updates to new table after opportunistic copy', async () => {
+ const team = await getFirstTeam(hub)
+
+ // Create person in old table only
+ const oldId = 100
+ const uuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ oldId,
+ uuid,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'Original' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertOldTablePerson'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version)
+ VALUES ($1, $2, $3, $4)`,
+ ['test-update-after-copy', oldId, team.id, 0],
+ 'insertDistinctId'
+ )
+
+ // First fetch - triggers opportunistic copy
+ const person = await cutoverRepository.fetchPerson(team.id, 'test-update-after-copy')
+ expect(person!.__useNewTable).toBe(true)
+
+ // Update the person - should go to new table (avoiding slow old table trigger)
+ await cutoverRepository.updatePerson(person!, {
+ properties: { name: 'Updated in New Table' },
+ properties_last_updated_at: {},
+ properties_last_operation: {},
+ is_identified: true,
+ created_at: TIMESTAMP,
+ })
+
+ // Verify update went to new table
+ const { rows: newTableRows } = await postgres.query(
+ PostgresUse.PERSONS_READ,
+ 'SELECT properties FROM posthog_person_new WHERE id = $1 AND team_id = $2',
+ [oldId, team.id],
+ 'checkNewTableUpdate'
+ )
+ expect(newTableRows.length).toBe(1)
+ expect(newTableRows[0].properties).toEqual({ name: 'Updated in New Table' })
+
+ // Verify old table was NOT updated (still has original value)
+ const { rows: oldTableRows } = await postgres.query(
+ PostgresUse.PERSONS_READ,
+ 'SELECT properties FROM posthog_person WHERE id = $1 AND team_id = $2',
+ [oldId, team.id],
+ 'checkOldTableNotUpdated'
+ )
+ expect(oldTableRows.length).toBe(1)
+ expect(oldTableRows[0].properties).toEqual({ name: 'Original' })
+ })
+
+ it('should route updates to new table when __useNewTable=true', async () => {
+ const team = await getFirstTeam(hub)
+
+ // Create person in new table
+ const personId = 100
+ const uuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person_new (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ personId,
+ uuid,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ original: 'value' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertNewTablePerson'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version)
+ VALUES ($1, $2, $3, $4)`,
+ ['test-update-new', personId, team.id, 0],
+ 'insertDistinctId'
+ )
+
+ // Fetch person (should have __useNewTable=true)
+ const person = await cutoverRepository.fetchPerson(team.id, 'test-update-new')
+ expect(person!.__useNewTable).toBe(true)
+
+ // Update the person
+ await cutoverRepository.updatePerson(person!, {
+ properties: { updated: 'value' },
+ properties_last_updated_at: {},
+ properties_last_operation: {},
+ is_identified: true,
+ created_at: TIMESTAMP,
+ })
+
+ // Verify update went to new table
+ const { rows } = await postgres.query(
+ PostgresUse.PERSONS_READ,
+ 'SELECT properties FROM posthog_person_new WHERE id = $1 AND team_id = $2',
+ [personId, team.id],
+ 'verifyNewTableUpdate'
+ )
+ expect(rows.length).toBe(1)
+ expect(rows[0].properties).toEqual({ updated: 'value' })
+
+ // Verify old table was NOT updated
+ const { rows: oldRows } = await postgres.query(
+ PostgresUse.PERSONS_READ,
+ 'SELECT * FROM posthog_person WHERE id = $1 AND team_id = $2',
+ [personId, team.id],
+ 'verifyOldTableNotUpdated'
+ )
+ expect(oldRows.length).toBe(0)
+ })
+
+ it('should route deletes to correct table based on __useNewTable', async () => {
+ const team = await getFirstTeam(hub)
+
+ // Create person in new table
+ const personId = 100
+ const uuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person_new (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ personId,
+ uuid,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'To Delete' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertNewTablePerson'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version)
+ VALUES ($1, $2, $3, $4)`,
+ ['test-delete-new', personId, team.id, 0],
+ 'insertDistinctId'
+ )
+
+ // Fetch person (should have __useNewTable=true)
+ const person = await cutoverRepository.fetchPerson(team.id, 'test-delete-new')
+ expect(person!.__useNewTable).toBe(true)
+
+ // Delete the person
+ await cutoverRepository.deletePerson(person!)
+
+ // Verify deletion happened in new table
+ const { rows } = await postgres.query(
+ PostgresUse.PERSONS_READ,
+ 'SELECT * FROM posthog_person_new WHERE id = $1 AND team_id = $2',
+ [personId, team.id],
+ 'verifyNewTableDelete'
+ )
+ expect(rows.length).toBe(0)
+ })
+
+ it('should not persist __useNewTable flag to database', async () => {
+ const team = await getFirstTeam(hub)
+
+ // Create person in new table
+ const personId = 100
+ const uuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person_new (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ personId,
+ uuid,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'Test Person' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertNewTablePerson'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version)
+ VALUES ($1, $2, $3, $4)`,
+ ['test-no-persist', personId, team.id, 0],
+ 'insertDistinctId'
+ )
+
+ // Fetch person (should have __useNewTable=true)
+ const person = await cutoverRepository.fetchPerson(team.id, 'test-no-persist')
+ expect(person!.__useNewTable).toBe(true)
+
+ // Update the person - the __useNewTable flag should NOT be written to DB
+ await cutoverRepository.updatePerson(person!, {
+ properties: { name: 'Updated Person' },
+ properties_last_updated_at: {},
+ properties_last_operation: {},
+ is_identified: true,
+ created_at: TIMESTAMP,
+ })
+
+ // Verify database doesn't have __useNewTable field
+ const { rows } = await postgres.query(
+ PostgresUse.PERSONS_READ,
+ 'SELECT * FROM posthog_person_new WHERE id = $1 AND team_id = $2',
+ [personId, team.id],
+ 'verifyNoUseNewTableField'
+ )
+ expect(rows.length).toBe(1)
+
+ // Check that the raw database row doesn't contain __useNewTable
+ const dbRow = rows[0]
+ expect(dbRow).not.toHaveProperty('__useNewTable')
+ expect(dbRow).not.toHaveProperty('__usenewTable')
+ expect(dbRow).not.toHaveProperty('__usenewtable')
+
+ // Verify the properties field doesn't contain __useNewTable
+ expect(dbRow.properties).toEqual({ name: 'Updated Person' })
+ expect(dbRow.properties).not.toHaveProperty('__useNewTable')
+ })
+ })
+
+ describe('fetchPersonsByDistinctIds()', () => {
+ it('should fetch multiple persons from old table only', async () => {
+ const team = await getFirstTeam(hub)
+
+ // Create multiple persons in old table
+ const oldId1 = 100
+ const oldUuid1 = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ oldId1,
+ oldUuid1,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'Old Person 1' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertOldPerson1'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version)
+ VALUES ($1, $2, $3, $4)`,
+ ['old-distinct-1', oldId1, team.id, 0],
+ 'insertOldDistinctId1'
+ )
+
+ const oldId2 = 200
+ const oldUuid2 = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ oldId2,
+ oldUuid2,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'Old Person 2' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertOldPerson2'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version)
+ VALUES ($1, $2, $3, $4)`,
+ ['old-distinct-2', oldId2, team.id, 0],
+ 'insertOldDistinctId2'
+ )
+
+ const oldId3 = 300
+ const oldUuid3 = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ oldId3,
+ oldUuid3,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'Old Person 3' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertOldPerson3'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version)
+ VALUES ($1, $2, $3, $4)`,
+ ['old-distinct-3', oldId3, team.id, 0],
+ 'insertOldDistinctId3'
+ )
+
+ const result = await cutoverRepository.fetchPersonsByDistinctIds([
+ { teamId: team.id, distinctId: 'old-distinct-1' },
+ { teamId: team.id, distinctId: 'old-distinct-2' },
+ { teamId: team.id, distinctId: 'old-distinct-3' },
+ ])
+
+ expect(result).toHaveLength(3)
+
+ const person1 = result.find((p) => p.distinct_id === 'old-distinct-1')
+ expect(person1).toBeDefined()
+ expect(person1!.id).toBe(String(oldId1))
+ expect(person1!.uuid).toBe(oldUuid1)
+ expect(person1!.properties.name).toBe('Old Person 1')
+
+ const person2 = result.find((p) => p.distinct_id === 'old-distinct-2')
+ expect(person2).toBeDefined()
+ expect(person2!.id).toBe(String(oldId2))
+ expect(person2!.uuid).toBe(oldUuid2)
+ expect(person2!.properties.name).toBe('Old Person 2')
+
+ const person3 = result.find((p) => p.distinct_id === 'old-distinct-3')
+ expect(person3).toBeDefined()
+ expect(person3!.id).toBe(String(oldId3))
+ expect(person3!.uuid).toBe(oldUuid3)
+ expect(person3!.properties.name).toBe('Old Person 3')
+ })
+
+ it('should fetch multiple persons from new table only', async () => {
+ const team = await getFirstTeam(hub)
+
+ // Create multiple persons in new table
+ const newId1 = ID_OFFSET + 100
+ const newUuid1 = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO ${NEW_TABLE_NAME} (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ newId1,
+ newUuid1,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'New Person 1' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertNewPerson1'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version)
+ VALUES ($1, $2, $3, $4)`,
+ ['new-distinct-1', newId1, team.id, 0],
+ 'insertNewDistinctId1'
+ )
+
+ const newId2 = ID_OFFSET + 200
+ const newUuid2 = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO ${NEW_TABLE_NAME} (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ newId2,
+ newUuid2,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'New Person 2' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertNewPerson2'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version)
+ VALUES ($1, $2, $3, $4)`,
+ ['new-distinct-2', newId2, team.id, 0],
+ 'insertNewDistinctId2'
+ )
+
+ const newId3 = ID_OFFSET + 300
+ const newUuid3 = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO ${NEW_TABLE_NAME} (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ newId3,
+ newUuid3,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'New Person 3' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertNewPerson3'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version)
+ VALUES ($1, $2, $3, $4)`,
+ ['new-distinct-3', newId3, team.id, 0],
+ 'insertNewDistinctId3'
+ )
+
+ const result = await cutoverRepository.fetchPersonsByDistinctIds([
+ { teamId: team.id, distinctId: 'new-distinct-1' },
+ { teamId: team.id, distinctId: 'new-distinct-2' },
+ { teamId: team.id, distinctId: 'new-distinct-3' },
+ ])
+
+ expect(result).toHaveLength(3)
+
+ const person1 = result.find((p) => p.distinct_id === 'new-distinct-1')
+ expect(person1).toBeDefined()
+ expect(person1!.id).toBe(String(newId1))
+ expect(person1!.uuid).toBe(newUuid1)
+ expect(person1!.properties.name).toBe('New Person 1')
+
+ const person2 = result.find((p) => p.distinct_id === 'new-distinct-2')
+ expect(person2).toBeDefined()
+ expect(person2!.id).toBe(String(newId2))
+ expect(person2!.uuid).toBe(newUuid2)
+ expect(person2!.properties.name).toBe('New Person 2')
+
+ const person3 = result.find((p) => p.distinct_id === 'new-distinct-3')
+ expect(person3).toBeDefined()
+ expect(person3!.id).toBe(String(newId3))
+ expect(person3!.uuid).toBe(newUuid3)
+ expect(person3!.properties.name).toBe('New Person 3')
+ })
+
+ it('should fetch persons from both tables with mixed IDs', async () => {
+ const team = await getFirstTeam(hub)
+
+ // Create multiple persons in old table
+ const oldId1 = 100
+ const oldUuid1 = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ oldId1,
+ oldUuid1,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'Old 1' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertOldPerson1'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version)
+ VALUES ($1, $2, $3, $4)`,
+ ['mixed-old-1', oldId1, team.id, 0],
+ 'insertOldDistinctId1'
+ )
+
+ const oldId2 = 200
+ const oldUuid2 = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ oldId2,
+ oldUuid2,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'Old 2' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertOldPerson2'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version)
+ VALUES ($1, $2, $3, $4)`,
+ ['mixed-old-2', oldId2, team.id, 0],
+ 'insertOldDistinctId2'
+ )
+
+ const oldId3 = 300
+ const oldUuid3 = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ oldId3,
+ oldUuid3,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'Old 3' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertOldPerson3'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version)
+ VALUES ($1, $2, $3, $4)`,
+ ['mixed-old-3', oldId3, team.id, 0],
+ 'insertOldDistinctId3'
+ )
+
+ // Create multiple persons in new table
+ const newId1 = ID_OFFSET + 100
+ const newUuid1 = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO ${NEW_TABLE_NAME} (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ newId1,
+ newUuid1,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'New 1' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertNewPerson1'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version)
+ VALUES ($1, $2, $3, $4)`,
+ ['mixed-new-1', newId1, team.id, 0],
+ 'insertNewDistinctId1'
+ )
+
+ const newId2 = ID_OFFSET + 200
+ const newUuid2 = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO ${NEW_TABLE_NAME} (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ newId2,
+ newUuid2,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'New 2' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertNewPerson2'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version)
+ VALUES ($1, $2, $3, $4)`,
+ ['mixed-new-2', newId2, team.id, 0],
+ 'insertNewDistinctId2'
+ )
+
+ const newId3 = ID_OFFSET + 300
+ const newUuid3 = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO ${NEW_TABLE_NAME} (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ newId3,
+ newUuid3,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'New 3' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertNewPerson3'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version)
+ VALUES ($1, $2, $3, $4)`,
+ ['mixed-new-3', newId3, team.id, 0],
+ 'insertNewDistinctId3'
+ )
+
+ // Fetch all persons - mix of old and new table persons
+ const result = await cutoverRepository.fetchPersonsByDistinctIds([
+ { teamId: team.id, distinctId: 'mixed-old-1' },
+ { teamId: team.id, distinctId: 'mixed-new-1' },
+ { teamId: team.id, distinctId: 'mixed-old-2' },
+ { teamId: team.id, distinctId: 'mixed-new-2' },
+ { teamId: team.id, distinctId: 'mixed-old-3' },
+ { teamId: team.id, distinctId: 'mixed-new-3' },
+ ])
+
+ expect(result).toHaveLength(6)
+
+ // Verify old table persons
+ const oldPerson1 = result.find((p) => p.distinct_id === 'mixed-old-1')
+ expect(oldPerson1).toBeDefined()
+ expect(oldPerson1!.id).toBe(String(oldId1))
+ expect(oldPerson1!.uuid).toBe(oldUuid1)
+ expect(oldPerson1!.properties.name).toBe('Old 1')
+
+ const oldPerson2 = result.find((p) => p.distinct_id === 'mixed-old-2')
+ expect(oldPerson2).toBeDefined()
+ expect(oldPerson2!.id).toBe(String(oldId2))
+ expect(oldPerson2!.uuid).toBe(oldUuid2)
+ expect(oldPerson2!.properties.name).toBe('Old 2')
+
+ const oldPerson3 = result.find((p) => p.distinct_id === 'mixed-old-3')
+ expect(oldPerson3).toBeDefined()
+ expect(oldPerson3!.id).toBe(String(oldId3))
+ expect(oldPerson3!.uuid).toBe(oldUuid3)
+ expect(oldPerson3!.properties.name).toBe('Old 3')
+
+ // Verify new table persons
+ const newPerson1 = result.find((p) => p.distinct_id === 'mixed-new-1')
+ expect(newPerson1).toBeDefined()
+ expect(newPerson1!.id).toBe(String(newId1))
+ expect(newPerson1!.uuid).toBe(newUuid1)
+ expect(newPerson1!.properties.name).toBe('New 1')
+
+ const newPerson2 = result.find((p) => p.distinct_id === 'mixed-new-2')
+ expect(newPerson2).toBeDefined()
+ expect(newPerson2!.id).toBe(String(newId2))
+ expect(newPerson2!.uuid).toBe(newUuid2)
+ expect(newPerson2!.properties.name).toBe('New 2')
+
+ const newPerson3 = result.find((p) => p.distinct_id === 'mixed-new-3')
+ expect(newPerson3).toBeDefined()
+ expect(newPerson3!.id).toBe(String(newId3))
+ expect(newPerson3!.uuid).toBe(newUuid3)
+ expect(newPerson3!.properties.name).toBe('New 3')
+ })
+
+ it('should return empty array when no persons found', async () => {
+ const team = await getFirstTeam(hub)
+ const result = await cutoverRepository.fetchPersonsByDistinctIds([
+ { teamId: team.id, distinctId: 'non-existent-1' },
+ { teamId: team.id, distinctId: 'non-existent-2' },
+ ])
+ expect(result).toEqual([])
+ })
+ })
+
+ describe('updatePerson()', () => {
+ it('should update person in old table when ID < offset', async () => {
+ const team = await getFirstTeam(hub)
+ const lowId = 100
+ const uuid = new UUIDT().toString()
+
+ // Insert person in old table
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ lowId,
+ uuid,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'Original Old' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertOldPerson'
+ )
+
+ // Insert person with same ID in new table (to verify it's not updated)
+ const newTableUuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO ${NEW_TABLE_NAME} (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ lowId,
+ newTableUuid,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'Original New' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertNewPerson'
+ )
+
+ const person = {
+ id: String(lowId),
+ uuid,
+ created_at: TIMESTAMP,
+ team_id: team.id,
+ properties: { name: 'Original Old' },
+ properties_last_updated_at: {},
+ properties_last_operation: {},
+ is_user_id: null,
+ is_identified: true,
+ version: 0,
+ }
+
+ const [updatedPerson] = await cutoverRepository.updatePerson(person, {
+ properties: { name: 'Updated Old' },
+ properties_last_updated_at: {},
+ properties_last_operation: {},
+ is_identified: true,
+ created_at: TIMESTAMP,
+ })
+
+ expect(updatedPerson.properties.name).toBe('Updated Old')
+ expect(updatedPerson.version).toBe(1)
+
+ // Verify update happened in old table
+ const oldTableResult = await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `SELECT * FROM posthog_person WHERE id = $1 AND team_id = $2`,
+ [lowId, team.id],
+ 'checkOldTableUpdate'
+ )
+ expect(oldTableResult.rows[0].properties.name).toBe('Updated Old')
+
+ // Verify new table was NOT updated
+ const newTableResult = await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `SELECT * FROM ${NEW_TABLE_NAME} WHERE id = $1 AND team_id = $2`,
+ [lowId, team.id],
+ 'checkNewTableNotUpdated'
+ )
+ expect(newTableResult.rows[0].properties.name).toBe('Original New')
+ expect(Number(newTableResult.rows[0].version)).toBe(0)
+ })
+
+ it('should update person in new table when ID >= offset', async () => {
+ const team = await getFirstTeam(hub)
+ const highId = ID_OFFSET + 100
+ const uuid = new UUIDT().toString()
+
+ // Insert person in new table
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO ${NEW_TABLE_NAME} (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ highId,
+ uuid,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'Original New' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertNewPerson'
+ )
+
+ // Insert person with same ID in old table (to verify it's not updated)
+ const oldTableUuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ highId,
+ oldTableUuid,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'Original Old' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertOldPerson'
+ )
+
+ const person = {
+ id: String(highId),
+ uuid,
+ created_at: TIMESTAMP,
+ team_id: team.id,
+ properties: { name: 'Original New' },
+ properties_last_updated_at: {},
+ properties_last_operation: {},
+ is_user_id: null,
+ is_identified: true,
+ version: 0,
+ }
+
+ const [updatedPerson] = await cutoverRepository.updatePerson(person, {
+ properties: { name: 'Updated New' },
+ properties_last_updated_at: {},
+ properties_last_operation: {},
+ is_identified: true,
+ created_at: TIMESTAMP,
+ })
+
+ expect(updatedPerson.properties.name).toBe('Updated New')
+ expect(updatedPerson.version).toBe(1)
+
+ // Verify update happened in new table
+ const newTableResult = await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `SELECT * FROM ${NEW_TABLE_NAME} WHERE id = $1 AND team_id = $2`,
+ [highId, team.id],
+ 'checkNewTableUpdate'
+ )
+ expect(newTableResult.rows[0].properties.name).toBe('Updated New')
+
+ // Verify old table was NOT updated
+ const oldTableResult = await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `SELECT * FROM posthog_person WHERE id = $1 AND team_id = $2`,
+ [highId, team.id],
+ 'checkOldTableNotUpdated'
+ )
+ expect(oldTableResult.rows[0].properties.name).toBe('Original Old')
+ expect(Number(oldTableResult.rows[0].version)).toBe(0)
+ })
+ })
+
+ describe('deletePerson()', () => {
+ it('should delete person from old table when ID < offset', async () => {
+ const team = await getFirstTeam(hub)
+ const lowId = 100
+ const uuid = new UUIDT().toString()
+
+ // Insert person in old table
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ lowId,
+ uuid,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'Old Table' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertOldPerson'
+ )
+
+ // Insert person with same ID in new table (to verify it's not deleted)
+ const newTableUuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO ${NEW_TABLE_NAME} (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ lowId,
+ newTableUuid,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'New Table' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertNewPerson'
+ )
+
+ const person = {
+ id: String(lowId),
+ uuid,
+ created_at: TIMESTAMP,
+ team_id: team.id,
+ properties: { name: 'Old Table' },
+ properties_last_updated_at: {},
+ properties_last_operation: {},
+ is_user_id: null,
+ is_identified: true,
+ version: 0,
+ }
+
+ await cutoverRepository.deletePerson(person)
+
+ // Verify person is deleted from old table
+ const oldTableResult = await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `SELECT * FROM posthog_person WHERE id = $1 AND team_id = $2`,
+ [lowId, team.id],
+ 'checkOldTableDelete'
+ )
+ expect(oldTableResult.rows).toHaveLength(0)
+
+ // Verify person in new table was NOT deleted
+ const newTableResult = await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `SELECT * FROM ${NEW_TABLE_NAME} WHERE id = $1 AND team_id = $2`,
+ [lowId, team.id],
+ 'checkNewTableNotDeleted'
+ )
+ expect(newTableResult.rows).toHaveLength(1)
+ expect(newTableResult.rows[0].properties.name).toBe('New Table')
+ })
+
+ it('should delete person from new table when ID >= offset', async () => {
+ const team = await getFirstTeam(hub)
+ const highId = ID_OFFSET + 100
+ const uuid = new UUIDT().toString()
+
+ // Insert person in new table
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO ${NEW_TABLE_NAME} (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ highId,
+ uuid,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'New Table' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertNewPerson'
+ )
+
+ // Insert person with same ID in old table (to verify it's not deleted)
+ const oldTableUuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [
+ highId,
+ oldTableUuid,
+ TIMESTAMP.toISO(),
+ team.id,
+ JSON.stringify({ name: 'Old Table' }),
+ '{}',
+ '{}',
+ null,
+ true,
+ 0,
+ ],
+ 'insertOldPerson'
+ )
+
+ const person = {
+ id: String(highId),
+ uuid,
+ created_at: TIMESTAMP,
+ team_id: team.id,
+ properties: { name: 'New Table' },
+ properties_last_updated_at: {},
+ properties_last_operation: {},
+ is_user_id: null,
+ is_identified: true,
+ version: 0,
+ }
+
+ await cutoverRepository.deletePerson(person)
+
+ // Verify person is deleted from new table
+ const newTableResult = await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `SELECT * FROM ${NEW_TABLE_NAME} WHERE id = $1 AND team_id = $2`,
+ [highId, team.id],
+ 'checkNewTableDelete'
+ )
+ expect(newTableResult.rows).toHaveLength(0)
+
+ // Verify person in old table was NOT deleted
+ const oldTableResult = await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `SELECT * FROM posthog_person WHERE id = $1 AND team_id = $2`,
+ [highId, team.id],
+ 'checkOldTableNotDeleted'
+ )
+ expect(oldTableResult.rows).toHaveLength(1)
+ expect(oldTableResult.rows[0].properties.name).toBe('Old Table')
+ })
+ })
+
+ describe('addDistinctId()', () => {
+ it('should add distinct ID to person in old table', async () => {
+ const team = await getFirstTeam(hub)
+
+ // Create person in old table
+ const lowId = 100
+ const uuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [lowId, uuid, TIMESTAMP.toISO(), team.id, JSON.stringify({}), '{}', '{}', null, true, 0],
+ 'insertOldTablePerson'
+ )
+
+ // Add distinct ID
+ await cutoverRepository.addDistinctId(
+ { id: String(lowId), uuid, team_id: team.id } as any,
+ 'new-distinct-id',
+ 0
+ )
+
+ // Verify distinct ID was added
+ const distinctIdResult = await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `SELECT * FROM posthog_persondistinctid WHERE distinct_id = $1 AND team_id = $2`,
+ ['new-distinct-id', team.id],
+ 'checkDistinctId'
+ )
+ expect(distinctIdResult.rows).toHaveLength(1)
+ expect(distinctIdResult.rows[0].person_id).toBe(String(lowId))
+ })
+
+ it('should add distinct ID to person in new table', async () => {
+ const team = await getFirstTeam(hub)
+
+ // Create person in new table
+ const highId = ID_OFFSET + 100
+ const uuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO ${NEW_TABLE_NAME} (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [highId, uuid, TIMESTAMP.toISO(), team.id, JSON.stringify({}), '{}', '{}', null, true, 0],
+ 'insertNewTablePerson'
+ )
+
+ // Add distinct ID
+ await cutoverRepository.addDistinctId(
+ { id: String(highId), uuid, team_id: team.id } as any,
+ 'new-distinct-id',
+ 0
+ )
+
+ // Verify distinct ID was added
+ const distinctIdResult = await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `SELECT * FROM posthog_persondistinctid WHERE distinct_id = $1 AND team_id = $2`,
+ ['new-distinct-id', team.id],
+ 'checkDistinctId'
+ )
+ expect(distinctIdResult.rows).toHaveLength(1)
+ expect(distinctIdResult.rows[0].person_id).toBe(String(highId))
+ })
+ })
+
+ describe('moveDistinctIds()', () => {
+ it('should move distinct IDs from old table person to old table person', async () => {
+ const team = await getFirstTeam(hub)
+
+ // Create source person in old table with distinct IDs
+ const sourceId = 100
+ const sourceUuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [sourceId, sourceUuid, TIMESTAMP.toISO(), team.id, JSON.stringify({}), '{}', '{}', null, true, 0],
+ 'insertSourcePerson'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version) VALUES ($1, $2, $3, $4)`,
+ ['source-distinct-1', sourceId, team.id, 0],
+ 'insertSourceDistinct1'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version) VALUES ($1, $2, $3, $4)`,
+ ['source-distinct-2', sourceId, team.id, 0],
+ 'insertSourceDistinct2'
+ )
+
+ // Create target person in old table
+ const targetId = 200
+ const targetUuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [targetId, targetUuid, TIMESTAMP.toISO(), team.id, JSON.stringify({}), '{}', '{}', null, true, 0],
+ 'insertTargetPerson'
+ )
+
+ // Move distinct IDs
+ await cutoverRepository.moveDistinctIds(
+ { id: String(sourceId), uuid: sourceUuid, team_id: team.id } as any,
+ { id: String(targetId), uuid: targetUuid, team_id: team.id } as any
+ )
+
+ // Verify distinct IDs were moved to target
+ const movedDistincts = await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `SELECT * FROM posthog_persondistinctid WHERE person_id = $1 AND team_id = $2 ORDER BY distinct_id`,
+ [targetId, team.id],
+ 'checkMovedDistincts'
+ )
+ expect(movedDistincts.rows).toHaveLength(2)
+ expect(movedDistincts.rows[0].distinct_id).toBe('source-distinct-1')
+ expect(movedDistincts.rows[1].distinct_id).toBe('source-distinct-2')
+
+ // Verify source person has no distinct IDs
+ const sourceDistincts = await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `SELECT * FROM posthog_persondistinctid WHERE person_id = $1 AND team_id = $2`,
+ [sourceId, team.id],
+ 'checkSourceDistincts'
+ )
+ expect(sourceDistincts.rows).toHaveLength(0)
+ })
+
+ it('should move distinct IDs from new table person to new table person', async () => {
+ const team = await getFirstTeam(hub)
+
+ // Create source person in new table with distinct IDs
+ const sourceId = ID_OFFSET + 100
+ const sourceUuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO ${NEW_TABLE_NAME} (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [sourceId, sourceUuid, TIMESTAMP.toISO(), team.id, JSON.stringify({}), '{}', '{}', null, true, 0],
+ 'insertSourcePerson'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version) VALUES ($1, $2, $3, $4)`,
+ ['source-distinct-1', sourceId, team.id, 0],
+ 'insertSourceDistinct1'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version) VALUES ($1, $2, $3, $4)`,
+ ['source-distinct-2', sourceId, team.id, 0],
+ 'insertSourceDistinct2'
+ )
+
+ // Create target person in new table
+ const targetId = ID_OFFSET + 200
+ const targetUuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO ${NEW_TABLE_NAME} (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [targetId, targetUuid, TIMESTAMP.toISO(), team.id, JSON.stringify({}), '{}', '{}', null, true, 0],
+ 'insertTargetPerson'
+ )
+
+ // Move distinct IDs
+ await cutoverRepository.moveDistinctIds(
+ { id: String(sourceId), uuid: sourceUuid, team_id: team.id } as any,
+ { id: String(targetId), uuid: targetUuid, team_id: team.id } as any
+ )
+
+ // Verify distinct IDs were moved to target
+ const movedDistincts = await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `SELECT * FROM posthog_persondistinctid WHERE person_id = $1 AND team_id = $2 ORDER BY distinct_id`,
+ [targetId, team.id],
+ 'checkMovedDistincts'
+ )
+ expect(movedDistincts.rows).toHaveLength(2)
+ expect(movedDistincts.rows[0].distinct_id).toBe('source-distinct-1')
+ expect(movedDistincts.rows[1].distinct_id).toBe('source-distinct-2')
+
+ // Verify source person has no distinct IDs
+ const sourceDistincts = await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `SELECT * FROM posthog_persondistinctid WHERE person_id = $1 AND team_id = $2`,
+ [sourceId, team.id],
+ 'checkSourceDistincts'
+ )
+ expect(sourceDistincts.rows).toHaveLength(0)
+ })
+
+ it('should move distinct IDs from old table person to new table person (cross-table merge)', async () => {
+ const team = await getFirstTeam(hub)
+
+ // Create source person in old table with distinct IDs
+ const sourceId = 100
+ const sourceUuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [sourceId, sourceUuid, TIMESTAMP.toISO(), team.id, JSON.stringify({}), '{}', '{}', null, true, 0],
+ 'insertSourcePerson'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version) VALUES ($1, $2, $3, $4)`,
+ ['old-distinct-1', sourceId, team.id, 0],
+ 'insertSourceDistinct1'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version) VALUES ($1, $2, $3, $4)`,
+ ['old-distinct-2', sourceId, team.id, 0],
+ 'insertSourceDistinct2'
+ )
+
+ // Create target person in new table
+ const targetId = ID_OFFSET + 100
+ const targetUuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO ${NEW_TABLE_NAME} (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [targetId, targetUuid, TIMESTAMP.toISO(), team.id, JSON.stringify({}), '{}', '{}', null, true, 0],
+ 'insertTargetPerson'
+ )
+
+ // Move distinct IDs from old to new table person
+ await cutoverRepository.moveDistinctIds(
+ { id: String(sourceId), uuid: sourceUuid, team_id: team.id } as any,
+ { id: String(targetId), uuid: targetUuid, team_id: team.id } as any
+ )
+
+ // Verify distinct IDs were moved to target in new table
+ const movedDistincts = await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `SELECT * FROM posthog_persondistinctid WHERE person_id = $1 AND team_id = $2 ORDER BY distinct_id`,
+ [targetId, team.id],
+ 'checkMovedDistincts'
+ )
+ expect(movedDistincts.rows).toHaveLength(2)
+ expect(movedDistincts.rows[0].distinct_id).toBe('old-distinct-1')
+ expect(movedDistincts.rows[1].distinct_id).toBe('old-distinct-2')
+
+ // Verify source person has no distinct IDs
+ const sourceDistincts = await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `SELECT * FROM posthog_persondistinctid WHERE person_id = $1 AND team_id = $2`,
+ [sourceId, team.id],
+ 'checkSourceDistincts'
+ )
+ expect(sourceDistincts.rows).toHaveLength(0)
+ })
+
+ it('should move distinct IDs from new table person to old table person (cross-table merge)', async () => {
+ const team = await getFirstTeam(hub)
+
+ // Create source person in new table with distinct IDs
+ const sourceId = ID_OFFSET + 100
+ const sourceUuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO ${NEW_TABLE_NAME} (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [sourceId, sourceUuid, TIMESTAMP.toISO(), team.id, JSON.stringify({}), '{}', '{}', null, true, 0],
+ 'insertSourcePerson'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version) VALUES ($1, $2, $3, $4)`,
+ ['new-distinct-1', sourceId, team.id, 0],
+ 'insertSourceDistinct1'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version) VALUES ($1, $2, $3, $4)`,
+ ['new-distinct-2', sourceId, team.id, 0],
+ 'insertSourceDistinct2'
+ )
+
+ // Create target person in old table
+ const targetId = 100
+ const targetUuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [targetId, targetUuid, TIMESTAMP.toISO(), team.id, JSON.stringify({}), '{}', '{}', null, true, 0],
+ 'insertTargetPerson'
+ )
+
+ // Move distinct IDs from new to old table person
+ await cutoverRepository.moveDistinctIds(
+ { id: String(sourceId), uuid: sourceUuid, team_id: team.id } as any,
+ { id: String(targetId), uuid: targetUuid, team_id: team.id } as any
+ )
+
+ // Verify distinct IDs were moved to target in old table
+ const movedDistincts = await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `SELECT * FROM posthog_persondistinctid WHERE person_id = $1 AND team_id = $2 ORDER BY distinct_id`,
+ [targetId, team.id],
+ 'checkMovedDistincts'
+ )
+ expect(movedDistincts.rows).toHaveLength(2)
+ expect(movedDistincts.rows[0].distinct_id).toBe('new-distinct-1')
+ expect(movedDistincts.rows[1].distinct_id).toBe('new-distinct-2')
+
+ // Verify source person has no distinct IDs
+ const sourceDistincts = await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `SELECT * FROM posthog_persondistinctid WHERE person_id = $1 AND team_id = $2`,
+ [sourceId, team.id],
+ 'checkSourceDistincts'
+ )
+ expect(sourceDistincts.rows).toHaveLength(0)
+ })
+
+ it('should handle full merge with __useNewTable routing (fetch + delete)', async () => {
+ const team = await getFirstTeam(hub)
+
+ // Create source person in old table
+ const sourceId = 100
+ const sourceUuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [sourceId, sourceUuid, TIMESTAMP.toISO(), team.id, JSON.stringify({}), '{}', '{}', null, true, 0],
+ 'insertSourcePerson'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version) VALUES ($1, $2, $3, $4)`,
+ ['source-distinct', sourceId, team.id, 0],
+ 'insertSourceDistinct'
+ )
+
+ // Create target person in new table
+ const targetId = 200
+ const targetUuid = new UUIDT().toString()
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_person_new (id, uuid, created_at, team_id, properties, properties_last_updated_at, properties_last_operation, is_user_id, is_identified, version)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
+ [targetId, targetUuid, TIMESTAMP.toISO(), team.id, JSON.stringify({}), '{}', '{}', null, true, 0],
+ 'insertTargetPerson'
+ )
+ await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version) VALUES ($1, $2, $3, $4)`,
+ ['target-distinct', targetId, team.id, 0],
+ 'insertTargetDistinct'
+ )
+
+ // Fetch both persons
+ // With opportunistic copy, both will have __useNewTable=true after fetch
+ const sourcePerson = await cutoverRepository.fetchPerson(team.id, 'source-distinct')
+ const targetPerson = await cutoverRepository.fetchPerson(team.id, 'target-distinct')
+
+ // After opportunistic copy, source is now in new table too
+ expect(sourcePerson!.__useNewTable).toBe(true) // copied to new table
+ expect(targetPerson!.__useNewTable).toBe(true) // already in new table
+
+ // Move distinct IDs from source to target
+ await cutoverRepository.moveDistinctIds(sourcePerson!, targetPerson!)
+
+ // Delete source person - should route to NEW table based on __useNewTable flag
+ await cutoverRepository.deletePerson(sourcePerson!)
+
+ // Verify source was deleted from new table
+ const { rows: newTableRows } = await postgres.query(
+ PostgresUse.PERSONS_READ,
+ 'SELECT * FROM posthog_person_new WHERE id = $1 AND team_id = $2',
+ [sourceId, team.id],
+ 'verifySourceDeletedFromNew'
+ )
+ expect(newTableRows.length).toBe(0)
+
+ // Verify target still exists in new table
+ const { rows: targetRows } = await postgres.query(
+ PostgresUse.PERSONS_READ,
+ 'SELECT * FROM posthog_person_new WHERE id = $1 AND team_id = $2',
+ [targetId, team.id],
+ 'verifyTargetStillExists'
+ )
+ expect(targetRows.length).toBe(1)
+ })
+ })
+
+ describe('Cutover disabled', () => {
+ it('should use old table when cutover is disabled', async () => {
+ const disabledRepository = new PostgresPersonRepository(postgres, {
+ calculatePropertiesSize: 0,
+ personPropertiesDbConstraintLimitBytes: 1024 * 1024,
+ personPropertiesTrimTargetBytes: 512 * 1024,
+ tableCutoverEnabled: false,
+ newTableName: NEW_TABLE_NAME,
+ newTableIdOffset: ID_OFFSET,
+ })
+
+ const team = await getFirstTeam(hub)
+ const uuid = new UUIDT().toString()
+
+ const result = await disabledRepository.createPerson(
+ TIMESTAMP,
+ { name: 'Disabled Cutover' },
+ {},
+ {},
+ team.id,
+ null,
+ true,
+ uuid,
+ [{ distinctId: 'disabled-distinct' }]
+ )
+
+ expect(result.success).toBe(true)
+
+ // Verify person is in old table
+ const oldTableResult = await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `SELECT * FROM posthog_person WHERE uuid = $1`,
+ [uuid],
+ 'checkOldTable'
+ )
+ expect(oldTableResult.rows).toHaveLength(1)
+
+ // Verify person is NOT in new table
+ const newTableResult = await postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ `SELECT * FROM ${NEW_TABLE_NAME} WHERE uuid = $1`,
+ [uuid],
+ 'checkNewTable'
+ )
+ expect(newTableResult.rows).toHaveLength(0)
+ })
+ })
+ })
})
// Helper function from the original test file
diff --git a/plugin-server/src/worker/ingestion/persons/repositories/postgres-person-repository.ts b/plugin-server/src/worker/ingestion/persons/repositories/postgres-person-repository.ts
index 89528a6148..844f1e270a 100644
--- a/plugin-server/src/worker/ingestion/persons/repositories/postgres-person-repository.ts
+++ b/plugin-server/src/worker/ingestion/persons/repositories/postgres-person-repository.ts
@@ -8,6 +8,7 @@ import { TopicMessage } from '../../../../kafka/producer'
import {
InternalPerson,
PersonDistinctId,
+ PersonUpdateFields,
PropertiesLastOperation,
PropertiesLastUpdatedAt,
RawPerson,
@@ -24,7 +25,11 @@ import { PostgresRouter, PostgresUse, TransactionClient } from '../../../../util
import { generateKafkaPersonUpdateMessage, sanitizeJsonbValue, unparsePersonPartial } from '../../../../utils/db/utils'
import { logger } from '../../../../utils/logger'
import { NoRowsUpdatedError, sanitizeSqlIdentifier } from '../../../../utils/utils'
-import { oversizedPersonPropertiesTrimmedCounter, personPropertiesSizeViolationCounter } from '../metrics'
+import {
+ oversizedPersonPropertiesTrimmedCounter,
+ personJsonFieldSizeHistogram,
+ personPropertiesSizeViolationCounter,
+} from '../metrics'
import { canTrimProperty } from '../person-property-utils'
import { PersonUpdate } from '../person-update-batch'
import { InternalPersonWithDistinctId, PersonPropertiesSizeViolationError, PersonRepository } from './person-repository'
@@ -41,12 +46,21 @@ export interface PostgresPersonRepositoryOptions {
personPropertiesDbConstraintLimitBytes: number
/** Target JSON size (stringified) to trim down to when remediating oversized properties */
personPropertiesTrimTargetBytes: number
+ /** Enable person table cutover migration */
+ tableCutoverEnabled?: boolean
+ /** New person table name for cutover migration */
+ newTableName?: string
+ /** Person ID offset threshold - person IDs >= this value route to new table */
+ newTableIdOffset?: number
}
const DEFAULT_OPTIONS: PostgresPersonRepositoryOptions = {
calculatePropertiesSize: 0,
personPropertiesDbConstraintLimitBytes: DEFAULT_PERSON_PROPERTIES_DB_CONSTRAINT_LIMIT_BYTES,
personPropertiesTrimTargetBytes: DEFAULT_PERSON_PROPERTIES_TRIM_TARGET_BYTES,
+ tableCutoverEnabled: false,
+ newTableName: 'posthog_person_new',
+ newTableIdOffset: Number.MAX_SAFE_INTEGER,
}
export class PostgresPersonRepository
@@ -61,12 +75,36 @@ export class PostgresPersonRepository
this.options = { ...DEFAULT_OPTIONS, ...options }
}
+ private getTableName(personId?: string, person?: InternalPerson): string {
+ if (!this.options.tableCutoverEnabled || !this.options.newTableName || !this.options.newTableIdOffset) {
+ return 'posthog_person'
+ }
+
+ // If person object provided with routing decision, use it
+ if (person?.__useNewTable !== undefined) {
+ return person.__useNewTable ? this.options.newTableName : 'posthog_person'
+ }
+
+ // Fall back to ID-based routing
+ if (!personId) {
+ return 'posthog_person'
+ }
+
+ const numericPersonId = parseInt(personId, 10)
+ if (isNaN(numericPersonId)) {
+ return 'posthog_person'
+ }
+
+ // Always return unsanitized name - callers must sanitize before SQL interpolation
+ return numericPersonId >= this.options.newTableIdOffset ? this.options.newTableName : 'posthog_person'
+ }
+
private async handleOversizedPersonProperties(
person: InternalPerson,
- update: Partial,
+ update: PersonUpdateFields,
tx?: TransactionClient
): Promise<[InternalPerson, TopicMessage[], boolean]> {
- const currentSize = await this.personPropertiesSize(person.id)
+ const currentSize = await this.personPropertiesSize(person.id, person.team_id, person)
if (currentSize >= this.options.personPropertiesDbConstraintLimitBytes) {
try {
@@ -109,7 +147,7 @@ export class PostgresPersonRepository
private async handleExistingOversizedRecord(
person: InternalPerson,
- update: Partial,
+ update: PersonUpdateFields,
tx?: TransactionClient
): Promise<[InternalPerson, TopicMessage[], boolean]> {
try {
@@ -121,7 +159,7 @@ export class PostgresPersonRepository
{ teamId: person.team_id, personId: person.id }
)
- const trimmedUpdate: Partial = {
+ const trimmedUpdate: PersonUpdateFields = {
...update,
properties: trimmedProperties,
}
@@ -217,38 +255,173 @@ export class PostgresPersonRepository
throw new Error("can't enable both forUpdate and useReadReplica in db::fetchPerson")
}
- let queryString = `SELECT
- posthog_person.id,
- posthog_person.uuid,
- posthog_person.created_at,
- posthog_person.team_id,
- posthog_person.properties,
- posthog_person.properties_last_updated_at,
- posthog_person.properties_last_operation,
- posthog_person.is_user_id,
- posthog_person.version,
- posthog_person.is_identified
- FROM posthog_person
- JOIN posthog_persondistinctid ON (posthog_persondistinctid.person_id = posthog_person.id)
- WHERE
- posthog_person.team_id = $1
- AND posthog_persondistinctid.team_id = $1
- AND posthog_persondistinctid.distinct_id = $2`
- if (options.forUpdate) {
- // Locks the teamId and distinctId tied to this personId + this person's info
- queryString = queryString.concat(` FOR UPDATE`)
- }
- const values = [teamId, distinctId]
+ if (this.options.tableCutoverEnabled && this.options.newTableName && this.options.newTableIdOffset) {
+ // First, get the person_id from posthog_persondistinctid
+ const distinctIdQuery = `
+ SELECT person_id
+ FROM posthog_persondistinctid
+ WHERE team_id = $1 AND distinct_id = $2
+ LIMIT 1`
- const { rows } = await this.postgres.query(
- options.useReadReplica ? PostgresUse.PERSONS_READ : PostgresUse.PERSONS_WRITE,
- queryString,
- values,
- 'fetchPerson'
- )
+ const { rows: distinctIdRows } = await this.postgres.query<{ person_id: string }>(
+ options.useReadReplica ? PostgresUse.PERSONS_READ : PostgresUse.PERSONS_WRITE,
+ distinctIdQuery,
+ [teamId, distinctId],
+ 'fetchPersonDistinctIdMapping'
+ )
- if (rows.length > 0) {
- return this.toPerson(rows[0])
+ if (distinctIdRows.length === 0) {
+ return undefined
+ }
+
+ const personId = distinctIdRows[0].person_id
+ const forUpdateClause = options.forUpdate ? ' FOR UPDATE' : ''
+
+ // Check new table first (by existence, not by ID threshold)
+ const newTableName = sanitizeSqlIdentifier(this.options.newTableName)
+ const personQueryNew = `
+ SELECT
+ id,
+ uuid,
+ created_at,
+ team_id,
+ properties,
+ properties_last_updated_at,
+ properties_last_operation,
+ is_user_id,
+ version,
+ is_identified
+ FROM ${newTableName}
+ WHERE team_id = $1 AND id = $2${forUpdateClause}`
+
+ const { rows: newTableRows } = await this.postgres.query(
+ options.useReadReplica ? PostgresUse.PERSONS_READ : PostgresUse.PERSONS_WRITE,
+ personQueryNew,
+ [teamId, personId],
+ 'fetchPersonFromNewTable'
+ )
+
+ if (newTableRows.length > 0) {
+ const person = this.toPerson(newTableRows[0])
+ // Mark that this person exists in the new table
+ ;(person as any).__useNewTable = true
+ return person
+ }
+
+ // Fall back to old table
+ const personQueryOld = `
+ SELECT
+ id,
+ uuid,
+ created_at,
+ team_id,
+ properties,
+ properties_last_updated_at,
+ properties_last_operation,
+ is_user_id,
+ version,
+ is_identified
+ FROM posthog_person
+ WHERE team_id = $1 AND id = $2${forUpdateClause}`
+
+ const { rows: oldTableRows } = await this.postgres.query(
+ options.useReadReplica ? PostgresUse.PERSONS_READ : PostgresUse.PERSONS_WRITE,
+ personQueryOld,
+ [teamId, personId],
+ 'fetchPersonFromOldTable'
+ )
+
+ if (oldTableRows.length > 0) {
+ const person = this.toPerson(oldTableRows[0])
+
+ // Opportunistically copy person to new table
+ // This allows all future operations to go directly to new table (avoiding slow triggers)
+ // Skip copy when using read replica to maintain read-only intent
+ if (!options.useReadReplica) {
+ try {
+ const copyQuery = `
+ INSERT INTO ${newTableName} (
+ id,
+ uuid,
+ created_at,
+ team_id,
+ properties,
+ properties_last_updated_at,
+ properties_last_operation,
+ is_user_id,
+ version,
+ is_identified
+ )
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
+ ON CONFLICT (team_id, id) DO NOTHING
+ RETURNING id`
+
+ await this.postgres.query(
+ PostgresUse.PERSONS_WRITE,
+ copyQuery,
+ [
+ person.id,
+ person.uuid,
+ person.created_at.toISO(),
+ person.team_id,
+ sanitizeJsonbValue(person.properties),
+ sanitizeJsonbValue(person.properties_last_updated_at),
+ sanitizeJsonbValue(person.properties_last_operation),
+ person.is_user_id,
+ person.version,
+ person.is_identified,
+ ],
+ 'copyPersonToNewTable'
+ )
+
+ // Person is now in new table, future operations can use it
+ ;(person as any).__useNewTable = true
+ } catch (error) {
+ // If copy fails for any reason, log but continue with old table routing
+ logger.warn('Failed to copy person to new table', {
+ error: error instanceof Error ? error.message : String(error),
+ person_id: person.id,
+ team_id: person.team_id,
+ })
+ ;(person as any).__useNewTable = false
+ }
+ } else {
+ // When using read replica, don't attempt write operation
+ ;(person as any).__useNewTable = false
+ }
+
+ return person
+ }
+ } else {
+ const forUpdateClause = options.forUpdate ? ' FOR UPDATE' : ''
+ const queryString = `SELECT
+ posthog_person.id,
+ posthog_person.uuid,
+ posthog_person.created_at,
+ posthog_person.team_id,
+ posthog_person.properties,
+ posthog_person.properties_last_updated_at,
+ posthog_person.properties_last_operation,
+ posthog_person.is_user_id,
+ posthog_person.version,
+ posthog_person.is_identified
+ FROM posthog_person
+ JOIN posthog_persondistinctid ON (posthog_persondistinctid.person_id = posthog_person.id)
+ WHERE
+ posthog_person.team_id = $1
+ AND posthog_persondistinctid.team_id = $1
+ AND posthog_persondistinctid.distinct_id = $2${forUpdateClause}`
+
+ const { rows } = await this.postgres.query(
+ options.useReadReplica ? PostgresUse.PERSONS_READ : PostgresUse.PERSONS_WRITE,
+ queryString,
+ [teamId, distinctId],
+ 'fetchPerson'
+ )
+
+ if (rows.length > 0) {
+ return this.toPerson(rows[0])
+ }
}
}
@@ -259,45 +432,187 @@ export class PostgresPersonRepository
return []
}
- // Build the WHERE clause for multiple team_id, distinct_id pairs
- const conditions = teamPersons
- .map((_, index) => {
- const teamIdParam = index * 2 + 1
- const distinctIdParam = index * 2 + 2
- return `(posthog_persondistinctid.team_id = $${teamIdParam} AND posthog_persondistinctid.distinct_id = $${distinctIdParam})`
- })
- .join(' OR ')
-
- const queryString = `SELECT
- posthog_person.id,
- posthog_person.uuid,
- posthog_person.created_at,
- posthog_person.team_id,
- posthog_person.properties,
- posthog_person.properties_last_updated_at,
- posthog_person.properties_last_operation,
- posthog_person.is_user_id,
- posthog_person.version,
- posthog_person.is_identified,
- posthog_persondistinctid.distinct_id
- FROM posthog_person
- JOIN posthog_persondistinctid ON (posthog_persondistinctid.person_id = posthog_person.id)
- WHERE ${conditions}`
-
- // Flatten the parameters: [teamId1, distinctId1, teamId2, distinctId2, ...]
const params = teamPersons.flatMap((person) => [person.teamId, person.distinctId])
- const { rows } = await this.postgres.query |