feat: Add Revenue Analytics v0 (#30895)
Co-authored-by: github-actions <41898282+github-actions[bot]@users.noreply.github.com>
Binary file not shown. Before: 125 KiB, After: 115 KiB
Binary file not shown. Before: 126 KiB, After: 117 KiB
@@ -73,6 +73,7 @@
"@posthog/products-persons": "workspace:*",
"@posthog/products-product-analytics": "workspace:*",
"@posthog/products-replay": "workspace:*",
"@posthog/products-revenue-analytics": "workspace:*",
"@posthog/products-web-analytics": "workspace:*",
"@posthog/rrweb": "0.0.13",
"@posthog/rrweb-plugin-console-record": "0.0.13",
@@ -14,6 +14,7 @@ import {
IconNotebook,
IconPeople,
IconPieChart,
IconPiggyBank,
IconPlug,
IconPlusSmall,
IconRewindPlay,
@@ -642,6 +643,15 @@ export const navigation3000Logic = kea<navigation3000LogicType>([
tag: 'alpha' as const,
}
: null,
featureFlags[FEATURE_FLAGS.REVENUE_ANALYTICS]
? {
identifier: Scene.RevenueAnalytics,
label: 'Revenue analytics',
icon: <IconPiggyBank />,
to: urls.revenueAnalytics(),
tag: 'beta' as const,
}
: null,
].filter(isNotNil) as NavbarItem[],
]
},
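Aside: the navbar entries above use a compact gating pattern, where each flag-gated item evaluates to either an object or null, and `isNotNil` narrows the array afterwards. A minimal standalone sketch of the same pattern (all names here are illustrative, not part of this commit):

```typescript
type NavItem = { identifier: string; label: string }

// Hypothetical flags record; in PostHog this would come from featureFlagLogic.
const flags: Record<string, boolean> = { 'revenue-analytics': true }

const items: NavItem[] = [
    flags['revenue-analytics'] ? { identifier: 'RevenueAnalytics', label: 'Revenue analytics' } : null,
].filter((item): item is NavItem => item !== null)
```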
@@ -118,7 +118,7 @@ export const getDefaultTreeExplore = (groupNodes: FileSystemImport[]): FileSyste
{
path: 'Data management/Revenue',
icon: <IconHandMoney />,
href: () => urls.revenue(),
href: () => urls.revenueSettings(),
flag: FEATURE_FLAGS.WEB_REVENUE_TRACKING,
},
{
@@ -108,15 +108,6 @@ export const MOCK_DEFAULT_TEAM: TeamType = {
revenueCurrencyProperty: { static: CurrencyCode.ZAR },
},
],
dataWarehouseTables: [
{
tableName: 'mock_table',
distinctIdColumn: 'distinct_id',
timestampColumn: 'timestamp',
revenueColumn: 'revenue',
revenueCurrencyColumn: { static: CurrencyCode.SCR },
},
],
},
flags_persistence_default: false,
access_control_version: 'v1',
@@ -240,6 +240,7 @@ export const FEATURE_FLAGS = {
SESSION_RECORDINGS_PLAYLIST_COUNT_COLUMN: 'session-recordings-playlist-count-column', // owner: @pauldambra #team-replay
ONBOARDING_NEW_INSTALLATION_STEP: 'onboarding-new-installation-step', // owner: @joshsny #team-growth
WEB_ANALYTICS_PAGE_REPORTS: 'web-analytics-page-reports', // owner: @lricoy #team-web-analytics
REVENUE_ANALYTICS: 'revenue-analytics', // owner: @rafaeelaudibert #team-revenue-analytics
SUPPORT_FORM_IN_ONBOARDING: 'support-form-in-onboarding', // owner: @joshsny #team-growth
AI_SETUP_WIZARD: 'ai-setup-wizard', // owner: @joshsny #team-growth
CRM_BLOCKING_QUERIES: 'crm-blocking-queries', // owner: @danielbachhuber #team-crm
@@ -247,7 +248,6 @@
RECORDINGS_SIMILAR_RECORDINGS: 'recordings-similar-recordings', // owner: @veryayskiy #team-replay
RECORDINGS_BLOBBY_V2_REPLAY: 'recordings-blobby-v2-replay', // owner: @pl #team-cdp
SETTINGS_SESSIONS_V2_JOIN: 'settings-sessions-v2-join', // owner: @robbie-c #team-web-analytics
WEB_ANALYTICS_DATA_WAREHOUSE_REVENUE_SETTINGS: 'web-analytics-data-warehouse-revenue-settings', // owner: @rafaeelaudibert #team-web-analytics
SAVE_INSIGHT_TASK: 'save-insight-task', // owner: @joshsny #team-growth
B2B_ANALYTICS: 'b2b-analytics', // owner: @danielbachhuber #team-crm
DASHBOARD_COLORS: 'dashboard-colors', // owner: @thmsobrmlr #team-product-analytics
@@ -9,6 +9,7 @@ import {
IconNotebook,
IconPerson,
IconPieChart,
IconPiggyBank,
IconRewindPlay,
IconRocket,
IconTestTube,
@@ -42,6 +43,7 @@ export const productScenes: Record<string, () => Promise<any>> = {
MessagingCampaigns: () => import('../../products/messaging/frontend/Campaigns'),
MessagingBroadcasts: () => import('../../products/messaging/frontend/Broadcasts'),
MessagingLibrary: () => import('../../products/messaging/frontend/Library'),
RevenueAnalytics: () => import('../../products/revenue_analytics/frontend/RevenueAnalyticsScene'),
}

/** This const is auto-generated, as is the whole file */
@@ -63,6 +65,7 @@ export const productRoutes: Record<string, [string, string]> = {
'/messaging/library': ['MessagingLibrary', 'messagingLibrary'],
'/messaging/library/new': ['MessagingLibrary', 'messagingLibraryNew'],
'/messaging/library/:id': ['MessagingLibrary', 'messagingLibraryTemplate'],
'/revenue_analytics': ['RevenueAnalytics', 'revenueAnalytics'],
}

/** This const is auto-generated, as is the whole file */
@@ -109,6 +112,12 @@ export const productConfiguration: Record<string, any> = {
MessagingCampaigns: { name: 'Messaging', projectBased: true },
MessagingBroadcasts: { name: 'Messaging', projectBased: true },
MessagingLibrary: { name: 'Messaging', projectBased: true },
RevenueAnalytics: {
name: 'Revenue Analytics',
projectBased: true,
defaultDocsPath: '/docs/revenue-analytics',
activityScope: 'RevenueAnalytics',
},
}

/** This const is auto-generated, as is the whole file */
@@ -245,6 +254,7 @@ export const productUrls = {
replaySingle: (id: string): string => `/replay/${id}`,
replayFilePlayback: (): string => '/replay/file-playback',
replaySettings: (sectionId?: string): string => `/replay/settings${sectionId ? `?sectionId=${sectionId}` : ''}`,
revenueAnalytics: (): string => '/revenue_analytics',
webAnalytics: (): string => `/web`,
webAnalyticsWebVitals: (): string => `/web/web-vitals`,
webAnalyticsPageReports: (): string => `/web/page-reports`,
@@ -282,6 +292,7 @@ export const treeItemsNew = [
export const treeItemsExplore = [
{ path: 'Data management/Actions', icon: <IconRocket />, href: () => urls.actions() },
{ path: 'Early access features', icon: <IconRocket />, href: () => urls.earlyAccessFeatures() },
{ path: 'Explore/Revenue analytics', icon: <IconPiggyBank />, href: () => urls.revenueAnalytics() },
{ path: 'People and groups/People', icon: <IconPerson />, href: () => urls.persons() },
{ path: 'Recordings/Playlists', href: () => urls.replay(ReplayTabs.Playlists), icon: <IconRewindPlay /> },
{ path: 'Recordings/Recordings', href: () => urls.replay(ReplayTabs.Home), icon: <IconRewindPlay /> },
@@ -8,8 +8,8 @@ import { LemonBanner } from 'lib/lemon-ui/LemonBanner'
import { Tooltip } from 'lib/lemon-ui/Tooltip'
import { humanFriendlyDuration, humanFriendlyLargeNumber, isNotNil, range } from 'lib/utils'
import { getCurrencySymbol } from 'lib/utils/geography/currency'
import { revenueEventsSettingsLogic } from 'products/revenue_analytics/frontend/settings/revenueEventsSettingsLogic'
import { useState } from 'react'
import { revenueEventsSettingsLogic } from 'scenes/data-management/revenue/revenueEventsSettingsLogic'
import { urls } from 'scenes/urls'

import { EvenlyDistributedRows } from '~/queries/nodes/WebOverview/EvenlyDistributedRows'
@@ -224,7 +224,7 @@ const settingsLinkFromKey = (key: string): string | null => {
switch (key) {
case 'revenue':
case 'conversion revenue':
return urls.revenue()
return urls.revenueSettings()
default:
return null
}
@@ -15140,12 +15140,9 @@
},
"response": {
"$ref": "#/definitions/RevenueExampleDataWarehouseTablesQueryResponse"
},
"revenueTrackingConfig": {
"$ref": "#/definitions/RevenueTrackingConfig"
}
},
"required": ["kind", "revenueTrackingConfig"],
"required": ["kind"],
"type": "object"
},
"RevenueExampleDataWarehouseTablesQueryResponse": {
@@ -15215,12 +15212,9 @@
},
"response": {
"$ref": "#/definitions/RevenueExampleEventsQueryResponse"
},
"revenueTrackingConfig": {
"$ref": "#/definitions/RevenueTrackingConfig"
}
},
"required": ["kind", "revenueTrackingConfig"],
"required": ["kind"],
"type": "object"
},
"RevenueExampleEventsQueryResponse": {
@@ -15278,13 +15272,6 @@
"$ref": "#/definitions/CurrencyCode",
"default": "USD"
},
"dataWarehouseTables": {
"default": [],
"items": {
"$ref": "#/definitions/RevenueTrackingDataWarehouseTable"
},
"type": "array"
},
"events": {
"default": [],
"items": {
@@ -15293,32 +15280,7 @@
"type": "array"
}
},
"required": ["baseCurrency", "events", "dataWarehouseTables"],
"type": "object"
},
"RevenueTrackingDataWarehouseTable": {
"additionalProperties": false,
"properties": {
"distinctIdColumn": {
"type": "string"
},
"revenueColumn": {
"type": "string"
},
"revenueCurrencyColumn": {
"$ref": "#/definitions/RevenueCurrencyPropertyConfig",
"default": {
"static": "USD"
}
},
"tableName": {
"type": "string"
},
"timestampColumn": {
"type": "string"
}
},
"required": ["tableName", "distinctIdColumn", "timestampColumn", "revenueColumn", "revenueCurrencyColumn"],
"required": ["baseCurrency", "events"],
"type": "object"
},
"RevenueTrackingEventItem": {
@@ -1763,7 +1763,6 @@ export type CachedSessionAttributionExplorerQueryResponse = CachedQueryResponse<

export interface RevenueExampleEventsQuery extends DataNode<RevenueExampleEventsQueryResponse> {
kind: NodeKind.RevenueExampleEventsQuery
revenueTrackingConfig: RevenueTrackingConfig
limit?: integer
offset?: integer
}
@@ -1780,7 +1779,6 @@ export type CachedRevenueExampleEventsQueryResponse = CachedQueryResponse<Revenu
export interface RevenueExampleDataWarehouseTablesQuery
extends DataNode<RevenueExampleDataWarehouseTablesQueryResponse> {
kind: NodeKind.RevenueExampleDataWarehouseTablesQuery
revenueTrackingConfig: RevenueTrackingConfig
limit?: integer
offset?: integer
}
@@ -2775,18 +2773,6 @@ export interface RevenueTrackingEventItem {
revenueCurrencyProperty: RevenueCurrencyPropertyConfig
}

export interface RevenueTrackingDataWarehouseTable {
tableName: string
distinctIdColumn: string
timestampColumn: string
revenueColumn: string

/**
* @default {"static": "USD"}
*/
revenueCurrencyColumn: RevenueCurrencyPropertyConfig
}

export interface RevenueTrackingConfig {
/**
* @default 'USD'
@@ -2797,9 +2783,4 @@ export interface RevenueTrackingConfig {
* @default []
*/
events: RevenueTrackingEventItem[]

/**
* @default []
*/
dataWarehouseTables: RevenueTrackingDataWarehouseTable[]
}
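After this hunk, `RevenueTrackingConfig` keeps only `baseCurrency` and `events`; the `dataWarehouseTables` field and the `RevenueTrackingDataWarehouseTable` interface are removed. A hedged sketch of a config value under the new shape, using only fields visible elsewhere in this diff (the concrete values are illustrative, and the full `RevenueTrackingEventItem` may define more fields):

```typescript
const config: RevenueTrackingConfig = {
    baseCurrency: CurrencyCode.USD, // schema default is 'USD'
    events: [
        {
            eventName: 'purchase', // illustrative event name
            revenueProperty: 'revenue', // illustrative property holding the amount
            revenueCurrencyProperty: { static: CurrencyCode.USD }, // or { property: '...' }
        },
    ],
}
```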
@@ -13,6 +13,7 @@ export const appScenes: Record<Scene | string, () => any> = {
[Scene.Insight]: () => import('./insights/InsightScene'),
[Scene.WebAnalytics]: () => import('./web-analytics/WebAnalyticsScene'),
[Scene.WebAnalyticsWebVitals]: () => import('./web-analytics/WebAnalyticsScene'),
[Scene.RevenueAnalytics]: () => import('products/revenue_analytics/frontend/RevenueAnalyticsScene'),
[Scene.Cohort]: () => import('./cohorts/Cohort'),
[Scene.DataManagement]: () => import('./data-management/DataManagementScene'),
[Scene.Activity]: () => import('./activity/ActivityScene'),
@@ -10,11 +10,11 @@ import { LemonTag } from 'lib/lemon-ui/LemonTag'
import { Tooltip } from 'lib/lemon-ui/Tooltip'
import { featureFlagLogic } from 'lib/logic/featureFlagLogic'
import { capitalizeFirstLetter } from 'lib/utils'
import { RevenueEventsSettings } from 'products/revenue_analytics/frontend/settings/RevenueEventsSettings'
import React from 'react'
import { NewActionButton } from 'scenes/actions/NewActionButton'
import { Annotations } from 'scenes/annotations'
import { NewAnnotationButton } from 'scenes/annotations/AnnotationModal'
import { RevenueEventsSettings } from 'scenes/data-management/revenue/RevenueEventsSettings'
import { Scene, SceneExport } from 'scenes/sceneTypes'
import { urls } from 'scenes/urls'

@@ -98,7 +98,7 @@ const tabs: Record<
),
},
[DataManagementTab.Revenue]: {
url: urls.revenue(),
url: urls.revenueSettings(),
label: (
<>
Revenue{' '}
@@ -1,247 +0,0 @@
import { IconInfo, IconTrash } from '@posthog/icons'
import { useActions, useValues } from 'kea'
import { DataWarehousePopoverField, TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types'
import { TaxonomicPopover } from 'lib/components/TaxonomicPopover/TaxonomicPopover'
import { LemonButton } from 'lib/lemon-ui/LemonButton'
import { LemonTable } from 'lib/lemon-ui/LemonTable'
import { Tooltip } from 'lib/lemon-ui/Tooltip'
import { useCallback } from 'react'

import { RevenueTrackingDataWarehouseTable } from '~/queries/schema/schema-general'

import { databaseTableListLogic } from '../database/databaseTableListLogic'
import { CurrencyDropdown } from './CurrencyDropdown'
import { revenueEventsSettingsLogic } from './revenueEventsSettingsLogic'

type DataWarehousePopoverFieldKey = 'revenueField' | 'currencyField' | 'timestampField' | 'distinctIdColumn'

// NOTE: Not allowing HogQL right now, but we could add it in the future
const DATA_WAREHOUSE_POPOVER_FIELDS: {
key: DataWarehousePopoverFieldKey
label: string
description: string
optional?: boolean
}[] = [
{
key: 'distinctIdColumn' as const,
label: 'Distinct ID Column',
description: 'The distinct ID column in your table that uniquely identifies a row.',
},
{
key: 'timestampField' as const,
label: 'Timestamp Field',
description:
"The timestamp of the revenue entry. We'll use this to order the revenue entries and properly filter them on Web Analytics.",
},
{
key: 'revenueField' as const,
label: 'Revenue Field',
description: 'The revenue amount of the entry.',
},
{
key: 'currencyField' as const,
label: 'Revenue Currency Field',
description:
"The currency code for this revenue entry. E.g. USD, EUR, GBP, etc. If not set, you'll be able to choose a static currency for all entries in this table.",
optional: true,
},
] satisfies DataWarehousePopoverField[]

export function DataWarehouseTablesConfiguration({
buttonRef,
}: {
buttonRef: React.RefObject<HTMLButtonElement>
}): JSX.Element {
const {
baseCurrency,
dataWarehouseTables,
saveDataWarehouseTablesDisabledReason,
changesMadeToDataWarehouseTables,
} = useValues(revenueEventsSettingsLogic)
const {
addDataWarehouseTable,
deleteDataWarehouseTable,
updateDataWarehouseTableColumn,
updateDataWarehouseTableRevenueCurrencyColumn,
save,
} = useActions(revenueEventsSettingsLogic)

const { dataWarehouseTablesMap } = useValues(databaseTableListLogic)

// Restricting to timestampColumn and revenueColumn because currency column
// is slightly more complicated than that
const renderPropertyColumn = useCallback(
(key: keyof RevenueTrackingDataWarehouseTable & ('timestampColumn' | 'revenueColumn' | 'distinctIdColumn')) =>
// eslint-disable-next-line react/display-name
(_: any, item: RevenueTrackingDataWarehouseTable) => {
return (
<TaxonomicPopover
size="small"
className="my-1"
groupType={TaxonomicFilterGroupType.DataWarehouseProperties}
onChange={(newValue) => updateDataWarehouseTableColumn(item.tableName, key, newValue)}
value={item[key]}
schemaColumns={Object.values(dataWarehouseTablesMap?.[item.tableName]?.fields ?? {})}
placeholder="Choose column"
/>
)
},
[dataWarehouseTablesMap, updateDataWarehouseTableColumn]
)

return (
<div>
<h3 className="mb-2">Data warehouse tables configuration</h3>
<LemonTable<RevenueTrackingDataWarehouseTable>
columns={[
{ key: 'tableName', title: 'Data warehouse table name', dataIndex: 'tableName' },
{
key: 'distinctIdColumn',
title: (
<span>
Distinct ID column
<Tooltip title="The distinct ID column in your table that uniquely identifies a row.">
<IconInfo className="ml-1" />
</Tooltip>
</span>
),
dataIndex: 'distinctIdColumn',
render: renderPropertyColumn('distinctIdColumn'),
},
{
key: 'timestampColumn',
title: (
<span>
Timestamp column
<Tooltip title="The timestamp column in your table that identifies when the revenue entry was created. We'll use this to order the revenue entries and properly filter them by timestamp.">
<IconInfo className="ml-1" />
</Tooltip>
</span>
),
dataIndex: 'timestampColumn',
render: renderPropertyColumn('timestampColumn'),
},
{
key: 'revenueColumn',
title: 'Revenue column',
dataIndex: 'revenueColumn',
render: renderPropertyColumn('revenueColumn'),
},
{
key: 'revenueCurrencyColumn',
title: (
<span>
Revenue currency column
<Tooltip title="The currency of this revenue entry in your table. You can choose between a column on your table OR a hardcoded currency.">
<IconInfo className="ml-1" />
</Tooltip>
</span>
),
dataIndex: 'revenueCurrencyColumn',
render: (_, item: RevenueTrackingDataWarehouseTable) => {
return (
<div className="flex flex-col w-full gap-3 my-1 min-w-[250px] whitespace-nowrap">
<div className="flex flex-row gap-1">
<span className="font-bold">Dynamic column: </span>
<TaxonomicPopover
size="small"
groupType={TaxonomicFilterGroupType.DataWarehouseProperties}
onChange={(newValue) =>
updateDataWarehouseTableRevenueCurrencyColumn(item.tableName, {
property: newValue!,
})
}
value={item.revenueCurrencyColumn.property ?? null}
schemaColumns={Object.values(
dataWarehouseTablesMap?.[item.tableName]?.fields ?? {}
)}
placeholder="Choose column"
/>
</div>
<div className="flex flex-row gap-1">
or <span className="font-bold">Static currency: </span>
<CurrencyDropdown
size="small"
onChange={(currency) =>
updateDataWarehouseTableRevenueCurrencyColumn(item.tableName, {
static: currency!,
})
}
value={item.revenueCurrencyColumn.static ?? null}
/>
</div>
</div>
)
},
},
{
key: 'delete',
fullWidth: true,
title: (
<div className="flex flex-col gap-1 items-end w-full">
<div className="flex flex-row w-full gap-1 justify-end my-2">
<TaxonomicPopover
type="primary"
groupType={TaxonomicFilterGroupType.DataWarehouse}
dataWarehousePopoverFields={DATA_WAREHOUSE_POPOVER_FIELDS}
onChange={(tableName, groupType, properties) => {
// Sanity check, should always be DataWarehouse because we specify above
if (groupType !== TaxonomicFilterGroupType.DataWarehouse) {
return
}

const typedProperties = properties as Record<
DataWarehousePopoverFieldKey,
string
>
addDataWarehouseTable({
tableName: tableName as string,
revenueColumn: typedProperties.revenueField,
distinctIdColumn: typedProperties.distinctIdColumn,
revenueCurrencyColumn: typedProperties.currencyField
? { property: typedProperties.currencyField }
: { static: baseCurrency },
timestampColumn: typedProperties.timestampField,
})
}}
value={undefined}
placeholder="Create external data schema"
placeholderClass=""
id="data-management-revenue-settings-add-event"
ref={buttonRef}
/>

<LemonButton
type="primary"
onClick={save}
disabledReason={saveDataWarehouseTablesDisabledReason}
>
Save
</LemonButton>
</div>
{changesMadeToDataWarehouseTables && (
<span className="text-xs text-error normal-case font-normal">
Remember to save your changes
</span>
)}
</div>
),
render: (_, item) => (
<LemonButton
className="float-right"
size="small"
type="secondary"
onClick={() => deleteDataWarehouseTable(item.tableName)}
icon={<IconTrash />}
>
Delete
</LemonButton>
),
},
]}
dataSource={dataWarehouseTables}
rowKey={(item) => `${item.tableName}-${item.revenueColumn}`}
/>
</div>
)
}
@@ -19,6 +19,7 @@ export enum Scene {
WebAnalytics = 'WebAnalytics',
WebAnalyticsWebVitals = 'WebAnalyticsWebVitals',
WebAnalyticsPageReports = 'WebAnalyticsPageReports',
RevenueAnalytics = 'RevenueAnalytics',
Cohort = 'Cohort',
Activity = 'Activity',
DataManagement = 'DataManagement',

@@ -104,6 +104,12 @@ export const sceneConfigurations: Record<Scene | string, SceneConfig> = {
layout: 'app-container',
defaultDocsPath: '/docs/web-analytics',
},
[Scene.RevenueAnalytics]: {
projectBased: true,
name: 'Revenue analytics',
layout: 'app-container',
defaultDocsPath: '/docs/revenue-analytics',
},
[Scene.Cohort]: {
projectBased: true,
name: 'Cohort',
@@ -552,7 +558,8 @@ export const routes: Record<string, [Scene | string, string]> = {
[urls.webAnalytics()]: [Scene.WebAnalytics, 'webAnalytics'],
[urls.webAnalyticsWebVitals()]: [Scene.WebAnalytics, 'webAnalyticsWebVitals'],
[urls.webAnalyticsPageReports()]: [Scene.WebAnalytics, 'webAnalyticsPageReports'],
[urls.revenue()]: [Scene.DataManagement, 'revenue'],
[urls.revenueAnalytics()]: [Scene.RevenueAnalytics, 'revenueAnalytics'],
[urls.revenueSettings()]: [Scene.DataManagement, 'revenue'],
[urls.actions()]: [Scene.DataManagement, 'actions'],
[urls.eventDefinitions()]: [Scene.DataManagement, 'eventDefinitions'],
[urls.eventDefinition(':id')]: [Scene.EventDefinition, 'eventDefinition'],
@@ -1,6 +1,6 @@
import { useActions, useValues } from 'kea'
import { CurrencyDropdown } from 'scenes/data-management/revenue/CurrencyDropdown'
import { revenueEventsSettingsLogic } from 'scenes/data-management/revenue/revenueEventsSettingsLogic'
import { CurrencyDropdown } from 'products/revenue_analytics/frontend/settings/CurrencyDropdown'
import { revenueEventsSettingsLogic } from 'products/revenue_analytics/frontend/settings/revenueEventsSettingsLogic'

import { CurrencyCode } from '~/queries/schema/schema-general'
@@ -105,17 +105,6 @@ exports[`verifiedDomainsLogic values has proper defaults 1`] = `
],
"revenue_tracking_config": {
"baseCurrency": "USD",
"dataWarehouseTables": [
{
"distinctIdColumn": "distinct_id",
"revenueColumn": "revenue",
"revenueCurrencyColumn": {
"static": "SCR",
},
"tableName": "mock_table",
"timestampColumn": "timestamp",
},
],
"events": [
{
"eventName": "purchase",
@@ -13,11 +13,7 @@ import { Link } from 'lib/lemon-ui/Link'
import { isNotNil, isObject, pluralize } from 'lib/utils'
import { urls } from 'scenes/urls'

import {
CurrencyCode,
RevenueTrackingDataWarehouseTable,
RevenueTrackingEventItem,
} from '~/queries/schema/schema-general'
import { CurrencyCode, RevenueTrackingEventItem } from '~/queries/schema/schema-general'
import { ActivityScope, TeamSurveyConfigType, TeamType } from '~/types'

import { ThemeName } from './dataThemeLogic'
@@ -437,28 +433,10 @@ const teamActionsMapping: Record<
const removedEvents = beforeEventNames?.filter((event) => !afterEventNames?.includes(event))
const modifiedEvents = afterEventNames?.filter((event) => beforeEventNames?.includes(event))

const beforedataWarehouseTables: RevenueTrackingDataWarehouseTable[] =
typeof change.before === 'object' && change.before && 'dataWarehouseTables' in change.before
? change.before.dataWarehouseTables
: []
const afterdataWarehouseTables: RevenueTrackingDataWarehouseTable[] =
typeof change.after === 'object' && change.after && 'dataWarehouseTables' in change.after
? change.after.dataWarehouseTables
: []

const beforeExternalDataSchemaNames = beforedataWarehouseTables?.map((schema) => schema?.tableName)
const afterExternalDataSchemaNames = afterdataWarehouseTables?.map((schema) => schema?.tableName)
const addeddataWarehouseTables = afterExternalDataSchemaNames?.filter(
(schema) => !beforeExternalDataSchemaNames?.includes(schema)
)
const removeddataWarehouseTables = beforeExternalDataSchemaNames?.filter(
(schema) => !afterExternalDataSchemaNames?.includes(schema)
)
const modifieddataWarehouseTables = afterExternalDataSchemaNames?.filter((schema) =>
beforeExternalDataSchemaNames?.includes(schema)
)

const changes = [
beforeCurrency && afterCurrency && beforeCurrency !== afterCurrency
? `changed base currency from ${beforeCurrency} to ${afterCurrency}`
: null,
addedEvents?.length
? `added ${addedEvents.length} ${pluralize(
addedEvents.length,
@@ -483,33 +461,6 @@ const teamActionsMapping: Record<
true
)} (${modifiedEvents.join(', ')})`
: null,
addeddataWarehouseTables?.length
? `added ${addeddataWarehouseTables.length} ${pluralize(
addeddataWarehouseTables.length,
'data warehouse table',
'data warehouse tables',
true
)} (${addeddataWarehouseTables.join(', ')})`
: null,
removeddataWarehouseTables?.length
? `removed ${removeddataWarehouseTables.length} ${pluralize(
removeddataWarehouseTables.length,
'data warehouse table',
'data warehouse tables',
true
)} (${removeddataWarehouseTables.join(', ')})`
: null,
modifieddataWarehouseTables?.length
? `modified ${modifieddataWarehouseTables.length} ${pluralize(
modifieddataWarehouseTables.length,
'data warehouse table',
'data warehouse tables',
true
)} (${modifieddataWarehouseTables.join(', ')})`
: null,
beforeCurrency && afterCurrency && beforeCurrency !== afterCurrency
? `changed base currency from ${beforeCurrency} to ${afterCurrency}`
: null,
].filter(isNotNil)

if (!changes.length) {
@@ -39,7 +39,7 @@ export const urls = {
event: (id: string, timestamp: string): string =>
`/events/${encodeURIComponent(id)}/${encodeURIComponent(timestamp)}`,
ingestionWarnings: (): string => '/data-management/ingestion-warnings',
revenue: (): string => '/data-management/revenue',
revenueSettings: (): string => '/data-management/revenue',

pipelineNodeNew: (stage: PipelineStage | ':stage', id?: string | number): string => {
return `/pipeline/new/${stage}${id ? `/${id}` : ''}`
@@ -6,7 +6,6 @@ import { actionToUrl, router, urlToAction } from 'kea-router'
import { windowValues } from 'kea-window-values'
import api from 'lib/api'
import { authorizedUrlListLogic, AuthorizedUrlListType } from 'lib/components/AuthorizedUrlList/authorizedUrlListLogic'
import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types'
import { FEATURE_FLAGS, RETENTION_FIRST_TIME } from 'lib/constants'
import { LemonButton } from 'lib/lemon-ui/LemonButton'
import { Link, PostHogComDocsURL } from 'lib/lemon-ui/Link/Link'
@@ -29,7 +28,6 @@ import {
BreakdownFilter,
CompareFilter,
CustomEventConversionGoal,
DataWarehouseNode,
EventsNode,
InsightVizNode,
NodeKind,
@@ -967,7 +965,7 @@ export const webAnalyticsLogic = kea<webAnalyticsLogicType>([
!!featureFlags[FEATURE_FLAGS.WEB_REVENUE_TRACKING] &&
!(conversionGoal && 'actionId' in conversionGoal)

const revenueEventsSeries: (EventsNode | DataWarehouseNode)[] =
const revenueEventsSeries: EventsNode[] =
includeRevenue && currentTeam?.revenue_tracking_config
? ([
...currentTeam.revenue_tracking_config.events.map((e) => ({
@@ -979,22 +977,7 @@ export const webAnalyticsLogic = kea<webAnalyticsLogicType>([
math_property: e.revenueProperty,
math_property_revenue_currency: e.revenueCurrencyProperty,
})),
...(featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_DATA_WAREHOUSE_REVENUE_SETTINGS]
? currentTeam.revenue_tracking_config.dataWarehouseTables.map((t) => ({
id: t.tableName,
name: t.tableName,
table_name: t.tableName,
kind: NodeKind.DataWarehouseNode,
math: PropertyMathType.Sum,
id_field: t.distinctIdColumn,
distinct_id_field: t.distinctIdColumn,
math_property: t.revenueColumn,
math_property_type: TaxonomicFilterGroupType.DataWarehouseProperties,
math_property_revenue_currency: t.revenueCurrencyColumn,
timestamp_field: t.timestampColumn,
}))
: []),
] as (EventsNode | DataWarehouseNode)[])
] as EventsNode[])
: []

const conversionRevenueSeries =
@@ -203,6 +203,7 @@ export enum ProductKey {
TEAMS = 'teams',
WEB_ANALYTICS = 'web_analytics',
ERROR_TRACKING = 'error_tracking',
REVENUE_ANALYTICS = 'revenue_analytics',
}

type ProductKeyUnion = `${ProductKey}`
299 pnpm-lock.yaml (generated)
@@ -42,10 +42,10 @@ importers:
devDependencies:
'@parcel/packager-ts':
specifier: 2.13.3
version: 2.13.3(@parcel/core@2.13.3)
version: 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))
'@parcel/transformer-typescript-types':
specifier: 2.13.3
version: 2.13.3(@parcel/core@2.13.3)(typescript@4.9.5)
version: 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))(typescript@4.9.5)
eslint:
specifier: ^8.57.0
version: 8.57.0
@@ -66,7 +66,7 @@ importers:
version: 2.29.0(@typescript-eslint/parser@7.1.1(eslint@8.57.0)(typescript@4.9.5))(eslint@8.57.0)
eslint-plugin-jest:
specifier: ^28.6.0
version: 28.6.0(@typescript-eslint/eslint-plugin@7.1.1(@typescript-eslint/parser@7.1.1(eslint@8.57.0)(typescript@4.9.5))(eslint@8.57.0)(typescript@4.9.5))(eslint@8.57.0)(jest@29.7.0)(typescript@4.9.5)
version: 28.6.0(@typescript-eslint/eslint-plugin@7.1.1(@typescript-eslint/parser@7.1.1(eslint@8.57.0)(typescript@4.9.5))(eslint@8.57.0)(typescript@4.9.5))(eslint@8.57.0)(jest@29.7.0(@types/node@18.18.4)(ts-node@10.9.1(@swc/core@1.11.4(@swc/helpers@0.5.15))(@types/node@18.18.4)(typescript@4.9.5)))(typescript@4.9.5)
eslint-plugin-posthog:
specifier: workspace:*
version: link:common/eslint_rules
@@ -99,7 +99,7 @@ importers:
version: 4.3.0(stylelint@15.11.0(typescript@4.9.5))
stylelint-config-standard-scss:
specifier: ^11.1.0
version: 11.1.0(postcss@8.5.3)(stylelint@15.11.0(typescript@4.9.5))
version: 11.1.0(postcss@8.4.31)(stylelint@15.11.0(typescript@4.9.5))
stylelint-order:
specifier: ^6.0.3
version: 6.0.3(stylelint@15.11.0(typescript@4.9.5))
@@ -198,7 +198,7 @@ importers:
version: 3.12.1
jest:
specifier: ^29.7.0
version: 29.7.0(@types/node@18.18.4)(ts-node@10.9.1(@swc/core@1.10.14(@swc/helpers@0.5.15))(@types/node@18.18.4)(typescript@4.9.5))
version: 29.7.0(@types/node@18.18.4)(ts-node@10.9.1(@swc/core@1.11.4(@swc/helpers@0.5.15))(@types/node@18.18.4)(typescript@4.9.5))
parcel:
specifier: ^2.13.3
version: 2.13.3(@swc/helpers@0.5.15)(cssnano@7.0.6(postcss@8.5.3))(postcss@8.5.3)(relateurl@0.2.7)(svgo@3.3.2)(terser@5.19.1)(typescript@4.9.5)
@@ -306,7 +306,7 @@ importers:
version: 7.6.4(@babel/core@7.26.0)(@swc/core@1.11.4(@swc/helpers@0.5.15))(@swc/helpers@0.5.15)(encoding@0.1.13)(esbuild@0.18.20)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(type-fest@3.5.3)(typescript@4.9.5)(webpack-cli@5.1.4)(webpack-hot-middleware@2.25.4)
'@storybook/test-runner':
specifier: ^0.16.0
version: 0.16.0(@swc/helpers@0.5.15)(encoding@0.1.13)
version: 0.16.0(@swc/helpers@0.5.15)(@types/node@18.18.4)(encoding@0.1.13)(ts-node@10.9.1(@swc/core@1.11.4(@swc/helpers@0.5.15))(@types/node@18.18.4)(typescript@4.9.5))
'@storybook/theming':
specifier: ^7.6.4
version: 7.6.20(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
@@ -382,7 +382,7 @@ importers:
version: 7.24.0
'@cypress/webpack-preprocessor':
specifier: ^6.0.2
version: 6.0.2(@babel/core@7.26.0)(@babel/preset-env@7.23.5(@babel/core@7.26.0))(babel-loader@8.3.0(@babel/core@7.26.0)(webpack@5.88.2))(webpack@5.88.2)
version: 6.0.2(@babel/core@7.26.0)(@babel/preset-env@7.23.5(@babel/core@7.26.0))(babel-loader@8.3.0(@babel/core@7.26.0)(webpack@5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15))))(webpack@5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15)))
'@posthog/frontend':
specifier: workspace:*
version: link:../frontend
@@ -391,7 +391,7 @@ importers:
version: link:../common/storybook
css-loader:
specifier: '*'
version: 3.6.0(webpack@5.88.2)
version: 3.6.0(webpack@5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15)))
cypress:
specifier: ^13.11.0
version: 13.11.0
@@ -409,28 +409,28 @@ importers:
version: link:../common/eslint_rules
file-loader:
specifier: '*'
version: 6.2.0(webpack@5.88.2)
version: 6.2.0(webpack@5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15)))
less-loader:
specifier: '*'
version: 7.3.0(less@4.2.2)(webpack@5.88.2)
version: 7.3.0(less@4.2.2)(webpack@5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15)))
postcss-loader:
specifier: '*'
version: 4.3.0(postcss@8.5.3)(webpack@5.88.2)
version: 4.3.0(postcss@8.5.3)(webpack@5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15)))
posthog-js:
specifier: '*'
version: 1.217.2
sass-loader:
specifier: '*'
version: 10.3.1(sass@1.56.0)(webpack@5.88.2)
version: 10.3.1(sass@1.56.0)(webpack@5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15)))
style-loader:
specifier: '*'
version: 2.0.0(webpack@5.88.2)
version: 2.0.0(webpack@5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15)))
typescript:
specifier: ~4.9.5
version: 4.9.5
webpack:
specifier: '*'
version: 5.88.2
version: 5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15))
devDependencies:
'@babel/core':
specifier: ^7.22.10
@@ -461,7 +461,7 @@ importers:
version: 7.23.3(@babel/core@7.26.0)
babel-loader:
specifier: ^8.0.6
version: 8.3.0(@babel/core@7.26.0)(webpack@5.88.2)
version: 8.3.0(@babel/core@7.26.0)(webpack@5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15)))
babel-plugin-import:
specifier: ^1.13.0
version: 1.13.8
@@ -601,6 +601,9 @@ importers:
'@posthog/products-replay':
specifier: workspace:*
version: link:../products/replay
'@posthog/products-revenue-analytics':
specifier: workspace:*
version: link:../products/revenue_analytics
'@posthog/products-web-analytics':
specifier: workspace:*
version: link:../products/web_analytics
@@ -1178,7 +1181,7 @@ importers:
version: 4.3.0(stylelint@15.11.0(typescript@4.9.5))
stylelint-config-standard-scss:
specifier: ^11.1.0
version: 11.1.0(postcss@8.4.31)(stylelint@15.11.0(typescript@4.9.5))
version: 11.1.0(postcss@8.5.3)(stylelint@15.11.0(typescript@4.9.5))
stylelint-order:
specifier: ^6.0.3
version: 6.0.3(stylelint@15.11.0(typescript@4.9.5))
@@ -1730,7 +1733,7 @@ importers:
version: link:../../common/eslint_rules
jest:
specifier: '*'
version: 29.7.0(@types/node@18.18.4)(ts-node@10.9.1(@swc/core@1.10.14(@swc/helpers@0.5.15))(@types/node@18.18.4)(typescript@4.9.5))
version: 29.7.0(@types/node@18.18.4)(ts-node@10.9.1(@swc/core@1.11.4(@swc/helpers@0.5.15))(@types/node@18.18.4)(typescript@4.9.5))
kea:
specifier: '*'
version: 3.1.5(react@18.2.0)
@@ -1888,6 +1891,36 @@ importers:
specifier: '*'
version: 18.2.0

products/revenue_analytics:
dependencies:
'@posthog/icons':
specifier: '*'
version: 0.11.7(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
'@storybook/react':
specifier: '*'
version: 7.6.4(encoding@0.1.13)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(typescript@4.9.5)
'@types/react':
specifier: '*'
version: 17.0.52
clsx:
specifier: '*'
version: 1.2.1
kea:
specifier: '*'
version: 3.1.5(react@18.2.0)
kea-forms:
specifier: '*'
version: 3.2.0(kea@3.1.5(react@18.2.0))
kea-loaders:
specifier: '*'
version: 3.0.0(kea@3.1.5(react@18.2.0))
kea-router:
specifier: '*'
version: 3.2.1(kea@3.1.5(react@18.2.0))
react:
specifier: '*'
version: 18.2.0

products/web_analytics:
dependencies:
'@posthog/icons':
@@ -17824,15 +17857,15 @@ snapshots:
tunnel-agent: 0.6.0
uuid: 8.3.2

'@cypress/webpack-preprocessor@6.0.2(@babel/core@7.26.0)(@babel/preset-env@7.23.5(@babel/core@7.26.0))(babel-loader@8.3.0(@babel/core@7.26.0)(webpack@5.88.2))(webpack@5.88.2)':
'@cypress/webpack-preprocessor@6.0.2(@babel/core@7.26.0)(@babel/preset-env@7.23.5(@babel/core@7.26.0))(babel-loader@8.3.0(@babel/core@7.26.0)(webpack@5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15))))(webpack@5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15)))':
dependencies:
'@babel/core': 7.26.0
'@babel/preset-env': 7.23.5(@babel/core@7.26.0)
babel-loader: 8.3.0(@babel/core@7.26.0)(webpack@5.88.2)
babel-loader: 8.3.0(@babel/core@7.26.0)(webpack@5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15)))
bluebird: 3.7.1
debug: 4.4.0(supports-color@8.1.1)
lodash: 4.17.21
webpack: 5.88.2
webpack: 5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15))
transitivePeerDependencies:
- supports-color

@@ -18743,14 +18776,6 @@ snapshots:
'@parcel/utils': 2.13.3
lmdb: 2.8.5

'@parcel/cache@2.13.3(@parcel/core@2.13.3)':
dependencies:
'@parcel/core': 2.13.3
'@parcel/fs': 2.13.3(@parcel/core@2.13.3)
'@parcel/logger': 2.13.3
'@parcel/utils': 2.13.3
lmdb: 2.8.5

'@parcel/codeframe@2.13.3':
dependencies:
chalk: 4.1.2
@@ -18807,36 +18832,6 @@ snapshots:
- typescript
- uncss

'@parcel/core@2.13.3':
dependencies:
'@mischnic/json-sourcemap': 0.1.1
'@parcel/cache': 2.13.3(@parcel/core@2.13.3)
'@parcel/diagnostic': 2.13.3
'@parcel/events': 2.13.3
'@parcel/feature-flags': 2.13.3
'@parcel/fs': 2.13.3(@parcel/core@2.13.3)
'@parcel/graph': 3.3.3
'@parcel/logger': 2.13.3
'@parcel/package-manager': 2.13.3(@parcel/core@2.13.3)
'@parcel/plugin': 2.13.3(@parcel/core@2.13.3)
'@parcel/profiler': 2.13.3
'@parcel/rust': 2.13.3
'@parcel/source-map': 2.1.1
'@parcel/types': 2.13.3(@parcel/core@2.13.3)
'@parcel/utils': 2.13.3
'@parcel/workers': 2.13.3(@parcel/core@2.13.3)
base-x: 3.0.10
browserslist: 4.24.3
clone: 2.1.2
dotenv: 16.4.7
dotenv-expand: 11.0.7
json5: 2.2.3
msgpackr: 1.11.2
nullthrows: 1.1.1
semver: 7.7.0
transitivePeerDependencies:
- '@swc/helpers'

'@parcel/core@2.13.3(@swc/helpers@0.5.15)':
dependencies:
'@mischnic/json-sourcemap': 0.1.1
@@ -18886,16 +18881,6 @@ snapshots:
'@parcel/watcher': 2.5.1
'@parcel/workers': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))

'@parcel/fs@2.13.3(@parcel/core@2.13.3)':
dependencies:
'@parcel/core': 2.13.3
'@parcel/feature-flags': 2.13.3
'@parcel/rust': 2.13.3
'@parcel/types-internal': 2.13.3
'@parcel/utils': 2.13.3
'@parcel/watcher': 2.5.1
'@parcel/workers': 2.13.3(@parcel/core@2.13.3)

'@parcel/graph@3.3.3':
dependencies:
'@parcel/feature-flags': 2.13.3
@@ -18930,18 +18915,6 @@ snapshots:
transitivePeerDependencies:
- '@parcel/core'

'@parcel/node-resolver-core@3.4.3(@parcel/core@2.13.3)':
dependencies:
'@mischnic/json-sourcemap': 0.1.1
'@parcel/diagnostic': 2.13.3
'@parcel/fs': 2.13.3(@parcel/core@2.13.3)
'@parcel/rust': 2.13.3
'@parcel/utils': 2.13.3
nullthrows: 1.1.1
semver: 7.7.0
transitivePeerDependencies:
- '@parcel/core'

'@parcel/optimizer-css@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))':
dependencies:
'@parcel/diagnostic': 2.13.3
@@ -19018,21 +18991,6 @@ snapshots:
transitivePeerDependencies:
- '@swc/helpers'

'@parcel/package-manager@2.13.3(@parcel/core@2.13.3)':
dependencies:
'@parcel/core': 2.13.3
'@parcel/diagnostic': 2.13.3
'@parcel/fs': 2.13.3(@parcel/core@2.13.3)
'@parcel/logger': 2.13.3
'@parcel/node-resolver-core': 3.4.3(@parcel/core@2.13.3)
'@parcel/types': 2.13.3(@parcel/core@2.13.3)
'@parcel/utils': 2.13.3
'@parcel/workers': 2.13.3(@parcel/core@2.13.3)
'@swc/core': 1.11.4(@swc/helpers@0.5.15)
semver: 7.7.0
transitivePeerDependencies:
- '@swc/helpers'

'@parcel/packager-css@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))':
dependencies:
'@parcel/diagnostic': 2.13.3
@@ -19088,12 +19046,6 @@ snapshots:
transitivePeerDependencies:
- '@parcel/core'

'@parcel/packager-ts@2.13.3(@parcel/core@2.13.3)':
dependencies:
'@parcel/plugin': 2.13.3(@parcel/core@2.13.3)
transitivePeerDependencies:
- '@parcel/core'

'@parcel/packager-wasm@2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))':
dependencies:
'@parcel/plugin': 2.13.3(@parcel/core@2.13.3(@swc/helpers@0.5.15))
@@ -19106,12 +19058,6 @@ snapshots:
transitivePeerDependencies:
- '@parcel/core'

'@parcel/plugin@2.13.3(@parcel/core@2.13.3)':
dependencies:
'@parcel/types': 2.13.3(@parcel/core@2.13.3)
transitivePeerDependencies:
- '@parcel/core'

'@parcel/profiler@2.13.3':
dependencies:
'@parcel/diagnostic': 2.13.3
@@ -19324,18 +19270,6 @@ snapshots:
transitivePeerDependencies:
- '@parcel/core'

'@parcel/transformer-typescript-types@2.13.3(@parcel/core@2.13.3)(typescript@4.9.5)':
dependencies:
'@parcel/diagnostic': 2.13.3
'@parcel/plugin': 2.13.3(@parcel/core@2.13.3)
'@parcel/source-map': 2.1.1
'@parcel/ts-utils': 2.13.3(typescript@4.9.5)
'@parcel/utils': 2.13.3
nullthrows: 1.1.1
typescript: 4.9.5
transitivePeerDependencies:
- '@parcel/core'

'@parcel/ts-utils@2.13.3(typescript@4.9.5)':
dependencies:
nullthrows: 1.1.1
@@ -19355,13 +19289,6 @@ snapshots:
transitivePeerDependencies:
- '@parcel/core'

'@parcel/types@2.13.3(@parcel/core@2.13.3)':
dependencies:
'@parcel/types-internal': 2.13.3
'@parcel/workers': 2.13.3(@parcel/core@2.13.3)
transitivePeerDependencies:
- '@parcel/core'

'@parcel/utils@2.13.3':
dependencies:
'@parcel/codeframe': 2.13.3
@@ -19443,16 +19370,6 @@ snapshots:
'@parcel/utils': 2.13.3
nullthrows: 1.1.1

'@parcel/workers@2.13.3(@parcel/core@2.13.3)':
dependencies:
'@parcel/core': 2.13.3
'@parcel/diagnostic': 2.13.3
'@parcel/logger': 2.13.3
'@parcel/profiler': 2.13.3
'@parcel/types-internal': 2.13.3
'@parcel/utils': 2.13.3
nullthrows: 1.1.1

'@pkgjs/parseargs@0.11.0':
optional: true
@@ -21410,7 +21327,7 @@ snapshots:
- encoding
- supports-color

'@storybook/test-runner@0.16.0(@swc/helpers@0.5.15)(encoding@0.1.13)':
'@storybook/test-runner@0.16.0(@swc/helpers@0.5.15)(@types/node@18.18.4)(encoding@0.1.13)(ts-node@10.9.1(@swc/core@1.11.4(@swc/helpers@0.5.15))(@types/node@18.18.4)(typescript@4.9.5))':
dependencies:
'@babel/core': 7.26.0
'@babel/generator': 7.26.3
@@ -21427,14 +21344,14 @@ snapshots:
commander: 9.4.1
expect-playwright: 0.8.0
glob: 10.4.5
jest: 29.7.0(@types/node@18.18.4)(ts-node@10.9.1(@swc/core@1.10.14(@swc/helpers@0.5.15))(@types/node@18.18.4)(typescript@4.9.5))
jest: 29.7.0(@types/node@18.18.4)(ts-node@10.9.1(@swc/core@1.11.4(@swc/helpers@0.5.15))(@types/node@18.18.4)(typescript@4.9.5))
jest-circus: 29.7.0
jest-environment-node: 29.7.0
jest-junit: 16.0.0
jest-playwright-preset: 4.0.0(jest-circus@29.7.0)(jest-environment-node@29.7.0)(jest-runner@29.7.0)(jest@29.7.0)
jest-playwright-preset: 4.0.0(jest-circus@29.7.0)(jest-environment-node@29.7.0)(jest-runner@29.7.0)(jest@29.7.0(@types/node@18.18.4)(ts-node@10.9.1(@swc/core@1.11.4(@swc/helpers@0.5.15))(@types/node@18.18.4)(typescript@4.9.5)))
jest-runner: 29.7.0
jest-serializer-html: 7.1.0
jest-watch-typeahead: 2.2.2(jest@29.7.0)
jest-watch-typeahead: 2.2.2(jest@29.7.0(@types/node@18.18.4)(ts-node@10.9.1(@swc/core@1.11.4(@swc/helpers@0.5.15))(@types/node@18.18.4)(typescript@4.9.5)))
node-fetch: 2.6.9(encoding@0.1.13)
playwright: 1.45.0
read-pkg-up: 7.0.1
@@ -23333,6 +23250,15 @@ snapshots:
transitivePeerDependencies:
- supports-color

babel-loader@8.3.0(@babel/core@7.26.0)(webpack@5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15))):
dependencies:
'@babel/core': 7.26.0
find-cache-dir: 3.3.2
loader-utils: 2.0.4
make-dir: 3.1.0
schema-utils: 2.7.1
webpack: 5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15))

babel-loader@8.3.0(@babel/core@7.26.0)(webpack@5.88.2):
dependencies:
'@babel/core': 7.26.0
@@ -24390,6 +24316,23 @@ snapshots:
postcss-selector-parser: 7.1.0
postcss-value-parser: 4.2.0

css-loader@3.6.0(webpack@5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15))):
dependencies:
camelcase: 5.3.1
cssesc: 3.0.0
icss-utils: 4.1.1
loader-utils: 1.4.2
normalize-path: 3.0.0
postcss: 7.0.39
postcss-modules-extract-imports: 2.0.0
postcss-modules-local-by-default: 3.0.3
postcss-modules-scope: 2.2.0
postcss-modules-values: 3.0.0
postcss-value-parser: 4.2.0
schema-utils: 2.7.1
semver: 6.3.1
webpack: 5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15))

css-loader@3.6.0(webpack@5.88.2):
dependencies:
camelcase: 5.3.1
@@ -25668,7 +25611,7 @@ snapshots:

eslint-import-resolver-node@0.3.9:
dependencies:
debug: 3.2.7(supports-color@8.1.1)
debug: 3.2.7(supports-color@5.5.0)
is-core-module: 2.13.1
resolve: 1.22.8
transitivePeerDependencies:
@@ -25676,7 +25619,7 @@ snapshots:

eslint-module-utils@2.8.0(@typescript-eslint/parser@7.1.1(eslint@8.57.0)(typescript@4.9.5))(eslint-import-resolver-node@0.3.9)(eslint@8.57.0):
dependencies:
debug: 3.2.7(supports-color@8.1.1)
debug: 3.2.7(supports-color@5.5.0)
optionalDependencies:
'@typescript-eslint/parser': 7.1.1(eslint@8.57.0)(typescript@4.9.5)
eslint: 8.57.0
@@ -25718,7 +25661,7 @@ snapshots:
array.prototype.findlastindex: 1.2.3
array.prototype.flat: 1.3.2
array.prototype.flatmap: 1.3.2
debug: 3.2.7(supports-color@8.1.1)
debug: 3.2.7(supports-color@5.5.0)
doctrine: 2.1.0
eslint: 8.57.0
eslint-import-resolver-node: 0.3.9
@@ -25750,17 +25693,6 @@ snapshots:
- supports-color
- typescript

eslint-plugin-jest@28.6.0(@typescript-eslint/eslint-plugin@7.1.1(@typescript-eslint/parser@7.1.1(eslint@8.57.0)(typescript@4.9.5))(eslint@8.57.0)(typescript@4.9.5))(eslint@8.57.0)(jest@29.7.0)(typescript@4.9.5):
dependencies:
'@typescript-eslint/utils': 7.1.1(eslint@8.57.0)(typescript@4.9.5)
eslint: 8.57.0
optionalDependencies:
'@typescript-eslint/eslint-plugin': 7.1.1(@typescript-eslint/parser@7.1.1(eslint@8.57.0)(typescript@4.9.5))(eslint@8.57.0)(typescript@4.9.5)
jest: 29.7.0(@types/node@18.18.4)(ts-node@10.9.1(@swc/core@1.10.14(@swc/helpers@0.5.15))(@types/node@18.18.4)(typescript@4.9.5))
transitivePeerDependencies:
- supports-color
- typescript

eslint-plugin-no-only-tests@3.3.0: {}

eslint-plugin-node@11.1.0(eslint@8.57.0):
@@ -26182,6 +26114,12 @@ snapshots:
dependencies:
flat-cache: 3.2.0

file-loader@6.2.0(webpack@5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15))):
dependencies:
loader-utils: 2.0.4
schema-utils: 3.3.0
webpack: 5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15))

file-loader@6.2.0(webpack@5.88.2):
dependencies:
loader-utils: 2.0.4
@@ -27959,10 +27897,10 @@ snapshots:
'@types/node': 18.18.4
jest-util: 29.7.0

jest-playwright-preset@4.0.0(jest-circus@29.7.0)(jest-environment-node@29.7.0)(jest-runner@29.7.0)(jest@29.7.0):
jest-playwright-preset@4.0.0(jest-circus@29.7.0)(jest-environment-node@29.7.0)(jest-runner@29.7.0)(jest@29.7.0(@types/node@18.18.4)(ts-node@10.9.1(@swc/core@1.11.4(@swc/helpers@0.5.15))(@types/node@18.18.4)(typescript@4.9.5))):
dependencies:
expect-playwright: 0.8.0
jest: 29.7.0(@types/node@18.18.4)(ts-node@10.9.1(@swc/core@1.10.14(@swc/helpers@0.5.15))(@types/node@18.18.4)(typescript@4.9.5))
jest: 29.7.0(@types/node@18.18.4)(ts-node@10.9.1(@swc/core@1.11.4(@swc/helpers@0.5.15))(@types/node@18.18.4)(typescript@4.9.5))
jest-circus: 29.7.0
jest-environment-node: 29.7.0
jest-process-manager: 0.4.0
@@ -28116,11 +28054,11 @@ snapshots:
leven: 3.1.0
pretty-format: 29.7.0

jest-watch-typeahead@2.2.2(jest@29.7.0):
jest-watch-typeahead@2.2.2(jest@29.7.0(@types/node@18.18.4)(ts-node@10.9.1(@swc/core@1.11.4(@swc/helpers@0.5.15))(@types/node@18.18.4)(typescript@4.9.5))):
dependencies:
ansi-escapes: 6.0.0
chalk: 5.4.1
jest: 29.7.0(@types/node@18.18.4)(ts-node@10.9.1(@swc/core@1.10.14(@swc/helpers@0.5.15))(@types/node@18.18.4)(typescript@4.9.5))
jest: 29.7.0(@types/node@18.18.4)(ts-node@10.9.1(@swc/core@1.11.4(@swc/helpers@0.5.15))(@types/node@18.18.4)(typescript@4.9.5))
jest-regex-util: 29.6.3
jest-watcher: 29.7.0
slash: 5.1.0
@@ -28452,6 +28390,14 @@ snapshots:
dotenv: 16.4.7
dotenv-expand: 10.0.0

less-loader@7.3.0(less@4.2.2)(webpack@5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15))):
dependencies:
klona: 2.0.5
less: 4.2.2
loader-utils: 2.0.4
schema-utils: 3.3.0
webpack: 5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15))

less-loader@7.3.0(less@4.2.2)(webpack@5.88.2):
dependencies:
klona: 2.0.5
@@ -30295,6 +30241,16 @@ snapshots:
'@csstools/utilities': 2.0.0(postcss@8.5.2)
postcss: 8.5.2

postcss-loader@4.3.0(postcss@8.5.3)(webpack@5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15))):
dependencies:
cosmiconfig: 7.1.0
klona: 2.0.5
loader-utils: 2.0.4
postcss: 8.5.3
schema-utils: 3.3.0
semver: 7.7.0
webpack: 5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15))

postcss-loader@4.3.0(postcss@8.5.3)(webpack@5.88.2):
dependencies:
cosmiconfig: 7.1.0
@@ -31941,6 +31897,17 @@ snapshots:
sass-embedded-win32-ia32: 1.70.0
sass-embedded-win32-x64: 1.70.0

sass-loader@10.3.1(sass@1.56.0)(webpack@5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15))):
dependencies:
klona: 2.0.5
loader-utils: 2.0.4
neo-async: 2.6.2
schema-utils: 3.3.0
semver: 7.7.0
webpack: 5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15))
optionalDependencies:
sass: 1.56.0

sass-loader@10.3.1(sass@1.56.0)(webpack@5.88.2):
dependencies:
klona: 2.0.5
@@ -32565,6 +32532,12 @@ snapshots:

stubs@3.0.0: {}

style-loader@2.0.0(webpack@5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15))):
dependencies:
loader-utils: 2.0.4
schema-utils: 3.3.0
webpack: 5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15))

style-loader@2.0.0(webpack@5.88.2):
dependencies:
loader-utils: 2.0.4
@@ -32908,14 +32881,16 @@ snapshots:
'@swc/core': 1.11.4(@swc/helpers@0.5.15)
esbuild: 0.18.20

terser-webpack-plugin@5.3.9(webpack@5.88.2):
terser-webpack-plugin@5.3.9(@swc/core@1.11.4(@swc/helpers@0.5.15))(webpack@5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15))):
dependencies:
'@jridgewell/trace-mapping': 0.3.25
jest-worker: 27.5.1
schema-utils: 3.3.0
serialize-javascript: 6.0.1
terser: 5.19.1
webpack: 5.88.2
webpack: 5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15))
optionalDependencies:
'@swc/core': 1.11.4(@swc/helpers@0.5.15)

terser@5.19.1:
dependencies:
@@ -33696,7 +33671,7 @@ snapshots:

webpack-virtual-modules@0.5.0: {}

webpack@5.88.2:
webpack@5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15)):
dependencies:
'@types/eslint-scope': 3.7.4
'@types/estree': 1.0.1
@@ -33719,7 +33694,7 @@ snapshots:
neo-async: 2.6.2
schema-utils: 3.3.0
tapable: 2.2.1
terser-webpack-plugin: 5.3.9(webpack@5.88.2)
terser-webpack-plugin: 5.3.9(@swc/core@1.11.4(@swc/helpers@0.5.15))(webpack@5.88.2(@swc/core@1.11.4(@swc/helpers@0.5.15)))
watchpack: 2.4.0
webpack-sources: 3.2.3
transitivePeerDependencies:
@@ -294,7 +294,10 @@ class TestOrganizationSerializer(APIBaseTest):
self.assertEqual(teams1[0]["name"], self.team.name)

self.assertEqual(len(teams2), 2)
self.assertEqual([teams2[0]["name"], teams2[1]["name"]], ["Default project", team2.name])
self.assertEqual(
sorted([team["name"] for team in teams2]),
sorted(["Default project", team2.name]),
)


class TestOrganizationRbacMigrations(APIBaseTest):

@@ -177,7 +177,6 @@ class HedgeboxMatrix(Matrix):
"revenueCurrencyProperty": {"static": "USD"},
}
],
"dataWarehouseTables": [],
}

# Dashboard: Key metrics (project home)

@@ -92,11 +92,14 @@ from posthog.schema import (
PersonsOnEventsMode,
SessionTableVersion,
)
from posthog.warehouse.models.external_data_source import ExternalDataSource
from posthog.warehouse.models.external_data_job import ExternalDataJob
from posthog.warehouse.models.table import (
DataWarehouseTable,
DataWarehouseTableColumns,
)
from posthog.warehouse.models.external_data_schema import ExternalDataSchema
from products.revenue_analytics.backend.models import RevenueAnalyticsRevenueView

if TYPE_CHECKING:
from posthog.models import Team
@@ -393,39 +396,55 @@ def create_hogql_database(

warehouse_tables[table.name] = s3_table

def define_mappings(warehouse: dict[str, Table], get_table: Callable):
if "id" not in warehouse[warehouse_modifier.table_name].fields.keys():
warehouse[warehouse_modifier.table_name].fields["id"] = ExpressionField(
# For every Stripe source, let's generate its own revenue view
# Prefetch related schemas and tables to avoid N+1
with timings.measure("revenue_analytics_views"):
with timings.measure("select"):
stripe_sources = list(
ExternalDataSource.objects.filter(team_id=team.pk, source_type=ExternalDataSource.Type.STRIPE)
.exclude(deleted=True)
.prefetch_related(Prefetch("schemas", queryset=ExternalDataSchema.objects.prefetch_related("table")))
)

for stripe_source in stripe_sources:
with timings.measure(f"for_schema_source_{stripe_source.prefix or stripe_source.id}"):
view = RevenueAnalyticsRevenueView.for_schema_source(stripe_source)
if view is not None:
views[f"stripe_{stripe_source.prefix or stripe_source.id}_revenue"] = view

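The single prefetch in the hunk above is what keeps revenue-view generation at a constant number of queries: every Stripe source's schemas, and each schema's table, are loaded before the loop runs, so iterating never goes back to the database. A minimal sketch of the same pattern, with hypothetical Source and Schema models standing in for the PostHog ones:

from django.db.models import Prefetch

# Three queries total (sources, schemas, tables), no matter how many sources exist.
sources = (
    Source.objects.filter(team_id=team_id)
    .exclude(deleted=True)
    .prefetch_related(Prefetch("schemas", queryset=Schema.objects.select_related("table")))
)
for source in sources:
    for schema in source.schemas.all():  # served from the prefetch cache
        build_view(schema.table)  # hypothetical helper; no per-iteration query
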
def define_mappings(store: dict[str, Table], get_table: Callable):
if "id" not in store[warehouse_modifier.table_name].fields.keys():
store[warehouse_modifier.table_name].fields["id"] = ExpressionField(
name="id",
expr=parse_expr(warehouse_modifier.id_field),
)

if "timestamp" not in warehouse[warehouse_modifier.table_name].fields.keys() or not isinstance(
warehouse[warehouse_modifier.table_name].fields.get("timestamp"), DateTimeDatabaseField
if "timestamp" not in store[warehouse_modifier.table_name].fields.keys() or not isinstance(
store[warehouse_modifier.table_name].fields.get("timestamp"), DateTimeDatabaseField
):
table_model = get_table(team=team, warehouse_modifier=warehouse_modifier)
timestamp_field_type = table_model.get_clickhouse_column_type(warehouse_modifier.timestamp_field)

# If field type is none or datetime, we can use the field directly
if timestamp_field_type is None or timestamp_field_type.startswith("DateTime"):
warehouse[warehouse_modifier.table_name].fields["timestamp"] = ExpressionField(
store[warehouse_modifier.table_name].fields["timestamp"] = ExpressionField(
name="timestamp",
expr=ast.Field(chain=[warehouse_modifier.timestamp_field]),
)
else:
warehouse[warehouse_modifier.table_name].fields["timestamp"] = ExpressionField(
store[warehouse_modifier.table_name].fields["timestamp"] = ExpressionField(
name="timestamp",
expr=ast.Call(name="toDateTime", args=[ast.Field(chain=[warehouse_modifier.timestamp_field])]),
)

# TODO: Need to decide how the distinct_id and person_id fields are going to be handled
if "distinct_id" not in warehouse[warehouse_modifier.table_name].fields.keys():
warehouse[warehouse_modifier.table_name].fields["distinct_id"] = ExpressionField(
if "distinct_id" not in store[warehouse_modifier.table_name].fields.keys():
store[warehouse_modifier.table_name].fields["distinct_id"] = ExpressionField(
name="distinct_id",
expr=parse_expr(warehouse_modifier.distinct_id_field),
)

if "person_id" not in warehouse[warehouse_modifier.table_name].fields.keys():
if "person_id" not in store[warehouse_modifier.table_name].fields.keys():
events_join = (
DataWarehouseJoin.objects.filter(
team_id=team.pk,
@@ -436,38 +455,39 @@ def create_hogql_database(
.first()
)
if events_join:
warehouse[warehouse_modifier.table_name].fields["person_id"] = FieldTraverser(
store[warehouse_modifier.table_name].fields["person_id"] = FieldTraverser(
chain=[events_join.field_name, "person_id"]
)
else:
warehouse[warehouse_modifier.table_name].fields["person_id"] = ExpressionField(
store[warehouse_modifier.table_name].fields["person_id"] = ExpressionField(
name="person_id",
expr=parse_expr(warehouse_modifier.distinct_id_field),
)

return warehouse
return store

if modifiers.dataWarehouseEventsModifiers:
for warehouse_modifier in modifiers.dataWarehouseEventsModifiers:
# TODO: add all field mappings
with timings.measure("data_warehouse_event_modifiers"):
for warehouse_modifier in modifiers.dataWarehouseEventsModifiers:
with timings.measure(f"data_warehouse_event_modifier_{warehouse_modifier.table_name}"):
# TODO: add all field mappings
is_view = warehouse_modifier.table_name in views.keys()

is_view = warehouse_modifier.table_name in views.keys()

if is_view:
views = define_mappings(
views,
lambda team, warehouse_modifier: DataWarehouseSavedQuery.objects.exclude(deleted=True)
.filter(team_id=team.pk, name=warehouse_modifier.table_name)
.latest("created_at"),
)
else:
warehouse_tables = define_mappings(
warehouse_tables,
lambda team, warehouse_modifier: DataWarehouseTable.objects.exclude(deleted=True)
.filter(team_id=team.pk, name=warehouse_modifier.table_name)
.select_related("credential", "external_data_source")
.latest("created_at"),
)
if is_view:
views = define_mappings(
views,
lambda team, warehouse_modifier: DataWarehouseSavedQuery.objects.exclude(deleted=True)
.filter(team_id=team.pk, name=warehouse_modifier.table_name)
.latest("created_at"),
)
else:
warehouse_tables = define_mappings(
warehouse_tables,
lambda team, warehouse_modifier: DataWarehouseTable.objects.exclude(deleted=True)
.filter(team_id=team.pk, name=warehouse_modifier.table_name)
.select_related("credential", "external_data_source")
.latest("created_at"),
)

database.add_warehouse_tables(**warehouse_tables)
database.add_views(**views)
@@ -694,17 +714,19 @@ def serialize_database(
if view is None:
continue

saved_query = views_dict.get(view_name)
if not saved_query:
continue

fields = serialize_fields(view.fields, context, view_name, table_type="external")
fields_dict = {field.name: field for field in fields}

saved_query = views_dict.get(view_name)
if saved_query:
tables[view_name] = DatabaseSchemaViewTable(
fields=fields_dict,
id=str(saved_query.pk),
name=view.name,
query=HogQLQuery(query=saved_query.query["query"]),
)
tables[view_name] = DatabaseSchemaViewTable(
fields=fields_dict,
id=str(saved_query.pk),
name=view.name,
query=HogQLQuery(query=saved_query.query["query"]),
)

return tables


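One rule in the define_mappings hunk above is worth calling out: a warehouse table's timestamp column passes through untouched when ClickHouse already reports it as a DateTime (or when its type is unknown), and anything else is coerced with toDateTime(). A condensed sketch of that rule, using the same posthog.hogql ast API the hunk itself uses:

from typing import Optional

from posthog.hogql import ast

def timestamp_expr(column: str, clickhouse_type: Optional[str]) -> ast.Expr:
    # DateTime / DateTime64 (or unknown) columns are exposed directly.
    if clickhouse_type is None or clickhouse_type.startswith("DateTime"):
        return ast.Field(chain=[column])
    # Everything else (e.g. String, Int64) gets wrapped in toDateTime().
    return ast.Call(name="toDateTime", args=[ast.Field(chain=[column])])
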
@@ -5,7 +5,6 @@ from posthog.schema import (
CurrencyCode,
RevenueTrackingConfig,
RevenueTrackingEventItem,
RevenueTrackingDataWarehouseTable,
)
from posthog.models.exchange_rate.sql import EXCHANGE_RATE_DECIMAL_PRECISION
from posthog.hogql.database.models import (
@@ -210,82 +209,3 @@ def revenue_where_expr_for_events(config: Union[RevenueTrackingConfig, dict, Non
return exprs[0]

return ast.Or(exprs=exprs)


# ##############################################
# Revenue from data warehouse tables


# Given a data warehouse config and the base config, figure out what the currency should look like
def currency_expression_for_data_warehouse(
config: RevenueTrackingConfig, data_warehouse_config: RevenueTrackingDataWarehouseTable
) -> ast.Expr:
# Shouldn't happen but we need it here to make the type checker happy
if not data_warehouse_config.revenueCurrencyColumn:
return ast.Constant(value=(config.baseCurrency or DEFAULT_CURRENCY).value)

if data_warehouse_config.revenueCurrencyColumn.property:
return ast.Call(
name="upper",
args=[
ast.Field(chain=[data_warehouse_config.tableName, data_warehouse_config.revenueCurrencyColumn.property])
],
)

currency = data_warehouse_config.revenueCurrencyColumn.static or config.baseCurrency or DEFAULT_CURRENCY
return ast.Constant(value=currency.value)


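In other words, the currency for a warehouse table resolves with a fixed precedence: a per-row currency column (uppercased) wins, then a static currency on the table config, then the team's base currency, then the default. A plain-Python restatement of that precedence, purely illustrative:

# Illustrative only -- mirrors the precedence implemented above with HogQL ast nodes.
def resolve_currency(property_column, static_value, base_currency, default="USD"):
    if property_column:
        return f"upper({property_column})"  # read per row from the table
    return static_value or base_currency or default

# e.g. resolve_currency(None, "GBP", None) == "GBP"
# e.g. resolve_currency("currency", "GBP", "EUR") == "upper(currency)"
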
def revenue_expression_for_data_warehouse(
config: RevenueTrackingConfig,
data_warehouse_config: RevenueTrackingDataWarehouseTable,
do_currency_conversion: bool = True,
) -> ast.Expr:
# Convert the revenue to the base currency based on `data_warehouse_config.revenueCurrencyColumn`
# Otherwise, assume we're already in the base currency
# Also, assume that `base_currency` is USD by default, it'll be empty for most customers
if data_warehouse_config.revenueCurrencyColumn and do_currency_conversion:
value_expr = ast.Call(
name="if",
args=[
ast.Call(name="isNull", args=[currency_expression_for_data_warehouse(config, data_warehouse_config)]),
ast.Call(
name="toDecimal",
args=[
ast.Field(chain=[data_warehouse_config.tableName, data_warehouse_config.revenueColumn]),
ast.Constant(value=EXCHANGE_RATE_DECIMAL_PRECISION),
],
),
convert_currency_call(
ast.Field(chain=[data_warehouse_config.tableName, data_warehouse_config.revenueColumn]),
currency_expression_for_data_warehouse(config, data_warehouse_config),
ast.Constant(value=(config.baseCurrency or DEFAULT_CURRENCY).value),
ast.Call(
name="_toDate",
args=[
# Because we can have nullable timestamp columns, we need to handle that case
# by converting to a default value of 0
ast.Call(
name="ifNull",
args=[
ast.Field(
chain=[data_warehouse_config.tableName, data_warehouse_config.timestampColumn]
),
ast.Call(name="toDateTime", args=[ast.Constant(value=0)]),
],
)
],
),
),
],
)
else:
value_expr = ast.Call(
name="toDecimal",
args=[
ast.Field(chain=[data_warehouse_config.tableName, data_warehouse_config.revenueColumn]),
ast.Constant(value=EXCHANGE_RATE_DECIMAL_PRECISION),
],
)

return value_expr

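The snapshots later in this diff show what this compiles to: when the row's currency is NULL, the raw value is kept as a decimal; otherwise the amount is divided by the source currency's rate and multiplied by the base currency's rate for that row's date, with NULL timestamps defaulting to the epoch. A back-of-the-envelope sketch of the arithmetic, with rate_for as a hypothetical rate lookup standing in for the exchange-rate dictionary:

from datetime import datetime, timezone
from decimal import Decimal

def convert_revenue(amount, currency, ts, base, rate_for):
    # rate_for(code, day) -> Decimal exchange rate; hypothetical stand-in for dictGetOrDefault.
    day = (ts or datetime.fromtimestamp(0, tz=timezone.utc)).date()  # NULL timestamps fall back to epoch
    if currency is None:
        return Decimal(amount)  # no currency info: treat as already in the base currency
    # Same shape as multiplyDecimal(divideDecimal(amount, rate(currency)), rate(base)) in the SQL.
    return Decimal(amount) / rate_for(currency, day) * rate_for(base, day)
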
@@ -630,9 +630,11 @@ class TestDatabase(BaseTest, QueryMatchingTest):
with self.assertNumQueries(FuzzyInt(5, 7)):
create_hogql_database(team=self.team)

# We keep adding sources, credentials and tables, number of queries should be stable
def test_external_data_source_is_not_n_plus_1(self) -> None:
num_queries = FuzzyInt(5, 10)

for i in range(10):
# we keep adding sources, credentials and tables, number of queries should be stable
source = ExternalDataSource.objects.create(
team=self.team,
source_id=f"source_id_{i}",
@@ -662,10 +664,9 @@ class TestDatabase(BaseTest, QueryMatchingTest):
table=warehouse_table,
should_sync=True,
last_synced_at="2024-01-01",
# No status but should be completed because a data warehouse table already exists
)

with self.assertNumQueries(FuzzyInt(5, 7)):
with self.assertNumQueries(num_queries):
create_hogql_database(team=self.team)

def test_database_warehouse_joins_persons_poe_old_properties(self):

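The test leans on FuzzyInt so the assertion tolerates a small, bounded range of queries rather than one exact count. Assuming it behaves like the common Django testing helper of the same name (a guess, not PostHog's actual source), the idea is an int whose equality check accepts a range:

# Assumption: a FuzzyInt-style helper, shown only to explain the assertion above.
class FuzzyInt(int):
    def __new__(cls, lowest, highest):
        obj = super().__new__(cls, highest)
        obj.lowest, obj.highest = lowest, highest
        return obj

    def __eq__(self, other):
        # assertNumQueries compares the observed count with ==, so any
        # count within [lowest, highest] passes.
        return self.lowest <= other <= self.highest

With that, assertNumQueries(FuzzyInt(5, 10)) keeps passing while the loop piles on sources, which is exactly the N+1 regression the test guards against.
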
@@ -252,6 +252,7 @@ HOGQL_CLICKHOUSE_FUNCTIONS: dict[str, HogQLFunctionMeta] = {
signatures=[
((IntegerType(),), IntegerType()),
((FloatType(),), FloatType()),
((DecimalType(),), DecimalType()),
],
),
"abs": HogQLFunctionMeta(

@@ -378,7 +378,9 @@ def get_query_runner(
)

if kind == "RevenueExampleEventsQuery":
from .web_analytics.revenue_example_events_query_runner import RevenueExampleEventsQueryRunner
from products.revenue_analytics.backend.hogql_queries.revenue_example_events_query_runner import (
RevenueExampleEventsQueryRunner,
)

return RevenueExampleEventsQueryRunner(
query=query,
@@ -389,7 +391,7 @@ def get_query_runner(
)

if kind == "RevenueExampleDataWarehouseTablesQuery":
from .web_analytics.revenue_example_data_warehouse_tables_query_runner import (
from products.revenue_analytics.backend.hogql_queries.revenue_example_data_warehouse_tables_query_runner import (
RevenueExampleDataWarehouseTablesQueryRunner,
)


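Note the runner imports stay inside get_query_runner even after moving to the products package: deferring them keeps module import cheap and sidesteps circular imports between the query layer and the product backends. A minimal sketch of that dispatch pattern (the import path is the one from the hunk; the surrounding function is illustrative):

def get_query_runner(kind: str):
    if kind == "RevenueExampleEventsQuery":
        # Imported lazily, only when this query kind is actually requested.
        from products.revenue_analytics.backend.hogql_queries.revenue_example_events_query_runner import (
            RevenueExampleEventsQueryRunner,
        )

        return RevenueExampleEventsQueryRunner
    raise ValueError(f"Unsupported query kind: {kind}")
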
@@ -1,109 +0,0 @@
# serializer version: 1
# name: TestWebAnalyticsRevenue.test_dw_revenue_currency_property
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(toDateTime('2024-01-01 00:00:00', 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(toDateTime('2024-01-01 00:00:00', 'UTC'))), toStartOfDay(assumeNotNull(toDateTime('2024-01-02 23:59:59', 'UTC'))))), 1))) AS date,
arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
FROM
(SELECT sum(if(equals(e.currency, 'USD'), toDecimal64(e.revenue, 10), if(dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', e.currency, toDate(toTimeZone(e.timestamp, 'UTC')), toDecimal64(0, 10)) = 0, toDecimal64(0, 10), multiplyDecimal(divideDecimal(toDecimal64(e.revenue, 10), dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', e.currency, toDate(toTimeZone(e.timestamp, 'UTC')), toDecimal64(0, 10))), dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', 'USD', toDate(toTimeZone(e.timestamp, 'UTC')), toDecimal64(0, 10)))))) AS total,
toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start
FROM s3('test://localhost', 'test-key', 'test-secret', 'Parquet', '`id` String, `revenue` Float64, `currency` String, `timestamp` DateTime') AS e
WHERE and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(toDateTime('2024-01-01 00:00:00', 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(toDateTime('2024-01-02 23:59:59', 'UTC'))))
GROUP BY day_start)
GROUP BY day_start
ORDER BY day_start ASC)
ORDER BY arraySum(total) DESC
LIMIT 50000 SETTINGS readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
max_ast_elements=4000000,
max_expanded_ast_elements=4000000,
max_bytes_before_external_group_by=0,
transform_null_in=1
'''
# ---
# name: TestWebAnalyticsRevenue.test_dw_revenue_currency_static
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(toDateTime('2024-01-01 00:00:00', 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(toDateTime('2024-01-01 00:00:00', 'UTC'))), toStartOfDay(assumeNotNull(toDateTime('2024-01-02 23:59:59', 'UTC'))))), 1))) AS date,
arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
FROM
(SELECT sum(if(equals('GBP', 'USD'), toDecimal64(e.revenue, 10), if(dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', 'GBP', toDate(toTimeZone(e.timestamp, 'UTC')), toDecimal64(0, 10)) = 0, toDecimal64(0, 10), multiplyDecimal(divideDecimal(toDecimal64(e.revenue, 10), dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', 'GBP', toDate(toTimeZone(e.timestamp, 'UTC')), toDecimal64(0, 10))), dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', 'USD', toDate(toTimeZone(e.timestamp, 'UTC')), toDecimal64(0, 10)))))) AS total,
toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start
FROM s3('test://localhost', 'test-key', 'test-secret', 'Parquet', '`id` String, `revenue` Float64, `currency` String, `timestamp` DateTime') AS e
WHERE and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(toDateTime('2024-01-01 00:00:00', 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(toDateTime('2024-01-02 23:59:59', 'UTC'))))
GROUP BY day_start)
GROUP BY day_start
ORDER BY day_start ASC)
ORDER BY arraySum(total) DESC
LIMIT 50000 SETTINGS readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
max_ast_elements=4000000,
max_expanded_ast_elements=4000000,
max_bytes_before_external_group_by=0,
transform_null_in=1
'''
# ---
# name: TestWebAnalyticsRevenue.test_events_revenue_currency_property
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(toDateTime('2024-01-01 00:00:00', 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(toDateTime('2024-01-01 00:00:00', 'UTC'))), toStartOfDay(assumeNotNull(toDateTime('2024-01-02 23:59:59', 'UTC'))))), 1))) AS date,
arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
FROM
(SELECT sum(if(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'currency'), ''), 'null'), '^"|"$', ''), 'USD'), toDecimal64(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'revenue'), ''), 'null'), '^"|"$', ''), 10), if(dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'currency'), ''), 'null'), '^"|"$', ''), toDate(toTimeZone(e.timestamp, 'UTC')), toDecimal64(0, 10)) = 0, toDecimal64(0, 10), multiplyDecimal(divideDecimal(toDecimal64(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'revenue'), ''), 'null'), '^"|"$', ''), 10), dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'currency'), ''), 'null'), '^"|"$', ''), toDate(toTimeZone(e.timestamp, 'UTC')), toDecimal64(0, 10))), dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', 'USD', toDate(toTimeZone(e.timestamp, 'UTC')), toDecimal64(0, 10)))))) AS total,
toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start
FROM events AS e SAMPLE 1
WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(toDateTime('2024-01-01 00:00:00', 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(toDateTime('2024-01-02 23:59:59', 'UTC'))), equals(e.event, 'product_sold'))
GROUP BY day_start)
GROUP BY day_start
ORDER BY day_start ASC)
ORDER BY arraySum(total) DESC
LIMIT 50000 SETTINGS readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
max_ast_elements=4000000,
max_expanded_ast_elements=4000000,
max_bytes_before_external_group_by=0,
transform_null_in=1
'''
# ---
# name: TestWebAnalyticsRevenue.test_events_revenue_currency_static
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(toDateTime('2024-01-01 00:00:00', 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(toDateTime('2024-01-01 00:00:00', 'UTC'))), toStartOfDay(assumeNotNull(toDateTime('2024-01-02 23:59:59', 'UTC'))))), 1))) AS date,
arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
FROM
(SELECT sum(if(equals('GBP', 'USD'), toDecimal64(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'revenue'), ''), 'null'), '^"|"$', ''), 10), if(dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', 'GBP', toDate(toTimeZone(e.timestamp, 'UTC')), toDecimal64(0, 10)) = 0, toDecimal64(0, 10), multiplyDecimal(divideDecimal(toDecimal64(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'revenue'), ''), 'null'), '^"|"$', ''), 10), dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', 'GBP', toDate(toTimeZone(e.timestamp, 'UTC')), toDecimal64(0, 10))), dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', 'USD', toDate(toTimeZone(e.timestamp, 'UTC')), toDecimal64(0, 10)))))) AS total,
toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start
FROM events AS e SAMPLE 1
WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(toDateTime('2024-01-01 00:00:00', 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(toDateTime('2024-01-02 23:59:59', 'UTC'))), equals(e.event, 'product_sold'))
GROUP BY day_start)
GROUP BY day_start
ORDER BY day_start ASC)
ORDER BY arraySum(total) DESC
LIMIT 50000 SETTINGS readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
max_ast_elements=4000000,
max_expanded_ast_elements=4000000,
max_bytes_before_external_group_by=0,
transform_null_in=1
'''
# ---
@@ -1,216 +0,0 @@
# serializer version: 1
# name: TestRevenueExampleEventsQueryRunner.test_multiple_events
'''
SELECT tuple(events.uuid, events.event, events.distinct_id, events.properties) AS `tuple(uuid, event, distinct_id, properties)`,
events.event AS event,
multiIf(equals(events.event, 'purchase_a'), accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue_a'), ''), 'null'), '^"|"$', ''), 'Decimal64(10)'), equals(events.event, 'purchase_b'), accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue_b'), ''), 'null'), '^"|"$', ''), 'Decimal64(10)'), NULL) AS original_revenue,
multiIf(equals(events.event, 'purchase_a'), 'USD', equals(events.event, 'purchase_b'), 'USD', NULL) AS original_currency,
multiIf(equals(events.event, 'purchase_a'), if(isNull('USD'), accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue_a'), ''), 'null'), '^"|"$', ''), 'Decimal64(10)'), if(equals('USD', 'USD'), toDecimal64(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue_a'), ''), 'null'), '^"|"$', ''), 10), if(dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', 'USD', toDate(toTimeZone(events.timestamp, 'UTC')), toDecimal64(0, 10)) = 0, toDecimal64(0, 10), multiplyDecimal(divideDecimal(toDecimal64(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue_a'), ''), 'null'), '^"|"$', ''), 10), dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', 'USD', toDate(toTimeZone(events.timestamp, 'UTC')), toDecimal64(0, 10))), dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', 'USD', toDate(toTimeZone(events.timestamp, 'UTC')), toDecimal64(0, 10)))))), equals(events.event, 'purchase_b'), if(isNull('USD'), accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue_b'), ''), 'null'), '^"|"$', ''), 'Decimal64(10)'), if(equals('USD', 'USD'), toDecimal64(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue_b'), ''), 'null'), '^"|"$', ''), 10), if(dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', 'USD', toDate(toTimeZone(events.timestamp, 'UTC')), toDecimal64(0, 10)) = 0, toDecimal64(0, 10), multiplyDecimal(divideDecimal(toDecimal64(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue_b'), ''), 'null'), '^"|"$', ''), 10), dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', 'USD', toDate(toTimeZone(events.timestamp, 'UTC')), toDecimal64(0, 10))), dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', 'USD', toDate(toTimeZone(events.timestamp, 'UTC')), toDecimal64(0, 10)))))), NULL) AS revenue,
'USD' AS currency,
tuple(events__person.id, events__person.created_at, events.distinct_id, events__person.properties) AS `tuple(person.id, person.created_at, distinct_id, person.properties)`,
nullIf(nullIf(events.`$session_id`, ''), 'null') AS session_id,
toTimeZone(events.timestamp, 'UTC') AS timestamp
FROM events
LEFT OUTER JOIN
(SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id,
person_distinct_id_overrides.distinct_id AS distinct_id
FROM person_distinct_id_overrides
WHERE equals(person_distinct_id_overrides.team_id, 99999)
GROUP BY person_distinct_id_overrides.distinct_id
HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
LEFT JOIN
(SELECT person.id AS id,
toTimeZone(person.created_at, 'UTC') AS created_at,
person.properties AS properties
FROM person
WHERE and(equals(person.team_id, 99999), in(tuple(person.id, person.version),
(SELECT person.id AS id, max(person.version) AS version
FROM person
WHERE equals(person.team_id, 99999)
GROUP BY person.id
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0))))) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
WHERE and(equals(events.team_id, 99999), or(equals(events.event, 'purchase_a'), equals(events.event, 'purchase_b')), isNotNull(revenue))
ORDER BY toTimeZone(events.timestamp, 'UTC') DESC
LIMIT 101
OFFSET 0 SETTINGS readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
max_ast_elements=4000000,
max_expanded_ast_elements=4000000,
max_bytes_before_external_group_by=0,
transform_null_in=1
'''
# ---
# name: TestRevenueExampleEventsQueryRunner.test_no_crash_when_no_data
'''
SELECT tuple(events.uuid, events.event, events.distinct_id, events.properties) AS `tuple(uuid, event, distinct_id, properties)`,
events.event AS event,
NULL AS original_revenue,
NULL AS original_currency,
NULL AS revenue,
'USD' AS currency,
tuple(events__person.id, events__person.created_at, events.distinct_id, events__person.properties) AS `tuple(person.id, person.created_at, distinct_id, person.properties)`,
nullIf(nullIf(events.`$session_id`, ''), 'null') AS session_id,
toTimeZone(events.timestamp, 'UTC') AS timestamp
FROM events
LEFT OUTER JOIN
(SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id,
person_distinct_id_overrides.distinct_id AS distinct_id
FROM person_distinct_id_overrides
WHERE equals(person_distinct_id_overrides.team_id, 99999)
GROUP BY person_distinct_id_overrides.distinct_id
HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
LEFT JOIN
(SELECT person.id AS id,
toTimeZone(person.created_at, 'UTC') AS created_at,
person.properties AS properties
FROM person
WHERE and(equals(person.team_id, 99999), in(tuple(person.id, person.version),
(SELECT person.id AS id, max(person.version) AS version
FROM person
WHERE equals(person.team_id, 99999)
GROUP BY person.id
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0))))) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
WHERE and(equals(events.team_id, 99999), 0, isNotNull(revenue))
ORDER BY toTimeZone(events.timestamp, 'UTC') DESC
LIMIT 101
OFFSET 0 SETTINGS readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
max_ast_elements=4000000,
max_expanded_ast_elements=4000000,
max_bytes_before_external_group_by=0,
transform_null_in=1
'''
# ---
# name: TestRevenueExampleEventsQueryRunner.test_revenue_currency_property
'''
SELECT tuple(events.uuid, events.event, events.distinct_id, events.properties) AS `tuple(uuid, event, distinct_id, properties)`,
events.event AS event,
multiIf(equals(events.event, 'purchase_a'), accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue_a'), ''), 'null'), '^"|"$', ''), 'Decimal64(10)'), equals(events.event, 'purchase_b'), accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue_b'), ''), 'null'), '^"|"$', ''), 'Decimal64(10)'), NULL) AS original_revenue,
multiIf(equals(events.event, 'purchase_a'), 'GBP', equals(events.event, 'purchase_b'), upper(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'currency_b'), ''), 'null'), '^"|"$', '')), NULL) AS original_currency,
multiIf(equals(events.event, 'purchase_a'), if(isNull('GBP'), accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue_a'), ''), 'null'), '^"|"$', ''), 'Decimal64(10)'), if(equals('GBP', 'EUR'), toDecimal64(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue_a'), ''), 'null'), '^"|"$', ''), 10), if(dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', 'GBP', toDate(toTimeZone(events.timestamp, 'UTC')), toDecimal64(0, 10)) = 0, toDecimal64(0, 10), multiplyDecimal(divideDecimal(toDecimal64(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue_a'), ''), 'null'), '^"|"$', ''), 10), dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', 'GBP', toDate(toTimeZone(events.timestamp, 'UTC')), toDecimal64(0, 10))), dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', 'EUR', toDate(toTimeZone(events.timestamp, 'UTC')), toDecimal64(0, 10)))))), equals(events.event, 'purchase_b'), if(isNull(upper(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'currency_b'), ''), 'null'), '^"|"$', ''))), accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue_b'), ''), 'null'), '^"|"$', ''), 'Decimal64(10)'), if(equals(upper(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'currency_b'), ''), 'null'), '^"|"$', '')), 'EUR'), toDecimal64(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue_b'), ''), 'null'), '^"|"$', ''), 10), if(dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', upper(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'currency_b'), ''), 'null'), '^"|"$', '')), toDate(toTimeZone(events.timestamp, 'UTC')), toDecimal64(0, 10)) = 0, toDecimal64(0, 10), multiplyDecimal(divideDecimal(toDecimal64(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue_b'), ''), 'null'), '^"|"$', ''), 10), dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', upper(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'currency_b'), ''), 'null'), '^"|"$', '')), toDate(toTimeZone(events.timestamp, 'UTC')), toDecimal64(0, 10))), dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', 'EUR', toDate(toTimeZone(events.timestamp, 'UTC')), toDecimal64(0, 10)))))), NULL) AS revenue,
'EUR' AS currency,
tuple(events__person.id, events__person.created_at, events.distinct_id, events__person.properties) AS `tuple(person.id, person.created_at, distinct_id, person.properties)`,
nullIf(nullIf(events.`$session_id`, ''), 'null') AS session_id,
toTimeZone(events.timestamp, 'UTC') AS timestamp
FROM events
LEFT OUTER JOIN
(SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id,
person_distinct_id_overrides.distinct_id AS distinct_id
FROM person_distinct_id_overrides
WHERE equals(person_distinct_id_overrides.team_id, 99999)
GROUP BY person_distinct_id_overrides.distinct_id
HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
INNER JOIN
(SELECT person.id AS id,
toTimeZone(person.created_at, 'UTC') AS created_at,
person.properties AS properties
FROM person
WHERE and(equals(person.team_id, 99999), in(tuple(person.id, person.version),
(SELECT person.id AS id, max(person.version) AS version
FROM person
WHERE equals(person.team_id, 99999)
GROUP BY person.id
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0))))) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
WHERE and(equals(events.team_id, 99999), or(equals(events.event, 'purchase_a'), equals(events.event, 'purchase_b')), isNotNull(revenue))
ORDER BY toTimeZone(events.timestamp, 'UTC') DESC
LIMIT 101
OFFSET 0 SETTINGS readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
max_ast_elements=4000000,
max_expanded_ast_elements=4000000,
max_bytes_before_external_group_by=0,
transform_null_in=1
'''
# ---
# name: TestRevenueExampleEventsQueryRunner.test_revenue_currency_property_without_feature_flag
'''
SELECT tuple(events.uuid, events.event, events.distinct_id, events.properties) AS `tuple(uuid, event, distinct_id, properties)`,
events.event AS event,
multiIf(equals(events.event, 'purchase_a'), accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue_a'), ''), 'null'), '^"|"$', ''), 'Decimal64(10)'), equals(events.event, 'purchase_b'), accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue_b'), ''), 'null'), '^"|"$', ''), 'Decimal64(10)'), NULL) AS original_revenue,
multiIf(equals(events.event, 'purchase_a'), 'GBP', equals(events.event, 'purchase_b'), upper(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'currency_b'), ''), 'null'), '^"|"$', '')), NULL) AS original_currency,
multiIf(equals(events.event, 'purchase_a'), if(isNull('GBP'), accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue_a'), ''), 'null'), '^"|"$', ''), 'Decimal64(10)'), if(equals('GBP', 'EUR'), toDecimal64(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue_a'), ''), 'null'), '^"|"$', ''), 10), if(dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', 'GBP', toDate(toTimeZone(events.timestamp, 'UTC')), toDecimal64(0, 10)) = 0, toDecimal64(0, 10), multiplyDecimal(divideDecimal(toDecimal64(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue_a'), ''), 'null'), '^"|"$', ''), 10), dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', 'GBP', toDate(toTimeZone(events.timestamp, 'UTC')), toDecimal64(0, 10))), dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', 'EUR', toDate(toTimeZone(events.timestamp, 'UTC')), toDecimal64(0, 10)))))), equals(events.event, 'purchase_b'), if(isNull(upper(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'currency_b'), ''), 'null'), '^"|"$', ''))), accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue_b'), ''), 'null'), '^"|"$', ''), 'Decimal64(10)'), if(equals(upper(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'currency_b'), ''), 'null'), '^"|"$', '')), 'EUR'), toDecimal64(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue_b'), ''), 'null'), '^"|"$', ''), 10), if(dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', upper(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'currency_b'), ''), 'null'), '^"|"$', '')), toDate(toTimeZone(events.timestamp, 'UTC')), toDecimal64(0, 10)) = 0, toDecimal64(0, 10), multiplyDecimal(divideDecimal(toDecimal64(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue_b'), ''), 'null'), '^"|"$', ''), 10), dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', upper(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'currency_b'), ''), 'null'), '^"|"$', '')), toDate(toTimeZone(events.timestamp, 'UTC')), toDecimal64(0, 10))), dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', 'EUR', toDate(toTimeZone(events.timestamp, 'UTC')), toDecimal64(0, 10)))))), NULL) AS revenue,
'EUR' AS currency,
tuple(events__person.id, events__person.created_at, events.distinct_id, events__person.properties) AS `tuple(person.id, person.created_at, distinct_id, person.properties)`,
nullIf(nullIf(events.`$session_id`, ''), 'null') AS session_id,
toTimeZone(events.timestamp, 'UTC') AS timestamp
FROM events
LEFT OUTER JOIN
(SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id,
person_distinct_id_overrides.distinct_id AS distinct_id
FROM person_distinct_id_overrides
WHERE equals(person_distinct_id_overrides.team_id, 99999)
GROUP BY person_distinct_id_overrides.distinct_id
HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
LEFT JOIN
(SELECT person.id AS id,
toTimeZone(person.created_at, 'UTC') AS created_at,
person.properties AS properties
FROM person
WHERE and(equals(person.team_id, 99999), in(tuple(person.id, person.version),
(SELECT person.id AS id, max(person.version) AS version
FROM person
WHERE equals(person.team_id, 99999)
GROUP BY person.id
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0))))) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
WHERE and(equals(events.team_id, 99999), or(equals(events.event, 'purchase_a'), equals(events.event, 'purchase_b')), isNotNull(revenue))
ORDER BY toTimeZone(events.timestamp, 'UTC') DESC
LIMIT 101
OFFSET 0 SETTINGS readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
max_ast_elements=4000000,
max_expanded_ast_elements=4000000,
max_bytes_before_external_group_by=0,
transform_null_in=1
'''
# ---
# name: TestRevenueExampleEventsQueryRunner.test_single_event
'''
SELECT tuple(events.uuid, events.event, events.distinct_id, events.properties) AS `tuple(uuid, event, distinct_id, properties)`,
events.event AS event,
multiIf(equals(events.event, 'purchase'), accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue'), ''), 'null'), '^"|"$', ''), 'Decimal64(10)'), NULL) AS original_revenue,
multiIf(equals(events.event, 'purchase'), 'USD', NULL) AS original_currency,
multiIf(equals(events.event, 'purchase'), if(isNull('USD'), accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue'), ''), 'null'), '^"|"$', ''), 'Decimal64(10)'), if(equals('USD', 'USD'), toDecimal64(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue'), ''), 'null'), '^"|"$', ''), 10), if(dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', 'USD', toDate(toTimeZone(events.timestamp, 'UTC')), toDecimal64(0, 10)) = 0, toDecimal64(0, 10), multiplyDecimal(divideDecimal(toDecimal64(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'revenue'), ''), 'null'), '^"|"$', ''), 10), dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', 'USD', toDate(toTimeZone(events.timestamp, 'UTC')), toDecimal64(0, 10))), dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', 'USD', toDate(toTimeZone(events.timestamp, 'UTC')), toDecimal64(0, 10)))))), NULL) AS revenue,
'USD' AS currency,
tuple(events__person.id, events__person.created_at, events.distinct_id, events__person.properties) AS `tuple(person.id, person.created_at, distinct_id, person.properties)`,
nullIf(nullIf(events.`$session_id`, ''), 'null') AS session_id,
toTimeZone(events.timestamp, 'UTC') AS timestamp
FROM events
LEFT OUTER JOIN
(SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id,
person_distinct_id_overrides.distinct_id AS distinct_id
FROM person_distinct_id_overrides
WHERE equals(person_distinct_id_overrides.team_id, 99999)
GROUP BY person_distinct_id_overrides.distinct_id
HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
LEFT JOIN
(SELECT person.id AS id,
toTimeZone(person.created_at, 'UTC') AS created_at,
person.properties AS properties
FROM person
WHERE and(equals(person.team_id, 99999), in(tuple(person.id, person.version),
(SELECT person.id AS id, max(person.version) AS version
FROM person
WHERE equals(person.team_id, 99999)
GROUP BY person.id
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0))))) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
WHERE and(equals(events.team_id, 99999), equals(events.event, 'purchase'), isNotNull(revenue))
ORDER BY toTimeZone(events.timestamp, 'UTC') DESC
LIMIT 101
OFFSET 0 SETTINGS readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
max_ast_elements=4000000,
max_expanded_ast_elements=4000000,
max_bytes_before_external_group_by=0,
transform_null_in=1
'''
# ---
@@ -62,16 +62,16 @@
|
||||
# ---
|
||||
# name: TestWebOverviewQueryRunner.test_comparison
|
||||
'''
|
||||
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-06 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-13 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_users,
|
||||
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-05 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_users,
|
||||
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-06 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-13 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS total_filtered_pageview_count,
|
||||
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-05 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_filtered_pageview_count,
|
||||
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-06 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-13 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_sessions,
|
||||
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-05 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_sessions,
|
||||
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-06 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-13 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS avg_duration_s,
|
||||
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-05 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_avg_duration_s,
|
||||
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-06 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-13 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS bounce_rate,
|
||||
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-05 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_bounce_rate
|
||||
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-06 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-13 23:59:59', 'UTC'))), 0))) AS unique_users,
|
||||
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-05 23:59:59', 'UTC'))), 0))) AS previous_unique_users,
|
||||
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-06 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-13 23:59:59', 'UTC'))), 0))) AS total_filtered_pageview_count,
|
||||
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-05 23:59:59', 'UTC'))), 0))) AS previous_filtered_pageview_count,
|
||||
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-06 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-13 23:59:59', 'UTC'))), 0))) AS unique_sessions,
|
||||
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-05 23:59:59', 'UTC'))), 0))) AS previous_unique_sessions,
|
||||
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-06 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-13 23:59:59', 'UTC'))), 0))) AS avg_duration_s,
|
||||
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-05 23:59:59', 'UTC'))), 0))) AS prev_avg_duration_s,
|
||||
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-06 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-13 23:59:59', 'UTC'))), 0))) AS bounce_rate,
|
||||
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-05 23:59:59', 'UTC'))), 0))) AS prev_bounce_rate
|
||||
FROM
|
||||
(SELECT any(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id)) AS session_person_id,
|
||||
events__session.session_id AS session_id,
|
||||
@@ -112,12 +112,12 @@
|
||||
# ---
|
||||
# name: TestWebOverviewQueryRunner.test_conversion_goal_no_conversions
|
||||
'''
|
||||
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_users,
|
||||
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_users,
|
||||
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS total_conversion_count,
|
||||
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_total_conversion_count,
|
||||
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_conversions,
|
||||
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_conversions,
|
||||
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_users,
|
||||
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_users,
|
||||
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS total_conversion_count,
|
||||
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_total_conversion_count,
|
||||
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_conversions,
|
||||
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_conversions,
|
||||
divide(unique_conversions, unique_users) AS conversion_rate,
|
||||
divide(previous_unique_conversions, previous_unique_users) AS previous_conversion_rate
|
||||
FROM
|
||||
@@ -160,12 +160,12 @@
|
||||
# ---
|
||||
# name: TestWebOverviewQueryRunner.test_conversion_goal_one_autocapture_conversion
|
||||
'''
|
||||
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_users,
|
||||
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_users,
|
||||
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS total_conversion_count,
|
||||
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_total_conversion_count,
|
||||
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_conversions,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_conversions,
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_users,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS total_conversion_count,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_total_conversion_count,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_conversions,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_conversions,
divide(unique_conversions, unique_users) AS conversion_rate,
divide(previous_unique_conversions, previous_unique_users) AS previous_conversion_rate
FROM
@@ -205,12 +205,12 @@
# ---
# name: TestWebOverviewQueryRunner.test_conversion_goal_one_custom_action_conversion
'''
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_users,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS total_conversion_count,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_total_conversion_count,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_conversions,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_conversions,
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_users,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS total_conversion_count,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_total_conversion_count,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_conversions,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_conversions,
divide(unique_conversions, unique_users) AS conversion_rate,
divide(previous_unique_conversions, previous_unique_users) AS previous_conversion_rate
FROM
@@ -250,12 +250,12 @@
# ---
# name: TestWebOverviewQueryRunner.test_conversion_goal_one_custom_event_conversion
'''
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_users,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS total_conversion_count,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_total_conversion_count,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_conversions,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_conversions,
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_users,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS total_conversion_count,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_total_conversion_count,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_conversions,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_conversions,
divide(unique_conversions, unique_users) AS conversion_rate,
divide(previous_unique_conversions, previous_unique_users) AS previous_conversion_rate
FROM
@@ -295,12 +295,12 @@
# ---
# name: TestWebOverviewQueryRunner.test_conversion_goal_one_pageview_conversion
'''
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_users,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS total_conversion_count,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_total_conversion_count,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_conversions,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_conversions,
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_users,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS total_conversion_count,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_total_conversion_count,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_conversions,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_conversions,
divide(unique_conversions, unique_users) AS conversion_rate,
divide(previous_unique_conversions, previous_unique_users) AS previous_conversion_rate
FROM
@@ -343,12 +343,12 @@
# ---
# name: TestWebOverviewQueryRunner.test_conversion_rate
'''
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_users,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS total_conversion_count,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_total_conversion_count,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_conversions,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_conversions,
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_users,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS total_conversion_count,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_total_conversion_count,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_conversions,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_conversions,
divide(unique_conversions, unique_users) AS conversion_rate,
divide(previous_unique_conversions, previous_unique_users) AS previous_conversion_rate
FROM
@@ -391,16 +391,16 @@
# ---
# name: TestWebOverviewQueryRunner.test_correctly_counts_pageviews_in_long_running_session
'''
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_users,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS total_filtered_pageview_count,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_filtered_pageview_count,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_sessions,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_sessions,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS avg_duration_s,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_avg_duration_s,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS bounce_rate,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_bounce_rate
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_users,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS total_filtered_pageview_count,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_filtered_pageview_count,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_sessions,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_sessions,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS avg_duration_s,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS prev_avg_duration_s,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS bounce_rate,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS prev_bounce_rate
FROM
(SELECT any(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id)) AS session_person_id,
events__session.session_id AS session_id,
@@ -441,16 +441,16 @@
# ---
# name: TestWebOverviewQueryRunner.test_dont_filter_test_accounts
'''
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_users,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS total_filtered_pageview_count,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_filtered_pageview_count,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_sessions,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_sessions,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS avg_duration_s,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_avg_duration_s,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS bounce_rate,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_bounce_rate
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_users,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS total_filtered_pageview_count,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_filtered_pageview_count,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_sessions,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_sessions,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS avg_duration_s,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS prev_avg_duration_s,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS bounce_rate,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS prev_bounce_rate
FROM
(SELECT any(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id)) AS session_person_id,
events__session.session_id AS session_id,
@@ -491,16 +491,16 @@
# ---
# name: TestWebOverviewQueryRunner.test_filter_cohort
'''
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_users,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS total_filtered_pageview_count,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_filtered_pageview_count,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_sessions,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_sessions,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS avg_duration_s,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_avg_duration_s,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS bounce_rate,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_bounce_rate
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_users,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS total_filtered_pageview_count,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_filtered_pageview_count,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_sessions,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_sessions,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS avg_duration_s,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS prev_avg_duration_s,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS bounce_rate,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS prev_bounce_rate
FROM
(SELECT any(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id)) AS session_person_id,
events__session.session_id AS session_id,
@@ -546,16 +546,16 @@
# ---
# name: TestWebOverviewQueryRunner.test_filter_test_accounts
'''
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_users,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS total_filtered_pageview_count,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_filtered_pageview_count,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_sessions,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_sessions,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS avg_duration_s,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_avg_duration_s,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS bounce_rate,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_bounce_rate
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_users,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS total_filtered_pageview_count,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_filtered_pageview_count,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_sessions,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_sessions,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS avg_duration_s,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS prev_avg_duration_s,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS bounce_rate,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS prev_bounce_rate
FROM
(SELECT any(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id)) AS session_person_id,
events__session.session_id AS session_id,
@@ -606,16 +606,16 @@
# ---
# name: TestWebOverviewQueryRunner.test_increase_in_users
'''
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_users,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS total_filtered_pageview_count,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_filtered_pageview_count,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_sessions,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_sessions,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS avg_duration_s,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_avg_duration_s,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS bounce_rate,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_bounce_rate
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0))) AS previous_unique_users,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0))) AS total_filtered_pageview_count,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0))) AS previous_filtered_pageview_count,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0))) AS unique_sessions,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0))) AS previous_unique_sessions,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0))) AS avg_duration_s,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0))) AS prev_avg_duration_s,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0))) AS bounce_rate,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0))) AS prev_bounce_rate
FROM
(SELECT any(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id)) AS session_person_id,
events__session.session_id AS session_id,
@@ -656,16 +656,16 @@
# ---
# name: TestWebOverviewQueryRunner.test_increase_in_users_using_mobile
'''
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_users,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS total_filtered_pageview_count,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_filtered_pageview_count,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_sessions,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_sessions,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS avg_duration_s,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_avg_duration_s,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS bounce_rate,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_bounce_rate
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0))) AS previous_unique_users,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0))) AS total_filtered_pageview_count,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0))) AS previous_filtered_pageview_count,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0))) AS unique_sessions,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0))) AS previous_unique_sessions,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0))) AS avg_duration_s,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0))) AS prev_avg_duration_s,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0))) AS bounce_rate,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0))) AS prev_bounce_rate
FROM
(SELECT any(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id)) AS session_person_id,
events__session.session_id AS session_id,
@@ -706,16 +706,16 @@
# ---
# name: TestWebOverviewQueryRunner.test_limit_is_context_aware
'''
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_users,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS total_filtered_pageview_count,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_filtered_pageview_count,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_sessions,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_sessions,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS avg_duration_s,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_avg_duration_s,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS bounce_rate,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_bounce_rate
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_users,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS total_filtered_pageview_count,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_filtered_pageview_count,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_sessions,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_sessions,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS avg_duration_s,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS prev_avg_duration_s,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS bounce_rate,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS prev_bounce_rate
FROM
(SELECT any(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id)) AS session_person_id,
events__session.session_id AS session_id,
@@ -756,16 +756,16 @@
# ---
# name: TestWebOverviewQueryRunner.test_no_crash_when_no_data
'''
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_users,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS total_filtered_pageview_count,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_filtered_pageview_count,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_sessions,
|
||||
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_sessions,
|
||||
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS avg_duration_s,
|
||||
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_avg_duration_s,
|
||||
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS bounce_rate,
|
||||
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_bounce_rate
|
||||
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0))) AS unique_users,
|
||||
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0))) AS previous_unique_users,
|
||||
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0))) AS total_filtered_pageview_count,
|
||||
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0))) AS previous_filtered_pageview_count,
|
||||
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0))) AS unique_sessions,
|
||||
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0))) AS previous_unique_sessions,
|
||||
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0))) AS avg_duration_s,
|
||||
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0))) AS prev_avg_duration_s,
|
||||
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0))) AS bounce_rate,
|
||||
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0))) AS prev_bounce_rate
|
||||
FROM
|
||||
(SELECT any(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id)) AS session_person_id,
|
||||
events__session.session_id AS session_id,
|
||||
@@ -806,16 +806,16 @@
|
||||
# ---
# name: TestWebOverviewQueryRunner.test_no_crash_when_no_data.1
'''
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_users,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS total_filtered_pageview_count,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_filtered_pageview_count,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_sessions,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_sessions,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS avg_duration_s,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_avg_duration_s,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS bounce_rate,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_bounce_rate
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0))) AS previous_unique_users,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0))) AS total_filtered_pageview_count,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0))) AS previous_filtered_pageview_count,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0))) AS unique_sessions,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0))) AS previous_unique_sessions,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0))) AS avg_duration_s,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0))) AS prev_avg_duration_s,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0))) AS bounce_rate,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0))) AS prev_bounce_rate
FROM
(SELECT any(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id)) AS session_person_id,
events__session.session_id AS session_id,
@@ -856,12 +856,12 @@
# ---
# name: TestWebOverviewQueryRunner.test_no_crash_when_no_data.2
'''
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_users,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS total_conversion_count,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_total_conversion_count,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_conversions,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_conversions,
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0))) AS previous_unique_users,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0))) AS total_conversion_count,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0))) AS previous_total_conversion_count,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-08 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-15 23:59:59', 'UTC'))), 0))) AS unique_conversions,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-30 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-07 23:59:59', 'UTC'))), 0))) AS previous_unique_conversions,
divide(unique_conversions, unique_users) AS conversion_rate,
divide(previous_unique_conversions, previous_unique_users) AS previous_conversion_rate
FROM
@@ -904,12 +904,12 @@
# ---
# name: TestWebOverviewQueryRunner.test_no_revenue_when_action_conversion_goal_set_but_include_revenue_disabled
'''
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_users,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS total_conversion_count,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_total_conversion_count,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_conversions,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_conversions,
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_users,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS total_conversion_count,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_total_conversion_count,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_conversions,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_conversions,
divide(unique_conversions, unique_users) AS conversion_rate,
divide(previous_unique_conversions, previous_unique_users) AS previous_conversion_rate
FROM
@@ -949,12 +949,12 @@
# ---
# name: TestWebOverviewQueryRunner.test_no_revenue_when_event_conversion_goal_set_but_include_revenue_disabled
'''
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_users,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS total_conversion_count,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_total_conversion_count,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_conversions,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_conversions,
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_users,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS total_conversion_count,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_total_conversion_count,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_conversions,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_conversions,
divide(unique_conversions, unique_users) AS conversion_rate,
divide(previous_unique_conversions, previous_unique_users) AS previous_conversion_rate
FROM
@@ -994,16 +994,16 @@
# ---
# name: TestWebOverviewQueryRunner.test_revenue
'''
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_users,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS total_filtered_pageview_count,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_filtered_pageview_count,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_sessions,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_sessions,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS avg_duration_s,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_avg_duration_s,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS bounce_rate,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_bounce_rate,
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_users,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS total_filtered_pageview_count,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_filtered_pageview_count,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_sessions,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_sessions,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS avg_duration_s,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS prev_avg_duration_s,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS bounce_rate,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS prev_bounce_rate,
sumIf(session_revenue, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS revenue,
sumIf(session_revenue, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_revenue
FROM
@@ -1047,16 +1047,16 @@
# ---
# name: TestWebOverviewQueryRunner.test_revenue_conversion_event
'''
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_users,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS total_conversion_count,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_total_conversion_count,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_conversions,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_conversions,
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_users,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS total_conversion_count,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_total_conversion_count,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_conversions,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_conversions,
divide(unique_conversions, unique_users) AS conversion_rate,
divide(previous_unique_conversions, previous_unique_users) AS previous_conversion_rate,
sumIf(session_conversion_revenue, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS conversion_revenue,
sumIf(session_conversion_revenue, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_conversion_revenue
sumIf(session_conversion_revenue, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS conversion_revenue,
sumIf(session_conversion_revenue, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_conversion_revenue
FROM
(SELECT any(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id)) AS session_person_id,
events__session.session_id AS session_id,
@@ -1095,16 +1095,16 @@
# ---
# name: TestWebOverviewQueryRunner.test_revenue_conversion_event_with_multiple_revenue_events
'''
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_users,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS total_conversion_count,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_total_conversion_count,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_conversions,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_conversions,
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_users,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS total_conversion_count,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_total_conversion_count,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_conversions,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_conversions,
divide(unique_conversions, unique_users) AS conversion_rate,
divide(previous_unique_conversions, previous_unique_users) AS previous_conversion_rate,
sumIf(session_conversion_revenue, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS conversion_revenue,
sumIf(session_conversion_revenue, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_conversion_revenue
sumIf(session_conversion_revenue, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS conversion_revenue,
sumIf(session_conversion_revenue, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_conversion_revenue
FROM
(SELECT any(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id)) AS session_person_id,
events__session.session_id AS session_id,
@@ -1143,16 +1143,16 @@
# ---
# name: TestWebOverviewQueryRunner.test_revenue_conversion_no_config
'''
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_users,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS total_conversion_count,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_total_conversion_count,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_conversions,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_conversions,
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_users,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS total_conversion_count,
sumIf(conversion_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_total_conversion_count,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_conversions,
uniqIf(conversion_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_conversions,
divide(unique_conversions, unique_users) AS conversion_rate,
divide(previous_unique_conversions, previous_unique_users) AS previous_conversion_rate,
sumIf(session_conversion_revenue, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS conversion_revenue,
sumIf(session_conversion_revenue, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_conversion_revenue
sumIf(session_conversion_revenue, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS conversion_revenue,
sumIf(session_conversion_revenue, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_conversion_revenue
FROM
(SELECT any(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id)) AS session_person_id,
events__session.session_id AS session_id,
@@ -1191,16 +1191,16 @@
# ---
# name: TestWebOverviewQueryRunner.test_revenue_multiple_events
'''
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_users,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS total_filtered_pageview_count,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_filtered_pageview_count,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_sessions,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_sessions,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS avg_duration_s,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_avg_duration_s,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS bounce_rate,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_bounce_rate,
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_users,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS total_filtered_pageview_count,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_filtered_pageview_count,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_sessions,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_sessions,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS avg_duration_s,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS prev_avg_duration_s,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS bounce_rate,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS prev_bounce_rate,
sumIf(session_revenue, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS revenue,
sumIf(session_revenue, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_revenue
FROM
@@ -1244,16 +1244,16 @@
# ---
# name: TestWebOverviewQueryRunner.test_revenue_no_config
'''
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_users,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS total_filtered_pageview_count,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_filtered_pageview_count,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS unique_sessions,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS previous_unique_sessions,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS avg_duration_s,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_avg_duration_s,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS bounce_rate,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0), isNotNull(session_id))) AS prev_bounce_rate,
SELECT uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_users,
uniqIf(session_person_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_users,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS total_filtered_pageview_count,
sumIf(filtered_pageview_count, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_filtered_pageview_count,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS unique_sessions,
uniqIf(session_id, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_unique_sessions,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS avg_duration_s,
avgIf(session_duration, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS prev_avg_duration_s,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS bounce_rate,
avgIf(is_bounce, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS prev_bounce_rate,
sumIf(session_revenue, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-12-01 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-12-03 23:59:59', 'UTC'))), 0))) AS revenue,
sumIf(session_revenue, and(ifNull(greaterOrEquals(start_timestamp, assumeNotNull(toDateTime('2023-11-28 00:00:00', 'UTC'))), 0), ifNull(less(start_timestamp, assumeNotNull(toDateTime('2023-11-30 23:59:59', 'UTC'))), 0))) AS previous_revenue
FROM
@@ -1,259 +0,0 @@
from typing import Optional
import os

import pytest
from freezegun import freeze_time
from unittest.mock import patch

from posthog.hogql.constants import LimitContext
from posthog.hogql_queries.web_analytics.revenue_example_data_warehouse_tables_query_runner import (
    RevenueExampleDataWarehouseTablesQueryRunner,
)
from posthog.schema import (
    RevenueExampleDataWarehouseTablesQuery,
    RevenueTrackingConfig,
    RevenueCurrencyPropertyConfig,
    RevenueExampleDataWarehouseTablesQueryResponse,
    RevenueTrackingDataWarehouseTable,
    CurrencyCode,
)
from posthog.test.base import (
    APIBaseTest,
    ClickhouseTestMixin,
    snapshot_clickhouse_queries,
)
from posthog.warehouse.models import (
    DataWarehouseTable,
    DataWarehouseCredential,
)

EMPTY_REVENUE_TRACKING_CONFIG = RevenueTrackingConfig(baseCurrency=CurrencyCode.GBP, events=[], dataWarehouseTables=[])

SINGLE_TABLE_REVENUE_TRACKING_CONFIG = RevenueTrackingConfig(
    baseCurrency=CurrencyCode.GBP,
    events=[],
    dataWarehouseTables=[
        RevenueTrackingDataWarehouseTable(
            tableName="database_with_revenue_column",
            distinctIdColumn="id",
            revenueColumn="revenue",
            timestampColumn="timestamp",
        )
    ],
)

MULTIPLE_TABLES_REVENUE_TRACKING_CONFIG = RevenueTrackingConfig(
    baseCurrency=CurrencyCode.GBP,
    events=[],
    dataWarehouseTables=[
        RevenueTrackingDataWarehouseTable(
            tableName="database_with_revenue_column_a",
            distinctIdColumn="id",
            revenueColumn="revenue_a",
            timestampColumn="timestamp",
        ),
        RevenueTrackingDataWarehouseTable(
            tableName="database_with_revenue_column_b",
            distinctIdColumn="id",
            revenueColumn="revenue_b",
            timestampColumn="timestamp",
            revenueCurrencyColumn=RevenueCurrencyPropertyConfig(static=CurrencyCode.EUR),
        ),
        RevenueTrackingDataWarehouseTable(
            tableName="database_with_revenue_column_c",
            distinctIdColumn="id",
            revenueColumn="revenue_c",
            timestampColumn="timestamp",
            revenueCurrencyColumn=RevenueCurrencyPropertyConfig(property="currency"),
        ),
    ],
)
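
# Illustration (added for clarity, not part of the original test, and not PostHog's
# runtime code - the real resolution happens inside the generated HogQL query):
# how a revenueCurrencyColumn setting like the ones above decides which currency a
# warehouse row is in. A `static` value pins every row to one currency; a `property`
# value reads the currency from a column on the row; neither falls back to baseCurrency.
def _example_resolve_currency(
    row: dict,
    config: Optional[RevenueCurrencyPropertyConfig],
    base_currency: str,
) -> str:
    if config is None:
        return base_currency  # table "a": no override, fall back to baseCurrency (GBP)
    if config.static is not None:
        return config.static.value  # table "b": every row is treated as EUR
    return row.get(config.property) or base_currency  # table "c": read the "currency" column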
# NOTE: This test works just fine if you run it in isolation,
# but it will crash when run together with other tests because ClickHouse
# executes UNION ALL queries in parallel, and we can't have that in tests
# because it'll raise the following error:
# clickhouse_driver.errors.PartiallyConsumedQueryError: Simultaneous queries on single connection detected
#
# Let's skip it in CI for now until we figure out how to fix it
@pytest.mark.skipif("CI" in os.environ, reason="Test skipped in CI environment")
@snapshot_clickhouse_queries
class TestRevenueExampleDataWarehouseTablesQueryRunner(ClickhouseTestMixin, APIBaseTest):
    QUERY_TIMESTAMP = "2025-01-29"

    def setUp(self):
        super().setUp()

        # Register tables in Django DB for proper HogQL access
        self._register_warehouse_tables()

    def _register_warehouse_tables(self):
        # Create a single credential shared by all the tables (required by the model)
        self.credential = DataWarehouseCredential.objects.create(
            team=self.team,
            access_key="test-key",
            access_secret="test-secret",
        )

        # Register tables in Django - this doesn't create anything in ClickHouse,
        # it just registers the schema for HogQL to use
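        # (For context: a DataWarehouseTable row only describes the external table -
        # its name, format, url_pattern, and a column-name -> type mapping - which is
        # enough for HogQL to resolve and type-check queries; no data is copied.)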
        self.tables = []

        # First table
        table_1 = DataWarehouseTable.objects.create(
            name="database_with_revenue_column",
            format=DataWarehouseTable.TableFormat.Parquet,  # Parquet is commonly used in other tests
            team=self.team,
            credential=self.credential,
            url_pattern="test://localhost",  # Doesn't matter for tests
            columns={
                "id": {"hogql": "StringDatabaseField", "clickhouse": "String", "schema_valid": True},
                "revenue": {"hogql": "FloatDatabaseField", "clickhouse": "Float64", "schema_valid": True},
                "timestamp": {"hogql": "DateTimeDatabaseField", "clickhouse": "DateTime", "schema_valid": True},
            },
        )
        self.tables.append(table_1)

        # Second table
        table_2 = DataWarehouseTable.objects.create(
            name="database_with_revenue_column_a",
            format=DataWarehouseTable.TableFormat.Parquet,
            team=self.team,
            credential=self.credential,
            url_pattern="test://localhost",  # Doesn't matter for tests
            columns={
                "id": {"hogql": "StringDatabaseField", "clickhouse": "String", "schema_valid": True},
                "revenue_a": {"hogql": "FloatDatabaseField", "clickhouse": "Float64", "schema_valid": True},
                "timestamp": {"hogql": "DateTimeDatabaseField", "clickhouse": "DateTime", "schema_valid": True},
            },
        )
        self.tables.append(table_2)

        # Third table
        table_3 = DataWarehouseTable.objects.create(
            name="database_with_revenue_column_b",
            format=DataWarehouseTable.TableFormat.Parquet,
            team=self.team,
            credential=self.credential,
            url_pattern="test://localhost",  # Doesn't matter for tests
            columns={
                "id": {"hogql": "StringDatabaseField", "clickhouse": "String", "schema_valid": True},
                "revenue_b": {"hogql": "FloatDatabaseField", "clickhouse": "Float64", "schema_valid": True},
                "timestamp": {"hogql": "DateTimeDatabaseField", "clickhouse": "DateTime", "schema_valid": True},
            },
        )
        self.tables.append(table_3)

        # Fourth table
        table_4 = DataWarehouseTable.objects.create(
            name="database_with_revenue_column_c",
            format=DataWarehouseTable.TableFormat.Parquet,
            team=self.team,
            credential=self.credential,
            url_pattern="test://localhost",  # Doesn't matter for tests
            columns={
                "id": {"hogql": "StringDatabaseField", "clickhouse": "String", "schema_valid": True},
                "revenue_c": {"hogql": "FloatDatabaseField", "clickhouse": "Float64", "schema_valid": True},
                "currency": {"hogql": "StringDatabaseField", "clickhouse": "String", "schema_valid": True},
                "timestamp": {"hogql": "DateTimeDatabaseField", "clickhouse": "DateTime", "schema_valid": True},
            },
        )
        self.tables.append(table_4)
def _run_revenue_example_external_tables_query(
|
||||
self,
|
||||
revenue_tracking_config: RevenueTrackingConfig,
|
||||
limit_context: Optional[LimitContext] = None,
|
||||
):
|
||||
with freeze_time(self.QUERY_TIMESTAMP):
|
||||
query = RevenueExampleDataWarehouseTablesQuery(
|
||||
revenueTrackingConfig=revenue_tracking_config,
|
||||
)
|
||||
runner = RevenueExampleDataWarehouseTablesQueryRunner(
|
||||
team=self.team, query=query, limit_context=limit_context
|
||||
)
|
||||
response = runner.calculate()
|
||||
RevenueExampleDataWarehouseTablesQueryResponse.model_validate(response)
|
||||
return response
|
||||
|
||||
def tearDown(self):
|
||||
# Clean up the Django database tables
|
||||
for table in self.tables:
|
||||
table.delete()
|
||||
self.credential.delete()
|
||||
super().tearDown()
|
||||
|
||||
def test_no_crash_when_no_data(self):
|
||||
results = self._run_revenue_example_external_tables_query(EMPTY_REVENUE_TRACKING_CONFIG).results
|
||||
|
||||
assert len(results) == 0
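
        # For reference (based on the snapshot later in this commit): with nothing to
        # UNION the runner falls back to an always-empty query along the lines of
        # `SELECT 1 WHERE 0`, rather than failing outright.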

    @patch(
        "clickhouse_driver.result.QueryResult.get_result",
        return_value=(
            [
                ("database_with_revenue_column", "distinct_id_1", 42, "USD", 42, "GBP"),
                ("database_with_revenue_column", "distinct_id_2", 43, "USD", 43, "GBP"),
                ("database_with_revenue_column", "distinct_id_3", 44, "USD", 44, "GBP"),
            ],
            (
                "String",
                "String",
                "Float64",
                "String",
                "Float64",
                "String",
            ),
        ),
    )
    def test_single_table_query(self, mock_get_result):
        response = self._run_revenue_example_external_tables_query(SINGLE_TABLE_REVENUE_TRACKING_CONFIG)
        results = response.results

        # 3 rows, 6 columns
        assert len(results) == 3
        assert len(results[0]) == 6
        assert len(response.columns) == 6

        assert results[0] == ("database_with_revenue_column", "distinct_id_1", 42, "USD", 42, "GBP")
        assert results[1] == ("database_with_revenue_column", "distinct_id_2", 43, "USD", 43, "GBP")
        assert results[2] == ("database_with_revenue_column", "distinct_id_3", 44, "USD", 44, "GBP")

    @patch(
        "clickhouse_driver.result.QueryResult.get_result",
        return_value=(
            [
                ("database_with_revenue_column_a", "distinct_id_1", 42, "USD", 42, "GBP"),
                ("database_with_revenue_column_a", "distinct_id_2", 43, "USD", 43, "GBP"),
                ("database_with_revenue_column_a", "distinct_id_3", 44, "USD", 44, "GBP"),
                ("database_with_revenue_column_b", "distinct_id_1", 43, "USD", 43, "GBP"),
                ("database_with_revenue_column_b", "distinct_id_2", 44, "USD", 44, "GBP"),
                ("database_with_revenue_column_b", "distinct_id_3", 45, "USD", 45, "GBP"),
            ],
            (
                "String",
                "String",
                "Float64",
                "String",
                "Float64",
                "String",
            ),
        ),
    )
    def test_multiple_tables_query(self, mock_get_result):
        response = self._run_revenue_example_external_tables_query(MULTIPLE_TABLES_REVENUE_TRACKING_CONFIG)
        results = response.results

        # 6 rows, 6 columns
        assert len(results) == 6
        assert len(results[0]) == 6
        assert len(response.columns) == 6

        # Results are returned in the order defined by the SQL UNION ALL query
        # The first table from dataWarehouseTables should come first
        assert results[0] == ("database_with_revenue_column_a", "distinct_id_1", 42, "USD", 42, "GBP")
        assert results[1] == ("database_with_revenue_column_a", "distinct_id_2", 43, "USD", 43, "GBP")
        assert results[2] == ("database_with_revenue_column_a", "distinct_id_3", 44, "USD", 44, "GBP")
        assert results[3] == ("database_with_revenue_column_b", "distinct_id_1", 43, "USD", 43, "GBP")
        assert results[4] == ("database_with_revenue_column_b", "distinct_id_2", 44, "USD", 44, "GBP")
        assert results[5] == ("database_with_revenue_column_b", "distinct_id_3", 45, "USD", 45, "GBP")
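
        # Note on the mocks above: `clickhouse_driver.result.QueryResult.get_result`
        # hands back a (rows, column metadata) pair, which these patches mimic as a
        # list of 6-tuples plus one ClickHouse type name per column, in positional order.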

@@ -7,10 +7,6 @@ from posthog.clickhouse.client.execute import sync_execute
from posthog.hogql.constants import LimitContext
from posthog.hogql_queries.web_analytics.web_overview import WebOverviewQueryRunner
from posthog.models import Action, Element, Cohort
from posthog.warehouse.models import (
    DataWarehouseTable,
    DataWarehouseCredential,
)
from posthog.models.utils import uuid7
from posthog.schema import (
    CompareFilter,
@@ -26,7 +22,6 @@ from posthog.schema import (
    RevenueTrackingConfig,
    RevenueCurrencyPropertyConfig,
    RevenueTrackingEventItem,
    RevenueTrackingDataWarehouseTable,
)
from posthog.settings import HOGQL_INCREASED_MAX_EXECUTION_TIME
from posthog.test.base import (
@@ -35,7 +30,6 @@ from posthog.test.base import (
    _create_event,
    _create_person,
    snapshot_clickhouse_queries,
    patch_clickhouse_client_execute,
)

@@ -91,25 +85,6 @@ class TestWebOverviewQueryRunner(ClickhouseTestMixin, APIBaseTest):
        )
        return person_result

    def _create_data_warehouse_table(self, name: str, revenue_column: str, timestamp_column: str, currency_column: str):
        return DataWarehouseTable.objects.create(
            name=name,
            format=DataWarehouseTable.TableFormat.Parquet,  # Parquet is commonly used in other tests
            team=self.team,
            credential=DataWarehouseCredential.objects.create(
                team=self.team,
                access_key="test-key",
                access_secret="test-secret",
            ),
            url_pattern="test://localhost",  # Doesn't matter for tests
            columns={
                "id": {"hogql": "StringDatabaseField", "clickhouse": "String", "schema_valid": True},
                revenue_column: {"hogql": "FloatDatabaseField", "clickhouse": "Float64", "schema_valid": True},
                timestamp_column: {"hogql": "DateTimeDatabaseField", "clickhouse": "DateTime", "schema_valid": True},
                currency_column: {"hogql": "StringDatabaseField", "clickhouse": "String", "schema_valid": True},
            },
        )

    def _run_web_overview_query(
        self,
        date_from: str,
@@ -778,62 +753,6 @@ class TestWebOverviewQueryRunner(ClickhouseTestMixin, APIBaseTest):
        assert revenue.kind == "currency"
        assert revenue.value is None

    @patch("posthoganalytics.feature_enabled", return_value=True)
    def test_revenue_with_data_warehouse_table(self, feature_enabled_mock):
        # Create two different data warehouse tables to guarantee they're both added to the query
        self._create_data_warehouse_table("database_with_revenue_column_1", "revenue_1", "timestamp_1", "currency_1")
        self._create_data_warehouse_table("database_with_revenue_column_2", "revenue_2", "timestamp_2", "currency_2")

        self.team.revenue_tracking_config = RevenueTrackingConfig(
            baseCurrency=CurrencyCode.GBP,
            events=[
                RevenueTrackingEventItem(
                    eventName="purchase",
                    revenueProperty="revenue",
                    revenueCurrencyProperty=RevenueCurrencyPropertyConfig(property="currency"),
                )
            ],
            dataWarehouseTables=[
                RevenueTrackingDataWarehouseTable(
                    tableName="database_with_revenue_column_1",
                    distinctIdColumn="id",
                    revenueColumn="revenue_1",
                    timestampColumn="timestamp_1",
                    revenueCurrencyColumn=RevenueCurrencyPropertyConfig(property="currency_1"),
                ),
                RevenueTrackingDataWarehouseTable(
                    tableName="database_with_revenue_column_2",
                    distinctIdColumn="id",
                    revenueColumn="revenue_2",
                    timestampColumn="timestamp_2",
                    revenueCurrencyColumn=RevenueCurrencyPropertyConfig(static=CurrencyCode.EUR),
                ),
            ],
        ).model_dump()
        self.team.save()

        self._create_events(
            [
                ("p1", [("2023-12-02", str(uuid7("2023-12-02")), 100, "BRL")]),
            ],
            event="purchase",
        )

        # Spy on the `clickhouse_driver.Client.execute` method to avoid querying the data warehouse tables
        def execute_wrapper(original_client_execute, query, *args, **kwargs):
            # Visitors, Views, Session, Duration, Bounce, Revenue
            # all times two because it's current/previous
            if "database_with_revenue_column" in query:
                return ([[0] * 6 * 2], [])

            return original_client_execute(query, *args, **kwargs)

        # Run the query, but don't assert on the output because we're mocking it above
        # We're interested in the queries that were executed
        # This is asserted by the global `@snapshot_clickhouse_queries` decorator
        with patch_clickhouse_client_execute(execute_wrapper):
            self._run_web_overview_query("2023-12-01", "2023-12-03", include_revenue=True)

    def test_revenue_conversion_event(self):
        s1 = str(uuid7("2023-12-02"))


@@ -3,7 +3,6 @@ from abc import ABC
from datetime import timedelta, datetime
from math import ceil
from typing import Optional, Union
import posthoganalytics

from django.conf import settings
from django.core.cache import cache
@@ -44,17 +43,6 @@ WebQueryNode = Union[
class WebAnalyticsQueryRunner(QueryRunner, ABC):
    query: WebQueryNode
    query_type: type[WebQueryNode]
    include_data_warehouse_revenue: bool = False

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        self.include_data_warehouse_revenue = posthoganalytics.feature_enabled(
            "web-analytics-data-warehouse-revenue-settings",
            str(self.team.organization_id),
            groups={"organization": str(self.team.organization_id)},
            group_properties={"organization": {"id": str(self.team.organization_id)}},
        )

    @cached_property
    def query_date_range(self):
@@ -256,28 +244,28 @@ class WebAnalyticsQueryRunner(QueryRunner, ABC):
        end: ast.Expr,
        alias: Optional[str] = None,
        params: Optional[list[ast.Expr]] = None,
        extra_args: Optional[list[ast.Expr]] = None,
    ):
        and_args: list[ast.Expr] = [
            ast.CompareOperation(
                op=ast.CompareOperationOp.GtEq,
                left=ast.Field(chain=["start_timestamp"]),
                right=start,
            ),
            ast.CompareOperation(
                op=ast.CompareOperationOp.Lt,
                left=ast.Field(chain=["start_timestamp"]),
                right=end,
            ),
        ]

        if extra_args:
            and_args.extend(extra_args)

        expr = ast.Call(
            name=function_name + "If",
            params=params,
            args=[ast.Field(chain=[column_name]), ast.Call(name="and", args=and_args)],
            args=[
                ast.Field(chain=[column_name]),
                ast.Call(
                    name="and",
                    args=[
                        ast.CompareOperation(
                            op=ast.CompareOperationOp.GtEq,
                            left=ast.Field(chain=["start_timestamp"]),
                            right=start,
                        ),
                        ast.CompareOperation(
                            op=ast.CompareOperationOp.Lt,
                            left=ast.Field(chain=["start_timestamp"]),
                            right=end,
                        ),
                    ],
                ),
            ],
        )

        if alias is not None:

@@ -14,10 +14,7 @@ from posthog.schema import (
    WebOverviewQueryResponse,
    WebOverviewQuery,
)
from posthog.hogql.database.schema.exchange_rate import (
    revenue_sum_expression_for_events,
    revenue_expression_for_data_warehouse,
)
from posthog.hogql.database.schema.exchange_rate import revenue_sum_expression_for_events


class WebOverviewQueryRunner(WebAnalyticsQueryRunner):
@@ -25,7 +22,7 @@ class WebOverviewQueryRunner(WebAnalyticsQueryRunner):
    response: WebOverviewQueryResponse
    cached_response: CachedWebOverviewQueryResponse

    def to_query(self) -> ast.SelectQuery | ast.SelectSetQuery:
    def to_query(self) -> ast.SelectQuery:
        return self.outer_select

    def calculate(self):
@@ -162,7 +159,6 @@ HAVING {inside_start_timestamp_period}
            column_name: str,
            alias: str,
            params: Optional[list[ast.Expr]] = None,
            extra_args: Optional[list[ast.Expr]] = None,
        ):
            if not self.query_compare_to_date_range:
                return ast.Call(name=function_name, params=params, args=[ast.Field(chain=[column_name])])
@@ -174,7 +170,6 @@ HAVING {inside_start_timestamp_period}
                self.query_date_range.date_to_as_hogql(),
                alias=alias,
                params=params,
                extra_args=extra_args,
            )

        def previous_period_aggregate(
@@ -182,7 +177,6 @@ HAVING {inside_start_timestamp_period}
            column_name: str,
            alias: str,
            params: Optional[list[ast.Expr]] = None,
            extra_args: Optional[list[ast.Expr]] = None,
        ):
            if not self.query_compare_to_date_range:
                return ast.Alias(alias=alias, expr=ast.Constant(value=None))
@@ -194,31 +188,16 @@ HAVING {inside_start_timestamp_period}
                self.query_compare_to_date_range.date_to_as_hogql(),
                alias=alias,
                params=params,
                extra_args=extra_args,
            )

        session_id_is_not_null = ast.Call(name="isNotNull", args=[ast.Field(chain=["session_id"])])

        if self.query.conversionGoal:
            select = [
                current_period_aggregate(
                    "uniq", "session_person_id", "unique_users", extra_args=[session_id_is_not_null]
                ),
                previous_period_aggregate(
                    "uniq", "session_person_id", "previous_unique_users", extra_args=[session_id_is_not_null]
                ),
                current_period_aggregate(
                    "sum", "conversion_count", "total_conversion_count", extra_args=[session_id_is_not_null]
                ),
                previous_period_aggregate(
                    "sum", "conversion_count", "previous_total_conversion_count", extra_args=[session_id_is_not_null]
                ),
                current_period_aggregate(
                    "uniq", "conversion_person_id", "unique_conversions", extra_args=[session_id_is_not_null]
                ),
                previous_period_aggregate(
                    "uniq", "conversion_person_id", "previous_unique_conversions", extra_args=[session_id_is_not_null]
                ),
                current_period_aggregate("uniq", "session_person_id", "unique_users"),
                previous_period_aggregate("uniq", "session_person_id", "previous_unique_users"),
                current_period_aggregate("sum", "conversion_count", "total_conversion_count"),
                previous_period_aggregate("sum", "conversion_count", "previous_total_conversion_count"),
                current_period_aggregate("uniq", "conversion_person_id", "unique_conversions"),
                previous_period_aggregate("uniq", "conversion_person_id", "previous_unique_conversions"),
                ast.Alias(
                    alias="conversion_rate",
                    expr=ast.Call(
@@ -236,60 +215,28 @@ HAVING {inside_start_timestamp_period}
                    ),
                ),
            ]

            if self.query.includeRevenue:
                select.extend(
                    [
                        current_period_aggregate(
                            "sum",
                            "session_conversion_revenue",
                            "conversion_revenue",
                            extra_args=[session_id_is_not_null],
                        ),
                        previous_period_aggregate(
                            "sum",
                            "session_conversion_revenue",
                            "previous_conversion_revenue",
                            extra_args=[session_id_is_not_null],
                        ),
                        current_period_aggregate("sum", "session_conversion_revenue", "conversion_revenue"),
                        previous_period_aggregate("sum", "session_conversion_revenue", "previous_conversion_revenue"),
                    ]
                )
        else:
            select = [
                current_period_aggregate(
                    "uniq", "session_person_id", "unique_users", extra_args=[session_id_is_not_null]
                ),
                previous_period_aggregate(
                    "uniq", "session_person_id", "previous_unique_users", extra_args=[session_id_is_not_null]
                ),
                current_period_aggregate(
                    "sum",
                    "filtered_pageview_count",
                    "total_filtered_pageview_count",
                    extra_args=[session_id_is_not_null],
                ),
                previous_period_aggregate(
                    "sum",
                    "filtered_pageview_count",
                    "previous_filtered_pageview_count",
                    extra_args=[session_id_is_not_null],
                ),
                current_period_aggregate("uniq", "session_id", "unique_sessions", extra_args=[session_id_is_not_null]),
                previous_period_aggregate(
                    "uniq", "session_id", "previous_unique_sessions", extra_args=[session_id_is_not_null]
                ),
                current_period_aggregate(
                    "avg", "session_duration", "avg_duration_s", extra_args=[session_id_is_not_null]
                ),
                previous_period_aggregate(
                    "avg", "session_duration", "prev_avg_duration_s", extra_args=[session_id_is_not_null]
                ),
                current_period_aggregate("avg", "is_bounce", "bounce_rate", extra_args=[session_id_is_not_null]),
                previous_period_aggregate("avg", "is_bounce", "prev_bounce_rate", extra_args=[session_id_is_not_null]),
                current_period_aggregate("uniq", "session_person_id", "unique_users"),
                previous_period_aggregate("uniq", "session_person_id", "previous_unique_users"),
                current_period_aggregate("sum", "filtered_pageview_count", "total_filtered_pageview_count"),
                previous_period_aggregate("sum", "filtered_pageview_count", "previous_filtered_pageview_count"),
                current_period_aggregate("uniq", "session_id", "unique_sessions"),
                previous_period_aggregate("uniq", "session_id", "previous_unique_sessions"),
                current_period_aggregate("avg", "session_duration", "avg_duration_s"),
                previous_period_aggregate("avg", "session_duration", "prev_avg_duration_s"),
                current_period_aggregate("avg", "is_bounce", "bounce_rate"),
                previous_period_aggregate("avg", "is_bounce", "prev_bounce_rate"),
            ]

        # NOTE: This won't include `session_id_is_not_null` because
        # we want to include revenue coming from Data Warehouse tables
        # and those won't contain `session_id`
        if self.query.includeRevenue:
            select.extend(
                [
@@ -298,75 +245,7 @@ HAVING {inside_start_timestamp_period}
                ]
            )

        query = ast.SelectQuery(select=select, select_from=ast.JoinExpr(table=self.inner_select))

        # If we can find some selects for DW revenue, then join it with that instead of just the inner select
        if self.data_warehouse_revenue_selects:
            query.select_from = ast.JoinExpr(
                table=ast.SelectSetQuery.create_from_queries(
                    [self.inner_select, *self.data_warehouse_revenue_selects],
                    set_operator="UNION ALL",
                )
            )

        assert isinstance(query, ast.SelectQuery)
        return query

    @cached_property
    def data_warehouse_revenue_selects(self) -> list[ast.SelectQuery]:
        if not self.include_data_warehouse_revenue:
            return []

        if not self.query.includeRevenue:
            return []

        if not self.team.revenue_config.dataWarehouseTables:
            return []

        queries: list[ast.SelectQuery] = []

        # This is a little bit complicated, but here's the gist of it:
        #
        # We need to include the same amount of columns in this select query as in the inner select query
        # It also needs to be in the exact same order because ClickHouse doesn't care about the column names
        # from subsequent queries in a SelectSetQuery, it only cares about the names of the first query
        # and then the positions of the columns in subsequent queries.
        #
        # So we need to iterate over the columns in the inner select query and create a new alias for each column.
        # Because we don't care about the value, and we actually want to ignore them in the main query,
        # we set them to `None` and then replace `session_revenue` and `start_timestamp` with the
        # revenue column and timestamp column from the data warehouse table respectively.
        for table in self.team.revenue_config.dataWarehouseTables:
            select_columns: list[ast.Expr] = []
            for select in self.inner_select.select:
                if not isinstance(select, ast.Alias):  # Guarantee type-safety
                    continue

                new_select = ast.Alias(alias=select.alias, expr=ast.Constant(value=None))

                # Only care about timestamp and revenue, keep the rest as None
                if select.alias == "start_timestamp":
                    new_select = ast.Alias(
                        alias=select.alias,
                        expr=ast.Field(chain=[table.tableName, table.timestampColumn]),
                    )
                elif select.alias == "session_revenue":
                    new_select = ast.Alias(
                        alias=select.alias,
                        expr=revenue_expression_for_data_warehouse(self.team.revenue_config, table),
                    )

                select_columns.append(new_select)

            queries.append(
                ast.SelectQuery(
                    select=select_columns,
                    select_from=ast.JoinExpr(table=ast.Field(chain=[table.tableName])),
                    where=self._periods_expression("start_timestamp"),
                )
            )

        return queries
        return ast.SelectQuery(select=select, select_from=ast.JoinExpr(table=self.inner_select))
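    # A sketch of the positional rule described above (toy columns, for illustration):
    # ClickHouse takes UNION ALL column names from the first SELECT and matches later
    # SELECTs purely by position, so
    #   SELECT start_timestamp, session_revenue, ... FROM sessions
    #   UNION ALL
    #   SELECT dw.timestamp, dw.revenue, NULL, ... FROM dw
    # contributes warehouse revenue under `session_revenue` while every padded column
    # stays NULL and is ignored by the outer aggregates.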


    def to_data(

@@ -1692,19 +1692,6 @@ class RevenueCurrencyPropertyConfig(BaseModel):
    static: Optional[CurrencyCode] = None


class RevenueTrackingDataWarehouseTable(BaseModel):
    model_config = ConfigDict(
        extra="forbid",
    )
    distinctIdColumn: str
    revenueColumn: str
    revenueCurrencyColumn: Optional[RevenueCurrencyPropertyConfig] = Field(
        default_factory=lambda: RevenueCurrencyPropertyConfig.model_validate({"static": "USD"})
    )
    tableName: str
    timestampColumn: str


class RevenueTrackingEventItem(BaseModel):
    model_config = ConfigDict(
        extra="forbid",
@@ -3078,7 +3065,6 @@ class RevenueTrackingConfig(BaseModel):
        extra="forbid",
    )
    baseCurrency: Optional[CurrencyCode] = CurrencyCode.USD
    dataWarehouseTables: Optional[list[RevenueTrackingDataWarehouseTable]] = []
    events: Optional[list[RevenueTrackingEventItem]] = []
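
# A minimal instance of the config shape above (illustrative values only):
#   RevenueTrackingConfig(
#       baseCurrency=CurrencyCode.GBP,
#       events=[RevenueTrackingEventItem(eventName="purchase", revenueProperty="revenue")],
#       dataWarehouseTables=[RevenueTrackingDataWarehouseTable(
#           tableName="charges", distinctIdColumn="id", revenueColumn="amount", timestampColumn="created"
#       )],
#   )
# Leaving `revenueCurrencyColumn` off a table falls back to the static-USD default above.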


@@ -6867,7 +6853,6 @@ class RevenueExampleDataWarehouseTablesQuery(BaseModel):
    )
    offset: Optional[int] = None
    response: Optional[RevenueExampleDataWarehouseTablesQueryResponse] = None
    revenueTrackingConfig: RevenueTrackingConfig


class RevenueExampleEventsQuery(BaseModel):
@@ -6881,7 +6866,6 @@ class RevenueExampleEventsQuery(BaseModel):
    )
    offset: Optional[int] = None
    response: Optional[RevenueExampleEventsQueryResponse] = None
    revenueTrackingConfig: RevenueTrackingConfig


class SessionAttributionExplorerQuery(BaseModel):

File diff suppressed because it is too large
@@ -11,9 +11,10 @@ from posthog.utils_cors import CORS_ALLOWED_TRACING_HEADERS

logger = structlog.get_logger(__name__)

# django-axes settings to lockout after too many attempts

####
# django-axes

# lockout after too many attempts
AXES_ENABLED = get_from_env("AXES_ENABLED", not TEST, type_cast=str_to_bool)
AXES_HANDLER = "axes.handlers.cache.AxesCacheHandler"
AXES_FAILURE_LIMIT = get_from_env("AXES_FAILURE_LIMIT", 30, type_cast=int)
@@ -21,49 +22,17 @@ AXES_COOLOFF_TIME = timedelta(minutes=10)
AXES_LOCKOUT_CALLABLE = "posthog.api.authentication.axes_locked_out"
AXES_META_PRECEDENCE_ORDER = ["HTTP_X_FORWARDED_FOR", "REMOTE_ADDR"]

# Decide rate limit setting

DECIDE_RATE_LIMIT_ENABLED = get_from_env("DECIDE_RATE_LIMIT_ENABLED", False, type_cast=str_to_bool)
DECIDE_BUCKET_CAPACITY = get_from_env("DECIDE_BUCKET_CAPACITY", type_cast=int, default=500)
DECIDE_BUCKET_REPLENISH_RATE = get_from_env("DECIDE_BUCKET_REPLENISH_RATE", type_cast=float, default=10.0)

# Prevent decide abuse

# This is a list of team-ids that are prevented from using the /decide endpoint
# until they fix an issue with their feature flags causing instability in posthog.
DECIDE_SHORT_CIRCUITED_TEAM_IDS = [0]
# Decide db settings

DECIDE_SKIP_POSTGRES_FLAGS = get_from_env("DECIDE_SKIP_POSTGRES_FLAGS", False, type_cast=str_to_bool)

# Decide billing analytics

DECIDE_BILLING_SAMPLING_RATE = get_from_env("DECIDE_BILLING_SAMPLING_RATE", 0.1, type_cast=float)
DECIDE_BILLING_ANALYTICS_TOKEN = get_from_env("DECIDE_BILLING_ANALYTICS_TOKEN", None, type_cast=str, optional=True)

# Decide regular request analytics
# Takes 3 possible formats, all separated by commas:
# A number: "2"
# A range: "2:5" -- represents team IDs 2, 3, 4, 5
# The string "all" -- represents all team IDs
DECIDE_TRACK_TEAM_IDS = get_list(os.getenv("DECIDE_TRACK_TEAM_IDS", ""))

# Decide skip hash key overrides
DECIDE_SKIP_HASH_KEY_OVERRIDE_WRITES = get_from_env(
    "DECIDE_SKIP_HASH_KEY_OVERRIDE_WRITES", False, type_cast=str_to_bool
)

# if `true` we disable session replay if over quota
DECIDE_SESSION_REPLAY_QUOTA_CHECK = get_from_env("DECIDE_SESSION_REPLAY_QUOTA_CHECK", False, type_cast=str_to_bool)

# if `true` we disable feature flags if over quota
DECIDE_FEATURE_FLAG_QUOTA_CHECK = get_from_env("DECIDE_FEATURE_FLAG_QUOTA_CHECK", False, type_cast=str_to_bool)

# if `true` we highly increase the rate limit on /query endpoint and limit the number of concurrent queries
API_QUERIES_ENABLED = get_from_env("API_QUERIES_ENABLED", False, type_cast=str_to_bool)

####
# Application definition

# TODO: Automatically generate these like we do for the frontend
# NOTE: Add these definitions here and on `tach.toml`
PRODUCTS_APPS = [
    "products.early_access_features",
    "products.editor",
    "products.revenue_analytics",
]

INSTALLED_APPS = [
    "whitenoise.runserver_nostatic",  # makes sure that whitenoise handles static files in development
    "django.contrib.admin",
@@ -81,19 +50,18 @@ INSTALLED_APPS = [
    "django_filters",
    "axes",
    "drf_spectacular",
    *PRODUCTS_APPS,
    "django_otp",
    "django_otp.plugins.otp_static",
    "django_otp.plugins.otp_totp",
    # 'django_otp.plugins.otp_email',  # <- if you want email capability.
    # See above for automatically generated apps for all of our products
    "two_factor",
    # 'two_factor.plugins.phonenumber',  # <- if you want phone number capability.
    # 'two_factor.plugins.email',  # <- if you want email capability.
    # 'two_factor.plugins.yubikey',  # <- for yubikey capability.
    "products.early_access_features",  # TODO: add this automatically
    "products.editor",
]


MIDDLEWARE = [
    "django_prometheus.middleware.PrometheusBeforeMiddleware",
    "posthog.gzip_middleware.ScopedGZipMiddleware",
@@ -149,6 +117,7 @@ except ImportError:
else:
    INSTALLED_APPS.append("django_extensions")

# Django builtin setting
# Max size of a POST body (for event ingestion)
DATA_UPLOAD_MAX_MEMORY_SIZE = 20971520  # 20 MB

@@ -174,11 +143,8 @@ TEMPLATES = [
WSGI_APPLICATION = "posthog.wsgi.application"


# Social Auth

SOCIAL_AUTH_JSONFIELD_ENABLED = True
SOCIAL_AUTH_USER_MODEL = "posthog.User"
SOCIAL_AUTH_REDIRECT_IS_HTTPS: bool = get_from_env("SOCIAL_AUTH_REDIRECT_IS_HTTPS", not DEBUG, type_cast=str_to_bool)
####
# Authentication

AUTHENTICATION_BACKENDS: list[str] = [
    "axes.backends.AxesBackend",
@@ -187,6 +153,20 @@ AUTHENTICATION_BACKENDS: list[str] = [
    "django.contrib.auth.backends.ModelBackend",
]

AUTH_USER_MODEL = "posthog.User"

LOGIN_URL = "/login"
LOGOUT_URL = "/logout"
LOGIN_REDIRECT_URL = "/"
APPEND_SLASH = False
CORS_URLS_REGEX = r"^(/site_app/|/array/|/api/(?!early_access_features|surveys|web_experiments).*$)"
CORS_ALLOW_HEADERS = default_headers + CORS_ALLOWED_TRACING_HEADERS
X_FRAME_OPTIONS = "SAMEORIGIN"

SOCIAL_AUTH_JSONFIELD_ENABLED = True
SOCIAL_AUTH_USER_MODEL = "posthog.User"
SOCIAL_AUTH_REDIRECT_IS_HTTPS: bool = get_from_env("SOCIAL_AUTH_REDIRECT_IS_HTTPS", not DEBUG, type_cast=str_to_bool)

SOCIAL_AUTH_PIPELINE = (
    "social_core.pipeline.social_auth.social_details",
    "social_core.pipeline.social_auth.social_uid",
@@ -216,9 +196,34 @@ SOCIAL_AUTH_GITLAB_KEY: str | None = os.getenv("SOCIAL_AUTH_GITLAB_KEY")
SOCIAL_AUTH_GITLAB_SECRET: str | None = os.getenv("SOCIAL_AUTH_GITLAB_SECRET")
SOCIAL_AUTH_GITLAB_API_URL: str = os.getenv("SOCIAL_AUTH_GITLAB_API_URL", "https://gitlab.com")

# Cookie age in seconds (default 2 weeks) - these are the standard defaults for Django but having it here to be explicit
SESSION_COOKIE_AGE = get_from_env("SESSION_COOKIE_AGE", 60 * 60 * 24 * 14, type_cast=int)

# For sensitive actions we have an additional permission (default 1 hour)
SESSION_SENSITIVE_ACTIONS_AGE = get_from_env("SESSION_SENSITIVE_ACTIONS_AGE", 60 * 60 * 6, type_cast=int)

CSRF_COOKIE_NAME = "posthog_csrftoken"
CSRF_COOKIE_AGE = get_from_env("CSRF_COOKIE_AGE", SESSION_COOKIE_AGE, type_cast=int)

# The total time allowed for an impersonated session
IMPERSONATION_TIMEOUT_SECONDS = get_from_env("IMPERSONATION_TIMEOUT_SECONDS", 60 * 60 * 2, type_cast=int)
# The time allowed for an impersonated session to be idle before it expires
IMPERSONATION_IDLE_TIMEOUT_SECONDS = get_from_env("IMPERSONATION_IDLE_TIMEOUT_SECONDS", 30 * 60, type_cast=int)
# Impersonation cookie last activity key
IMPERSONATION_COOKIE_LAST_ACTIVITY_KEY = get_from_env(
    "IMPERSONATION_COOKIE_LAST_ACTIVITY_KEY", "impersonation_last_activity"
)

SESSION_COOKIE_CREATED_AT_KEY = get_from_env("SESSION_COOKIE_CREATED_AT_KEY", "session_created_at")

PROJECT_SWITCHING_TOKEN_ALLOWLIST = get_list(os.getenv("PROJECT_SWITCHING_TOKEN_ALLOWLIST", "sTMFPsFhdP1Ssg"))

####
# 2FA

TWO_FACTOR_REMEMBER_COOKIE_AGE = 60 * 60 * 24 * 30

####
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators

@@ -229,20 +234,17 @@ AUTH_PASSWORD_VALIDATORS = [

PASSWORD_RESET_TIMEOUT = 86_400  # 1 day

####
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/

LANGUAGE_CODE = "en-us"

TIME_ZONE = "UTC"

USE_I18N = True

USE_L10N = True

USE_TZ = True


####
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/

@@ -260,15 +262,8 @@ def static_varies_origin(headers, path, url):

WHITENOISE_ADD_HEADERS_FUNCTION = static_varies_origin

AUTH_USER_MODEL = "posthog.User"

LOGIN_URL = "/login"
LOGOUT_URL = "/logout"
LOGIN_REDIRECT_URL = "/"
APPEND_SLASH = False
CORS_URLS_REGEX = r"^(/site_app/|/array/|/api/(?!early_access_features|surveys|web_experiments).*$)"
CORS_ALLOW_HEADERS = default_headers + CORS_ALLOWED_TRACING_HEADERS
X_FRAME_OPTIONS = "SAMEORIGIN"
####
# REST framework

REST_FRAMEWORK = {
    "DEFAULT_AUTHENTICATION_CLASSES": ["posthog.auth.SessionAuthentication"],
@@ -288,9 +283,12 @@ REST_FRAMEWORK = {
    # The default STRICT_JSON fails the whole request if the data can't be strictly JSON-serialized
    "STRICT_JSON": False,
}

if DEBUG:
    REST_FRAMEWORK["DEFAULT_RENDERER_CLASSES"].append("rest_framework.renderers.BrowsableAPIRenderer")  # type: ignore

####
# DRF Spectacular

SPECTACULAR_SETTINGS = {
    "AUTHENTICATION_WHITELIST": ["posthog.auth.PersonalAPIKeyAuthentication"],
@@ -308,15 +306,8 @@ SPECTACULAR_SETTINGS = {

EXCEPTIONS_HOG = {"EXCEPTION_REPORTING": "posthog.exceptions.exception_reporting"}

# Cookie age in seconds (default 2 weeks) - these are the standard defaults for Django but having it here to be explicit
SESSION_COOKIE_AGE = get_from_env("SESSION_COOKIE_AGE", 60 * 60 * 24 * 14, type_cast=int)

# For sensitive actions we have an additional permission (default 1 hour)
SESSION_SENSITIVE_ACTIONS_AGE = get_from_env("SESSION_SENSITIVE_ACTIONS_AGE", 60 * 60 * 6, type_cast=int)

CSRF_COOKIE_NAME = "posthog_csrftoken"
CSRF_COOKIE_AGE = get_from_env("CSRF_COOKIE_AGE", SESSION_COOKIE_AGE, type_cast=int)

####
# Compression

# see posthog.gzip_middleware.ScopedGZipMiddleware
# for how adding paths here can add vulnerability to the "breach" attack
@@ -369,45 +360,69 @@ GZIP_RESPONSE_ALLOW_LIST = get_list(
    )
)

KAFKA_PRODUCE_ACK_TIMEOUT_SECONDS = int(os.getenv("KAFKA_PRODUCE_ACK_TIMEOUT_SECONDS", None) or 10)

####
# Prometheus Django metrics settings, see
# https://github.com/korfuri/django-prometheus for more details

# We keep the number of buckets low to reduce resource usage on the Prometheus
PROMETHEUS_LATENCY_BUCKETS = [0.1, 0.3, 0.9, 2.7, 8.1, float("inf")]

# temporary flag to control new UUID version setting in posthog-js
# is set to v7 to test new generation but can be set to "og" to revert
POSTHOG_JS_UUID_VERSION = os.getenv("POSTHOG_JS_UUID_VERSION", "v7")
####
# Proxy and IP egress config

# Used only to display in the UI to inform users of allowlist options
PUBLIC_EGRESS_IP_ADDRESSES = get_list(os.getenv("PUBLIC_EGRESS_IP_ADDRESSES", ""))

# The total time allowed for an impersonated session
IMPERSONATION_TIMEOUT_SECONDS = get_from_env("IMPERSONATION_TIMEOUT_SECONDS", 60 * 60 * 2, type_cast=int)
# The time allowed for an impersonated session to be idle before it expires
IMPERSONATION_IDLE_TIMEOUT_SECONDS = get_from_env("IMPERSONATION_IDLE_TIMEOUT_SECONDS", 30 * 60, type_cast=int)
# Impersonation cookie last activity key
IMPERSONATION_COOKIE_LAST_ACTIVITY_KEY = get_from_env(
    "IMPERSONATION_COOKIE_LAST_ACTIVITY_KEY", "impersonation_last_activity"
)

SESSION_COOKIE_CREATED_AT_KEY = get_from_env("SESSION_COOKIE_CREATED_AT_KEY", "session_created_at")

PROJECT_SWITCHING_TOKEN_ALLOWLIST = get_list(os.getenv("PROJECT_SWITCHING_TOKEN_ALLOWLIST", "sTMFPsFhdP1Ssg"))

PROXY_PROVISIONER_URL = get_from_env("PROXY_PROVISIONER_URL", "")  # legacy, from before gRPC
PROXY_PROVISIONER_ADDR = get_from_env("PROXY_PROVISIONER_ADDR", "")
PROXY_TARGET_CNAME = get_from_env("PROXY_TARGET_CNAME", "")
PROXY_BASE_CNAME = get_from_env("PROXY_BASE_CNAME", "")

####
# CDP

LOGO_DEV_TOKEN = get_from_env("LOGO_DEV_TOKEN", "")

# disables frontend side navigation hooks to make hot-reload work seamlessly
DEV_DISABLE_NAVIGATION_HOOKS = get_from_env("DEV_DISABLE_NAVIGATION_HOOKS", False, type_cast=bool)
####
# /decide

# Decide rate limit setting
DECIDE_RATE_LIMIT_ENABLED = get_from_env("DECIDE_RATE_LIMIT_ENABLED", False, type_cast=str_to_bool)
DECIDE_BUCKET_CAPACITY = get_from_env("DECIDE_BUCKET_CAPACITY", type_cast=int, default=500)
DECIDE_BUCKET_REPLENISH_RATE = get_from_env("DECIDE_BUCKET_REPLENISH_RATE", type_cast=float, default=10.0)

# This is a list of team-ids that are prevented from using the /decide endpoint
# until they fix an issue with their feature flags causing instability in posthog.
DECIDE_SHORT_CIRCUITED_TEAM_IDS = [0]

# Decide db settings
DECIDE_SKIP_POSTGRES_FLAGS = get_from_env("DECIDE_SKIP_POSTGRES_FLAGS", False, type_cast=str_to_bool)

# Decide billing analytics
DECIDE_BILLING_SAMPLING_RATE = get_from_env("DECIDE_BILLING_SAMPLING_RATE", 0.1, type_cast=float)
DECIDE_BILLING_ANALYTICS_TOKEN = get_from_env("DECIDE_BILLING_ANALYTICS_TOKEN", None, type_cast=str, optional=True)

# Decide regular request analytics
# Takes 3 possible formats, all separated by commas:
# A number: "2"
# A range: "2:5" -- represents team IDs 2, 3, 4, 5
# The string "all" -- represents all team IDs
DECIDE_TRACK_TEAM_IDS = get_list(os.getenv("DECIDE_TRACK_TEAM_IDS", ""))
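
# Illustrative sketch only (hypothetical helper, not part of this settings file):
# how a parsed DECIDE_TRACK_TEAM_IDS spec could be matched against a team id.
def _decide_track_team(spec: list[str], team_id: int) -> bool:
    for token in spec:
        if token == "all":  # "all" matches every team
            return True
        if ":" in token:  # a range like "2:5" covers 2, 3, 4, 5 inclusive
            low, high = token.split(":", 1)
            if int(low) <= team_id <= int(high):
                return True
        elif token == str(team_id):  # a plain number like "2"
            return True
    return False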

# Decide skip hash key overrides
DECIDE_SKIP_HASH_KEY_OVERRIDE_WRITES = get_from_env(
    "DECIDE_SKIP_HASH_KEY_OVERRIDE_WRITES", False, type_cast=str_to_bool
)

# if `true` we disable session replay if over quota
DECIDE_SESSION_REPLAY_QUOTA_CHECK = get_from_env("DECIDE_SESSION_REPLAY_QUOTA_CHECK", False, type_cast=str_to_bool)

# if `true` we disable feature flags if over quota
DECIDE_FEATURE_FLAG_QUOTA_CHECK = get_from_env("DECIDE_FEATURE_FLAG_QUOTA_CHECK", False, type_cast=str_to_bool)

####
# /remote_config
REMOTE_CONFIG_DECIDE_ROLLOUT_PERCENTAGE = get_from_env("REMOTE_CONFIG_DECIDE_ROLLOUT_PERCENTAGE", 0.0, type_cast=float)

if REMOTE_CONFIG_DECIDE_ROLLOUT_PERCENTAGE > 1:
@@ -417,10 +432,42 @@ if REMOTE_CONFIG_DECIDE_ROLLOUT_PERCENTAGE > 1:
REMOTE_CONFIG_CDN_PURGE_ENDPOINT = get_from_env("REMOTE_CONFIG_CDN_PURGE_ENDPOINT", "")
REMOTE_CONFIG_CDN_PURGE_TOKEN = get_from_env("REMOTE_CONFIG_CDN_PURGE_TOKEN", "")
REMOTE_CONFIG_CDN_PURGE_DOMAINS = get_list(os.getenv("REMOTE_CONFIG_CDN_PURGE_DOMAINS", ""))

####
# /capture

KAFKA_PRODUCE_ACK_TIMEOUT_SECONDS = int(os.getenv("KAFKA_PRODUCE_ACK_TIMEOUT_SECONDS", None) or 10)

####
# /query

# if `true` we highly increase the rate limit on /query endpoint and limit the number of concurrent queries
API_QUERIES_ENABLED = get_from_env("API_QUERIES_ENABLED", False, type_cast=str_to_bool)

####
# Hog

# Teams allowed to modify transformation code (comma-separated list of team IDs),
# keep in sync with client-side feature flag HOG_TRANSFORMATIONS_CUSTOM_HOG_ENABLED
HOG_TRANSFORMATIONS_CUSTOM_ENABLED_TEAMS = get_list(os.getenv("HOG_TRANSFORMATIONS_CUSTOM_ENABLED_TEAMS", ""))
CREATE_HOG_FUNCTION_FROM_PLUGIN_CONFIG = get_from_env("CREATE_HOG_FUNCTION_FROM_PLUGIN_CONFIG", False, type_cast=bool)

####
# Livestream

# Passed to the frontend for the web app to know where to connect to
LIVESTREAM_HOST = get_from_env("LIVESTREAM_HOST", "")

####
# Local dev

# disables frontend side navigation hooks to make hot-reload work seamlessly
DEV_DISABLE_NAVIGATION_HOOKS = get_from_env("DEV_DISABLE_NAVIGATION_HOOKS", False, type_cast=bool)

####
# Random/temporary
# Everything that is supposed to be removed eventually

# temporary flag to control new UUID version setting in posthog-js
# is set to v7 to test new generation but can be set to "og" to revert
POSTHOG_JS_UUID_VERSION = os.getenv("POSTHOG_JS_UUID_VERSION", "v7")

@@ -500,7 +500,7 @@ class TestExternalDataSource(APIBaseTest):
        self._create_external_data_source()
        self._create_external_data_source()

        with self.assertNumQueries(21):
        with self.assertNumQueries(23):
            response = self.client.get(f"/api/projects/{self.team.pk}/external_data_sources/")
            payload = response.json()


@@ -158,18 +158,27 @@ class DataWarehouseSavedQuery(CreatedMetaFields, UUIDModel, DeletedMetaFields):
        return f"https://{settings.AIRBYTE_BUCKET_DOMAIN}/dlt/team_{self.team.pk}_model_{self.id.hex}/modeling/{normalized_name}"

    def hogql_definition(self, modifiers: Optional[HogQLQueryModifiers] = None) -> Union[SavedQuery, S3Table]:
        from posthog.warehouse.models.table import CLICKHOUSE_HOGQL_MAPPING
        if (
            self.table is not None
            and (self.status == DataWarehouseSavedQuery.Status.COMPLETED or self.last_run_at is not None)
            and modifiers is not None
            and modifiers.useMaterializedViews
        ):
            return self.table.hogql_definition(modifiers)

        columns = self.columns or {}

        fields: dict[str, FieldOrTable] = {}
        structure = []

        from posthog.warehouse.models.table import CLICKHOUSE_HOGQL_MAPPING

        for column, type in columns.items():
            # Support for 'old' style columns
            if isinstance(type, str):
                clickhouse_type = type
            else:
            elif isinstance(type, dict):
                clickhouse_type = type["clickhouse"]
            else:
                raise Exception(f"Unknown column type: {type}")  # Never reached

            if clickhouse_type.startswith("Nullable("):
                clickhouse_type = clickhouse_type.replace("Nullable(", "")[:-1]
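                # e.g. "Nullable(String)" -> "String"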
@@ -178,37 +187,23 @@ class DataWarehouseSavedQuery(CreatedMetaFields, UUIDModel, DeletedMetaFields):
            if clickhouse_type.startswith("Array("):
                clickhouse_type = remove_named_tuples(clickhouse_type)

            if isinstance(type, dict):
                column_invalid = not type.get("valid", True)
            else:
                column_invalid = False

            if not column_invalid or (modifiers is not None and modifiers.s3TableUseInvalidColumns):
                structure.append(f"`{column}` {clickhouse_type}")

            # Support for 'old' style columns
            if isinstance(type, str):
                hogql_type_str = clickhouse_type.partition("(")[0]
                hogql_type = CLICKHOUSE_HOGQL_MAPPING[hogql_type_str]
            else:
            elif isinstance(type, dict):
                hogql_type = STR_TO_HOGQL_MAPPING[type["hogql"]]
            else:
                raise Exception(f"Unknown column type: {type}")  # Never reached

            fields[column] = hogql_type(name=column)

        if (
            self.table is not None
            and (self.status == DataWarehouseSavedQuery.Status.COMPLETED or self.last_run_at is not None)
            and modifiers is not None
            and modifiers.useMaterializedViews
        ):
            return self.table.hogql_definition(modifiers)
        else:
            return SavedQuery(
                id=str(self.id),
                name=self.name,
                query=self.query["query"],
                fields=fields,
            )
        return SavedQuery(
            id=str(self.id),
            name=self.name,
            query=self.query["query"],
            fields=fields,
        )


@database_sync_to_async

@@ -1,10 +1,10 @@
# Products

This file contains PostHog products.
This file contains PostHog products.

- Internal RFC: https://github.com/PostHog/product-internal/pull/703
- Mergerd in PR: https://github.com/PostHog/posthog/pull/26693
- Internal RFC: https://github.com/PostHog/product-internal/pull/703
- Merged in PR: https://github.com/PostHog/posthog/pull/26693

## Dev guidelines

- Please keep the folder names `under_score` cased, as dashes make it hard to import files in some languages (e.g. Python, Ruby, ...)
- Please keep the folder names `under_score` cased, as dashes make it hard to import files in some languages (e.g. Python, Ruby, ...)

products/conftest.py (new file, 4 lines)
@@ -0,0 +1,4 @@
# NOTE: We attempt to keep this file in sync with posthog/conftest.py
# by importing all of the declarations in that file.

from posthog.conftest import *
products/revenue_analytics/__init__.py (new empty file)
@@ -1,20 +1,17 @@
from typing import Union
from typing import cast, Union

from posthog.hogql import ast
from posthog.hogql.constants import LimitContext
from posthog.hogql_queries.insights.paginators import HogQLHasMorePaginator
from posthog.hogql_queries.query_runner import QueryRunner
from posthog.hogql.database.database import create_hogql_database, Database
from posthog.hogql.hogql import HogQLContext
from posthog.schema import (
    RevenueExampleDataWarehouseTablesQuery,
    RevenueExampleDataWarehouseTablesQueryResponse,
    CachedRevenueExampleDataWarehouseTablesQueryResponse,
)

from posthog.hogql.database.schema.exchange_rate import (
    DEFAULT_CURRENCY,
    currency_expression_for_data_warehouse,
    revenue_expression_for_data_warehouse,
)
from ..models import RevenueAnalyticsRevenueView


class RevenueExampleDataWarehouseTablesQueryRunner(QueryRunner):
@@ -22,6 +19,8 @@ class RevenueExampleDataWarehouseTablesQueryRunner(QueryRunner):
    response: RevenueExampleDataWarehouseTablesQueryResponse
    cached_response: CachedRevenueExampleDataWarehouseTablesQueryResponse
    paginator: HogQLHasMorePaginator
    database: Database
    hogql_context: HogQLContext

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
@@ -29,42 +28,37 @@ class RevenueExampleDataWarehouseTablesQueryRunner(QueryRunner):
            limit_context=LimitContext.QUERY, limit=self.query.limit if self.query.limit else None
        )

    def to_query(self) -> Union[ast.SelectQuery, ast.SelectSetQuery]:
        tracking_config = self.query.revenueTrackingConfig
        # We create a new context here because we need to access the database
        # below in the to_query method and creating a database is pretty heavy
        # so we'll reuse this database for the query once it eventually runs
        self.database = create_hogql_database(team=self.team)
        self.hogql_context = HogQLContext(team_id=self.team.pk, database=self.database)

    def to_query(self) -> Union[ast.SelectQuery, ast.SelectSetQuery]:
        queries = []
        if tracking_config.dataWarehouseTables:
            for table in tracking_config.dataWarehouseTables:

        # UNION ALL for all of the `RevenueAnalyticsRevenueView`s
        for view_name in self.database.get_views():
            view = self.database.get_table(view_name)
            if isinstance(view, RevenueAnalyticsRevenueView):
                view = cast(RevenueAnalyticsRevenueView, view)

                queries.append(
                    ast.SelectQuery(
                        select=[
                            ast.Alias(alias="table_name", expr=ast.Constant(value=table.tableName)),
                            ast.Alias(alias="view_name", expr=ast.Constant(value=view_name)),
                            ast.Alias(alias="distinct_id", expr=ast.Field(chain=[view_name, "id"])),
                            ast.Alias(
                                alias="distinct_id", expr=ast.Field(chain=[table.tableName, table.distinctIdColumn])
                                alias="original_revenue", expr=ast.Field(chain=[view_name, "adjusted_original_amount"])
                            ),
                            ast.Alias(
                                alias="original_revenue",
                                expr=revenue_expression_for_data_warehouse(
                                    tracking_config, table, do_currency_conversion=False
                                ),
                            ),
                            ast.Alias(
                                alias="original_currency",
                                expr=currency_expression_for_data_warehouse(tracking_config, table),
                            ),
                            ast.Alias(
                                alias="revenue",
                                expr=revenue_expression_for_data_warehouse(tracking_config, table),
                            ),
                            ast.Alias(
                                alias="currency",
                                expr=ast.Constant(value=(tracking_config.baseCurrency or DEFAULT_CURRENCY).value),
                                alias="original_currency", expr=ast.Field(chain=[view_name, "original_currency"])
                            ),
                            ast.Alias(alias="revenue", expr=ast.Field(chain=[view_name, "amount"])),
                            ast.Alias(alias="currency", expr=ast.Field(chain=[view_name, "currency"])),
                        ],
                        select_from=ast.JoinExpr(table=ast.Field(chain=[table.tableName])),
                        order_by=[
                            ast.OrderExpr(expr=ast.Field(chain=[table.tableName, table.timestampColumn]), order="DESC")
                        ],
                        select_from=ast.JoinExpr(table=ast.Field(chain=[view_name])),
                        order_by=[ast.OrderExpr(expr=ast.Field(chain=[view_name, "timestamp"]), order="DESC")],
                    )
                )

@@ -72,6 +66,9 @@ class RevenueExampleDataWarehouseTablesQueryRunner(QueryRunner):
        if len(queries) == 0:
            return ast.SelectQuery.empty()

        if len(queries) == 1:
            return queries[0]

        return ast.SelectSetQuery.create_from_queries(queries, set_operator="UNION ALL")
|
||||
|
||||
def calculate(self):
|
||||
@@ -81,10 +78,11 @@ class RevenueExampleDataWarehouseTablesQueryRunner(QueryRunner):
|
||||
team=self.team,
|
||||
timings=self.timings,
|
||||
modifiers=self.modifiers,
|
||||
hogql_context=self.hogql_context,
|
||||
)
|
||||
|
||||
return RevenueExampleDataWarehouseTablesQueryResponse(
|
||||
columns=["table_name", "distinct_id", "original_revenue", "original_currency", "revenue", "currency"],
|
||||
columns=response.columns,
|
||||
results=response.results,
|
||||
timings=response.timings,
|
||||
types=response.types,
|
||||
@@ -1,7 +1,5 @@
|
||||
import json
|
||||
|
||||
from pydantic import ValidationError
|
||||
|
||||
from posthog.hogql import ast
|
||||
from posthog.hogql.ast import CompareOperationOp
|
||||
from posthog.hogql.constants import LimitContext
|
||||
@@ -14,7 +12,6 @@ from posthog.hogql.database.schema.exchange_rate import (
|
||||
currency_expression_for_all_events,
|
||||
)
|
||||
from posthog.schema import (
|
||||
RevenueTrackingConfig,
|
||||
RevenueExampleEventsQuery,
|
||||
RevenueExampleEventsQueryResponse,
|
||||
CachedRevenueExampleEventsQueryResponse,
|
||||
@@ -34,11 +31,7 @@ class RevenueExampleEventsQueryRunner(QueryRunner):
|
||||
)
|
||||
|
||||
def to_query(self) -> ast.SelectQuery:
|
||||
tracking_config = RevenueTrackingConfig()
|
||||
try:
|
||||
tracking_config = RevenueTrackingConfig.model_validate(self.query.revenueTrackingConfig)
|
||||
except ValidationError:
|
||||
pass # Use default config set above
|
||||
revenue_config = self.team.revenue_config
|
||||
|
||||
select = ast.SelectQuery(
|
||||
select=[
|
||||
@@ -54,12 +47,12 @@ class RevenueExampleEventsQueryRunner(QueryRunner):
|
||||
ast.Field(chain=["event"]),
|
||||
ast.Alias(
|
||||
alias="original_revenue",
|
||||
expr=revenue_expression_for_events(tracking_config, do_currency_conversion=False),
|
||||
expr=revenue_expression_for_events(revenue_config, do_currency_conversion=False),
|
||||
),
|
||||
ast.Alias(alias="original_currency", expr=currency_expression_for_all_events(tracking_config)),
|
||||
ast.Alias(alias="revenue", expr=revenue_expression_for_events(tracking_config)),
|
||||
ast.Alias(alias="original_currency", expr=currency_expression_for_all_events(revenue_config)),
|
||||
ast.Alias(alias="revenue", expr=revenue_expression_for_events(revenue_config)),
|
||||
ast.Alias(
|
||||
alias="currency", expr=ast.Constant(value=(tracking_config.baseCurrency or DEFAULT_CURRENCY).value)
|
||||
alias="currency", expr=ast.Constant(value=(revenue_config.baseCurrency or DEFAULT_CURRENCY).value)
|
||||
),
|
||||
ast.Call(
|
||||
name="tuple",
|
||||
@@ -76,7 +69,7 @@ class RevenueExampleEventsQueryRunner(QueryRunner):
|
||||
select_from=ast.JoinExpr(table=ast.Field(chain=["events"])),
|
||||
where=ast.And(
|
||||
exprs=[
|
||||
revenue_where_expr_for_events(tracking_config),
|
||||
revenue_where_expr_for_events(revenue_config),
|
||||
ast.CompareOperation(
|
||||
op=CompareOperationOp.NotEq,
|
||||
left=ast.Field(chain=["revenue"]), # refers to the Alias above
|
||||
@@ -0,0 +1,48 @@
# serializer version: 1
# name: TestRevenueExampleDataWarehouseTablesQueryRunner.test_database_query
  '''
  SELECT 'stripe_posthog_test__revenue' AS view_name,
         stripe_posthog_test__revenue.id AS distinct_id,
         stripe_posthog_test__revenue.adjusted_original_amount AS original_revenue,
         stripe_posthog_test__revenue.original_currency AS original_currency,
         stripe_posthog_test__revenue.amount AS revenue,
         stripe_posthog_test__revenue.currency AS currency
  FROM
    (SELECT stripe_charge.id AS id,
            parseDateTime64BestEffortOrNull(toString(stripe_charge.created), 6, 'UTC') AS timestamp,
            accurateCastOrNull(stripe_charge.amount, 'Decimal64(10)') AS original_amount,
            upper(stripe_charge.currency) AS original_currency,
            in(original_currency,
               tuple('BIF', 'CLP', 'DJF', 'GNF', 'JPY', 'KMF', 'KRW', 'MGA', 'PYG', 'RWF', 'UGX', 'VND', 'VUV', 'XAF', 'XOF', 'XPF')) AS currency_is_zero_decimal,
            if(currency_is_zero_decimal, accurateCastOrNull(1, 'Decimal64(10)'), accurateCastOrNull(100, 'Decimal64(10)')) AS amount_decimal_divider,
            divideDecimal(original_amount, amount_decimal_divider) AS adjusted_original_amount,
            'GBP' AS currency,
            if(equals(original_currency, currency), toDecimal64(adjusted_original_amount, 10), if(dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', original_currency, toDate(ifNull(timestamp, toDateTime(0, 'UTC'))), toDecimal64(0, 10)) = 0, toDecimal64(0, 10), multiplyDecimal(divideDecimal(toDecimal64(adjusted_original_amount, 10), dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', original_currency, toDate(ifNull(timestamp, toDateTime(0, 'UTC'))), toDecimal64(0, 10))), dictGetOrDefault(`posthog_test`.`exchange_rate_dict`, 'rate', currency, toDate(ifNull(timestamp, toDateTime(0, 'UTC'))), toDecimal64(0, 10))))) AS amount
     FROM s3('http://host.docker.internal:19000/posthog/test_storage_bucket-posthog.revenue.stripe_charges/*.csv', 'object_storage_root_user', 'object_storage_root_password', 'CSVWithNames', '`id` String, `paid` Int8, `amount` Int64, `object` String, `source` String, `status` String, `created` DateTime, `invoice` String, `outcome` String, `captured` Int8, `currency` String, `customer` String, `disputed` Int8, `livemode` Int8, `metadata` String, `refunded` Int8, `description` String, `receipt_url` String, `failure_code` String, `fraud_details` String, `radar_options` String, `receipt_email` String, `payment_intent` String, `payment_method` String, `amount_captured` Int64, `amount_refunded` Int64, `billing_details` String, `failure_message` String, `balance_transaction` String, `statement_descriptor` String, `payment_method_details` String, `calculated_statement_descriptor` String') AS stripe_charge) AS stripe_posthog_test__revenue
  ORDER BY stripe_posthog_test__revenue.timestamp DESC
  LIMIT 101
  OFFSET 0 SETTINGS readonly=2,
                    max_execution_time=60,
                    allow_experimental_object_type=1,
                    format_csv_allow_double_quotes=0,
                    max_ast_elements=4000000,
                    max_expanded_ast_elements=4000000,
                    max_bytes_before_external_group_by=0,
                    transform_null_in=1
  '''
# ---
# name: TestRevenueExampleDataWarehouseTablesQueryRunner.test_no_crash_when_no_data
  '''
  SELECT 1
  WHERE 0
  LIMIT 101
  OFFSET 0 SETTINGS readonly=2,
                    max_execution_time=60,
                    allow_experimental_object_type=1,
                    format_csv_allow_double_quotes=0,
                    max_ast_elements=4000000,
                    max_expanded_ast_elements=4000000,
                    max_bytes_before_external_group_by=0,
                    transform_null_in=1
  '''
# ---
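The nested if(...) that produces `amount` in the snapshot above is easier to read as plain arithmetic: keep the amount as-is when the charge is already in the base currency, otherwise divide by the source currency's rate and multiply by the target's, treating a missing rate as zero. A minimal Python sketch of that rule, with hypothetical rates standing in for the `exchange_rate_dict` lookups (which the real query keys by currency and charge date):

from decimal import Decimal

# Hypothetical rates; the real query fetches these per-currency, per-date
# via dictGetOrDefault(..., 'rate', currency, date, toDecimal64(0, 10)).
RATES = {"EUR": Decimal("0.95"), "GBP": Decimal("0.79")}

def convert(amount: Decimal, original: str, target: str) -> Decimal:
    if original == target:
        return amount
    original_rate = RATES.get(original, Decimal(0))
    if original_rate == 0:  # mirrors the "rate missing -> 0" guard in the SQL
        return Decimal(0)
    return amount / original_rate * RATES.get(target, Decimal(0))

# convert(Decimal("220"), "EUR", "GBP") is roughly Decimal("182.9") with the rates above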
@@ -0,0 +1,13 @@
id,paid,amount,object,status,created,invoice,captured,currency,customer,disputed,livemode,metadata,refunded,description,receipt_url,failure_code,fraud_details,radar_options,receipt_email,payment_intent,payment_method,amount_captured,amount_refunded,billing_details,failure_message,balance_transaction,statement_descriptor,calculated_statement_descriptor,source,outcome,payment_method_details
ch_1,1,10000,charge,succeeded,"2025-01-29 12:00:00",in_1,1,usd,cus_1,0,0,,0,"Subscription payment","https://receipt.stripe.com/1",,,,customer@test.com,pi_1,pm_1,10000,0,,,,SUBSCRIPTION,"POSTHOG*SUBSCRIPTION",,,
ch_2,1,15000,charge,succeeded,"2025-01-30 08:43:00",in_2,1,eur,cus_2,0,0,,0,"One-time purchase","https://receipt.stripe.com/2",,,,customer2@test.com,pi_2,pm_2,15000,0,,,,ONE-TIME,"POSTHOG*PURCHASE",,,
ch_3,1,5000,charge,succeeded,"2025-01-31 07:12:00",in_3,1,gbp,cus_3,0,0,,0,"Add-on purchase","https://receipt.stripe.com/3",,,,customer3@test.com,pi_3,pm_3,5000,0,,,,ADD-ON,"POSTHOG*ADD-ON",,,
ch_4,1,20000,charge,succeeded,"2025-02-01 05:57:00",in_4,1,usd,cus_4,0,0,,1,"Refunded purchase","https://receipt.stripe.com/4",,,,customer4@test.com,pi_4,pm_4,20000,20000,,,,PURCHASE,"POSTHOG*REFUNDED",,,
ch_5,0,7500,charge,failed,"2025-02-02 02:34:00",in_5,0,eur,cus_5,0,0,,0,"Failed payment",,insufficient_funds,,,customer5@test.com,pi_5,pm_5,0,0,,"Insufficient funds",,,,,,
ch_6,1,12500,charge,succeeded,"2025-02-03 07:29:00",in_6,1,gbp,cus_6,0,0,,1,"Partial refund","https://receipt.stripe.com/6",,,,customer6@test.com,pi_6,pm_6,12500,2500,,,,PARTIAL-REFUND,"POSTHOG*PARTIAL-REFUND",,,
ch_7,1,30000,charge,succeeded,"2025-02-04 23:46:00",in_7,1,usd,cus_7,0,0,,0,"Enterprise plan","https://receipt.stripe.com/7",,,,customer7@test.com,pi_7,pm_7,30000,0,,,,ENTERPRISE,"POSTHOG*ENTERPRISE",,,
ch_8,1,500,charge,succeeded,"2025-02-05 09:21:00",in_8,1,jpy,cus_8,0,0,,0,"Low amount purchase","https://receipt.stripe.com/8",,,,customer8@test.com,pi_8,pm_8,500,0,,,,ADDON,"POSTHOG*ADDON",,,
ch_9,1,25000,charge,succeeded,"2025-02-06 13:43:00",in_9,1,eur,cus_9,0,0,,0,"Team plan upgrade","https://receipt.stripe.com/9",,,,customer9@test.com,pi_9,pm_9,25000,0,,,,UPGRADE,"POSTHOG*UPGRADE",,,
ch_10,1,18000,charge,succeeded,"2025-02-07 04:51:00",in_10,1,gbp,cus_10,0,0,,0,"Annual subscription","https://receipt.stripe.com/10",,,,customer10@test.com,pi_10,pm_10,18000,0,,,,ANNUAL,"POSTHOG*ANNUAL",,,
ch_11,0,9000,charge,pending,"2025-02-08 11:00:00",,0,usd,cus_11,0,0,,0,"Pending charge",,,,,customer11@test.com,pi_11,pm_11,0,0,,,,,,,,
ch_12,1,22000,charge,succeeded,"2025-02-09 02:42:00",in_12,1,eur,cus_12,0,0,,1,"Full refund","https://receipt.stripe.com/12",,,,customer12@test.com,pi_12,pm_12,22000,22000,,,,REFUND,"POSTHOG*REFUND",,,
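Because the generated query orders by timestamp descending, the newest fixture rows come back first: ch_12 (22000 eur) becomes 220 EUR, ch_11 (9000 usd) becomes 90 USD, and ch_10 (18000 gbp) becomes 180 GBP, matching the assertions in the query runner test further down. A quick standalone sanity check of that ordering, assuming the fixture is saved locally as stripe_charges.csv (the tests themselves load it through create_data_warehouse_table_from_csv):

import csv
from decimal import Decimal

with open("stripe_charges.csv") as f:
    # Newest charge first, mirroring ORDER BY timestamp DESC in the generated query
    rows = sorted(csv.DictReader(f), key=lambda row: row["created"], reverse=True)

assert rows[0]["id"] == "ch_12"
assert Decimal(rows[0]["amount"]) / 100 == Decimal("220")  # 22000 eur cents -> 220 EUR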
@@ -0,0 +1,66 @@
from posthog.schema import CurrencyCode
from posthog.warehouse.models import ExternalDataSource, ExternalDataSchema, DataWarehouseTable, DataWarehouseCredential
from posthog.test.base import BaseTest

from products.revenue_analytics.backend.models import (
    RevenueAnalyticsRevenueView,
    ZERO_DECIMAL_CURRENCIES_IN_STRIPE,
)


class TestRevenueAnalyticsModels(BaseTest):
    def setUp(self):
        super().setUp()
        self.source = ExternalDataSource.objects.create(
            team=self.team,
            source_id="source_id",
            connection_id="connection_id",
            status=ExternalDataSource.Status.COMPLETED,
            source_type=ExternalDataSource.Type.STRIPE,
        )
        self.credentials = DataWarehouseCredential.objects.create(
            access_key="blah", access_secret="blah", team=self.team
        )
        self.table = DataWarehouseTable.objects.create(
            name="table_1",
            format="Parquet",
            team=self.team,
            external_data_source=self.source,
            external_data_source_id=self.source.id,
            credential=self.credentials,
            url_pattern="https://bucket.s3/data/*",
            columns={"id": {"hogql": "StringDatabaseField", "clickhouse": "Nullable(String)", "schema_valid": True}},
        )
        self.schema = ExternalDataSchema.objects.create(
            team=self.team,
            name="Charge",
            source=self.source,
            table=self.table,
            should_sync=True,
            last_synced_at="2024-01-01",
        )

    def test_zero_decimal_currencies(self):
        """Test that zero decimal currencies are correctly defined"""
        self.assertIn(CurrencyCode.JPY, ZERO_DECIMAL_CURRENCIES_IN_STRIPE)
        self.assertIn(CurrencyCode.KRW, ZERO_DECIMAL_CURRENCIES_IN_STRIPE)
        self.assertNotIn(CurrencyCode.USD, ZERO_DECIMAL_CURRENCIES_IN_STRIPE)

    def test_revenue_view_creation(self):
        view = RevenueAnalyticsRevenueView.for_schema_source(self.source)
        self.assertIsNotNone(view)

    def test_revenue_view_non_stripe_source(self):
        """Test that RevenueAnalyticsRevenueView returns None for non-Stripe sources"""
        self.source.source_type = "Salesforce"
        self.source.save()

        view = RevenueAnalyticsRevenueView.for_schema_source(self.source)
        self.assertIsNone(view)

    def test_revenue_view_missing_schema(self):
        """Test that RevenueAnalyticsRevenueView handles missing schema gracefully"""
        self.schema.delete()

        view = RevenueAnalyticsRevenueView.for_schema_source(self.source)
        self.assertIsNone(view)
@@ -0,0 +1,130 @@
from freezegun import freeze_time
from pathlib import Path
from decimal import Decimal

from products.revenue_analytics.backend.hogql_queries.revenue_example_data_warehouse_tables_query_runner import (
    RevenueExampleDataWarehouseTablesQueryRunner,
)
from products.revenue_analytics.backend.models import STRIPE_DATA_WAREHOUSE_CHARGE_IDENTIFIER

from posthog.schema import (
    RevenueExampleDataWarehouseTablesQuery,
    RevenueTrackingConfig,
    RevenueExampleDataWarehouseTablesQueryResponse,
    CurrencyCode,
)
from posthog.test.base import (
    APIBaseTest,
    ClickhouseTestMixin,
    snapshot_clickhouse_queries,
)
from posthog.warehouse.models import ExternalDataSchema

from posthog.warehouse.test.utils import create_data_warehouse_table_from_csv


STRIPE_CHARGE_COLUMNS = {
    "id": "String",
    "paid": "Int8",
    "amount": "Int64",
    "object": "String",
    "status": "String",
    "created": "DateTime",
    "invoice": "String",
    "captured": "Int8",
    "currency": "String",
    "customer": "String",
    "disputed": "Int8",
    "livemode": "Int8",
    "metadata": "String",
    "refunded": "Int8",
    "description": "String",
    "receipt_url": "String",
    "failure_code": "String",
    "fraud_details": "String",
    "radar_options": "String",
    "receipt_email": "String",
    "payment_intent": "String",
    "payment_method": "String",
    "amount_captured": "Int64",
    "amount_refunded": "Int64",
    "billing_details": "String",
    "failure_message": "String",
    "balance_transaction": "String",
    "statement_descriptor": "String",
    "calculated_statement_descriptor": "String",
    "source": "String",
    "outcome": "String",
    "payment_method_details": "String",
}

REVENUE_TRACKING_CONFIG = RevenueTrackingConfig(baseCurrency=CurrencyCode.GBP, events=[])
TEST_BUCKET = "test_storage_bucket-posthog.revenue.stripe_charges"


@snapshot_clickhouse_queries
class TestRevenueExampleDataWarehouseTablesQueryRunner(ClickhouseTestMixin, APIBaseTest):
    QUERY_TIMESTAMP = "2025-01-29"

    def setUp(self):
        super().setUp()

        self.csv_path = Path(__file__).parent / "data" / "stripe_charges.csv"
        self.table, self.source, self.credential, self.csv_df, self.cleanUpFilesystem = (
            create_data_warehouse_table_from_csv(
                self.csv_path,
                "stripe_charge",
                STRIPE_CHARGE_COLUMNS,
                TEST_BUCKET,
                self.team,
            )
        )

        # Besides the default creations above, also create the external data schema
        # because this is required by the `RevenueAnalyticsRevenueView` to find the right tables
        self.schema = ExternalDataSchema.objects.create(
            team=self.team,
            name=STRIPE_DATA_WAREHOUSE_CHARGE_IDENTIFIER,
            source=self.source,
            table=self.table,
            should_sync=True,
            last_synced_at="2024-01-01",
        )

        self.team.revenue_tracking_config = REVENUE_TRACKING_CONFIG.model_dump()
        self.team.save()

    def tearDown(self):
        self.cleanUpFilesystem()
        super().tearDown()

    def _run_revenue_example_external_tables_query(self):
        with freeze_time(self.QUERY_TIMESTAMP):
            query = RevenueExampleDataWarehouseTablesQuery()
            runner = RevenueExampleDataWarehouseTablesQueryRunner(team=self.team, query=query)

            response = runner.calculate()
            RevenueExampleDataWarehouseTablesQueryResponse.model_validate(response)

            return response

    def test_no_crash_when_no_data(self):
        self.table.delete()
        results = self._run_revenue_example_external_tables_query().results

        assert len(results) == 0

    def test_database_query(self):
        response = self._run_revenue_example_external_tables_query()
        results = response.results

        assert len(results) == len(self.csv_df)  # Same number of rows as in the CSV

        # Proper conversions for some of the rows
        assert results[0][2:] == (Decimal("220"), "EUR", Decimal("182.247167654"), "GBP")
        assert results[1][2:] == (Decimal("90"), "USD", Decimal("71.73"), "GBP")
        assert results[2][2:] == (Decimal("180"), "GBP", Decimal("180"), "GBP")

        # Test JPY where there are no decimals, and an input of 500 implies 500 Yen
        # rather than the above where we had 22000 for 220 EUR (and so on)
        assert results[4][2:] == (Decimal("500"), "JPY", Decimal("2.5438762801"), "GBP")
@@ -1,11 +1,12 @@
from decimal import Decimal
from typing import Optional

from freezegun import freeze_time
from unittest.mock import patch

from posthog.hogql.constants import LimitContext
from posthog.hogql_queries.web_analytics.revenue_example_events_query_runner import RevenueExampleEventsQueryRunner
from products.revenue_analytics.backend.hogql_queries.revenue_example_events_query_runner import (
    RevenueExampleEventsQueryRunner,
)

from posthog.models.utils import uuid7
from posthog.schema import (
    CurrencyCode,
@@ -113,19 +114,21 @@ class TestRevenueExampleEventsQueryRunner(ClickhouseTestMixin, APIBaseTest):
    def _run_revenue_example_events_query(
        self,
        revenue_tracking_config: RevenueTrackingConfig,
        limit_context: Optional[LimitContext] = None,
    ):
        with freeze_time(self.QUERY_TIMESTAMP):
            query = RevenueExampleEventsQuery(
                revenueTrackingConfig=revenue_tracking_config,
            )
            runner = RevenueExampleEventsQueryRunner(team=self.team, query=query, limit_context=limit_context)
            self.team.revenue_tracking_config = revenue_tracking_config.model_dump()
            self.team.save()

            runner = RevenueExampleEventsQueryRunner(team=self.team, query=RevenueExampleEventsQuery())

            response = runner.calculate()
            RevenueExampleEventsQueryResponse.model_validate(response)

            return response

    def test_no_crash_when_no_data(self):
        self._run_revenue_example_events_query(EMPTY_REVENUE_TRACKING_CONFIG)
        results = self._run_revenue_example_events_query(EMPTY_REVENUE_TRACKING_CONFIG).results
        assert len(results.results) == 0

    def test_single_event(self):
        s11 = str(uuid7("2023-12-02"))
@@ -1,4 +1,4 @@
# NOTE: This isn't testing any of the custom Web Analytics code,
# NOTE: This isn't testing any of the custom Revenue Analytics code,
# but rather testing the revenue code in insights/trends/aggregation_operations.py
from decimal import Decimal
from typing import Any, Optional
@@ -34,7 +34,7 @@ from posthog.test.base import (


@snapshot_clickhouse_queries
class TestWebAnalyticsRevenue(ClickhouseTestMixin, APIBaseTest):
class TestRevenueAnalyticsInsights(ClickhouseTestMixin, APIBaseTest):
    def _create_events(self, data, event="product_sold"):
        person_result = []
        for id, events in data:
114 products/revenue_analytics/backend/models.py Normal file
@@ -0,0 +1,114 @@
from typing import cast, Optional

from posthog.warehouse.models.external_data_source import ExternalDataSource
from posthog.warehouse.models.table import DataWarehouseTable
from posthog.warehouse.models.external_data_schema import ExternalDataSchema
from posthog.models.exchange_rate.sql import EXCHANGE_RATE_DECIMAL_PRECISION
from posthog.schema import CurrencyCode
from posthog.hogql.database.models import (
    SavedQuery,
    BooleanDatabaseField,
    DateTimeDatabaseField,
    DecimalDatabaseField,
    StringDatabaseField,
    FieldOrTable,
)
from posthog.hogql.database.schema.exchange_rate import DEFAULT_CURRENCY


from typing import TYPE_CHECKING

if TYPE_CHECKING:
    pass

STRIPE_DATA_WAREHOUSE_CHARGE_IDENTIFIER = "Charge"

# Stripe represents most currencies with integer amounts multiplied by 100,
# since most currencies have their smallest unit at 1/100 of the base unit.
# Some currencies don't have that concept, so they're listed here:
# https://docs.stripe.com/currencies#zero-decimal
ZERO_DECIMAL_CURRENCIES_IN_STRIPE: list[CurrencyCode] = [
    CurrencyCode.BIF,
    CurrencyCode.CLP,
    CurrencyCode.DJF,
    CurrencyCode.GNF,
    CurrencyCode.JPY,
    CurrencyCode.KMF,
    CurrencyCode.KRW,
    CurrencyCode.MGA,
    CurrencyCode.PYG,
    CurrencyCode.RWF,
    CurrencyCode.UGX,
    CurrencyCode.VND,
    CurrencyCode.VUV,
    CurrencyCode.XAF,
    CurrencyCode.XOF,
    CurrencyCode.XPF,
]
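As a quick standalone sketch of the divider logic this list feeds into (the names below are illustrative, not part of this module): Stripe reports 22000 for a EUR 220.00 charge but 500 for a JPY 500 charge, so zero-decimal amounts are divided by 1 and everything else by 100.

from decimal import Decimal

ZERO_DECIMAL = {"BIF", "CLP", "DJF", "GNF", "JPY", "KMF", "KRW", "MGA", "PYG", "RWF", "UGX", "VND", "VUV", "XAF", "XOF", "XPF"}

def adjust_stripe_amount(raw_amount: int, currency: str) -> Decimal:
    # Zero-decimal currencies are already in whole units; everything else is in hundredths
    divider = Decimal(1) if currency.upper() in ZERO_DECIMAL else Decimal(100)
    return Decimal(raw_amount) / divider

assert adjust_stripe_amount(22000, "eur") == Decimal("220")
assert adjust_stripe_amount(500, "jpy") == Decimal("500")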
FIELDS: dict[str, FieldOrTable] = {
    "id": StringDatabaseField(name="id"),
    "timestamp": DateTimeDatabaseField(name="timestamp"),
    "original_amount": DecimalDatabaseField(name="original_amount", hidden=True),
    "original_currency": StringDatabaseField(name="original_currency", hidden=True),
    "currency_is_zero_decimal": BooleanDatabaseField(name="currency_is_zero_decimal", hidden=True),
    "amount_decimal_divider": DecimalDatabaseField(name="amount_decimal_divider", hidden=True),
    "adjusted_original_amount": DecimalDatabaseField(name="adjusted_original_amount", hidden=True),
    "currency": StringDatabaseField(name="currency"),
    "amount": DecimalDatabaseField(name="amount"),
}


class RevenueAnalyticsRevenueView(SavedQuery):
    @staticmethod
    def for_schema_source(source: ExternalDataSource) -> Optional["RevenueAnalyticsRevenueView"]:
        # Currently only works for Stripe sources
        if not source.source_type == ExternalDataSource.Type.STRIPE:
            return None

        # The table we care about is the one with the `Charge` schema, since from there
        # we can get the data we need in our view
        schema = next(
            (schema for schema in source.schemas.all() if schema.name == STRIPE_DATA_WAREHOUSE_CHARGE_IDENTIFIER), None
        )

        if schema is None:
            return None

        # Cast because pydantic is weird and we need to guarantee it's not Optional
        # even though we've checked for None above
        schema = cast(ExternalDataSchema, schema)

        if schema.table is None:
            return None

        # Same as above, need to guarantee it's not None
        table = cast(DataWarehouseTable, schema.table)

        team = table.team
        revenue_config = team.revenue_config

        zero_decimal_currencies = ", ".join([f"'{currency.value}'" for currency in ZERO_DECIMAL_CURRENCIES_IN_STRIPE])
        base_currency = (revenue_config.baseCurrency or DEFAULT_CURRENCY).value

        query = f"""
            SELECT
                id AS id,
                created_at AS timestamp,
                toDecimal(amount, {EXCHANGE_RATE_DECIMAL_PRECISION}) AS original_amount,
                upper(currency) AS original_currency,
                original_currency IN ({zero_decimal_currencies}) AS currency_is_zero_decimal,
                if(currency_is_zero_decimal, toDecimal(1, {EXCHANGE_RATE_DECIMAL_PRECISION}), toDecimal(100, {EXCHANGE_RATE_DECIMAL_PRECISION})) AS amount_decimal_divider,
                divideDecimal(original_amount, amount_decimal_divider) AS adjusted_original_amount,
                '{base_currency}' AS currency,
                convertCurrency(original_currency, currency, adjusted_original_amount, _toDate(ifNull(timestamp, toDateTime(0)))) AS amount
            FROM
                {table.name}
        """

        return RevenueAnalyticsRevenueView(
            id=str(table.id),
            name=f"stripe_{source.prefix or source.id}_revenue",
            query=query,
            fields=FIELDS,
        )
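How a factory like this gets consumed is outside this commit; as a hedged sketch, assuming a `team` is in scope and callers iterate its external data sources, the wiring might look like:

# Hypothetical wiring (not from this commit): build one revenue view per
# external data source on the team, skipping sources the factory rejects.
views = [
    view
    for source in ExternalDataSource.objects.filter(team=team)  # `team` assumed in scope
    if (view := RevenueAnalyticsRevenueView.for_schema_source(source)) is not None
]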
@@ -0,0 +1,32 @@
import { LemonButton } from '@posthog/lemon-ui'
import { router } from 'kea-router'
import { PageHeader } from 'lib/components/PageHeader'
import { ProductIntroduction } from 'lib/components/ProductIntroduction/ProductIntroduction'
import { SceneExport } from 'scenes/sceneTypes'
import { urls } from 'scenes/urls'

import { ProductKey } from '~/types'

import { revenueAnalyticsLogic } from './revenueAnalyticsLogic'

export const scene: SceneExport = {
    component: RevenueAnalyticsScene,
    logic: revenueAnalyticsLogic,
}

export function RevenueAnalyticsScene(): JSX.Element {
    return (
        <>
            <PageHeader buttons={<LemonButton type="primary">CREATE SOMETHING TODO</LemonButton>} delimited />
            <ProductIntroduction
                isEmpty // TODO: Compute whether people need to enable this or not
                productName="Revenue Analytics"
                productKey={ProductKey.REVENUE_ANALYTICS}
                thingName="revenue" // TODO: Doesn't make sense, this is temporary
                description="Track and analyze your revenue metrics to understand your business performance and growth."
                docsURL="https://posthog.com/docs/revenue-analytics"
                action={() => router.actions.push(urls.revenueAnalytics())} // TODO: Doesn't make sense, this is temporary
            />
        </>
    )
}
22 products/revenue_analytics/frontend/revenueAnalyticsLogic.ts Normal file
@@ -0,0 +1,22 @@
import { kea, path, selectors } from 'kea'
import { urls } from 'scenes/urls'

import { Breadcrumb } from '~/types'

import type { revenueAnalyticsLogicType } from './revenueAnalyticsLogicType'

export const revenueAnalyticsLogic = kea<revenueAnalyticsLogicType>([
    path(['products', 'revenueAnalytics', 'frontend', 'revenueAnalyticsLogic']),
    selectors({
        breadcrumbs: [
            () => [],
            (): Breadcrumb[] => [
                {
                    key: 'RevenueAnalytics',
                    name: 'Revenue analytics',
                    path: urls.revenueAnalytics(),
                },
            ],
        ],
    }),
])
@@ -10,14 +10,14 @@ const meta: Meta = {
    parameters: {
        layout: 'fullscreen',
        viewMode: 'story',
        featureFlags: [FEATURE_FLAGS.WEB_REVENUE_TRACKING, FEATURE_FLAGS.WEB_ANALYTICS_DATA_WAREHOUSE_REVENUE_SETTINGS],
        featureFlags: [FEATURE_FLAGS.WEB_REVENUE_TRACKING, FEATURE_FLAGS.REVENUE_ANALYTICS],
    },
}
export default meta

export function RevenueEventsSettings(): JSX.Element {
    useEffect(() => {
        router.actions.push(urls.revenue())
        router.actions.push(urls.revenueSettings())
    }, [])
    return <App />
}
@@ -8,7 +8,6 @@ import { featureFlagLogic } from 'lib/logic/featureFlagLogic'
import { useRef, useState } from 'react'

import { BaseCurrency } from './BaseCurrency'
import { DataWarehouseTablesConfiguration } from './DataWarehouseTablesConfiguration'
import { EventConfiguration } from './EventConfiguration'
import { revenueEventsSettingsLogic } from './revenueEventsSettingsLogic'
import { RevenueExampleDataWarehouseTablesData } from './RevenueExampleDataWarehouseTablesData'
@@ -19,16 +18,17 @@ type Tab = 'events' | 'data-warehouse'
export function RevenueEventsSettings(): JSX.Element {
    const [activeTab, setActiveTab] = useState<Tab>('events')

    const { events, dataWarehouseTables } = useValues(revenueEventsSettingsLogic)
    const { events } = useValues(revenueEventsSettingsLogic)

    const { featureFlags } = useValues(featureFlagLogic)

    const eventsButtonRef = useRef<HTMLButtonElement>(null)
    const dataWarehouseTablesButtonRef = useRef<HTMLButtonElement>(null)

    let introductionDescription =
        'Revenue events are used to track revenue in Web analytics. You can choose which custom events PostHog should consider as revenue events, and which event property corresponds to the value of the event.'
    if (featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_DATA_WAREHOUSE_REVENUE_SETTINGS]) {
    const product = featureFlags[FEATURE_FLAGS.REVENUE_ANALYTICS] ? 'Revenue analytics' : 'Web analytics'

    let introductionDescription = `Revenue events are used to track revenue in ${product}. You can choose which custom events PostHog should consider as revenue events, and which event property corresponds to the value of the event.`
    if (featureFlags[FEATURE_FLAGS.REVENUE_ANALYTICS]) {
        introductionDescription += ' You can also import revenue data from your PostHog data warehouse tables.'
    }

@@ -38,7 +38,7 @@ export function RevenueEventsSettings(): JSX.Element {
                productName="Revenue tracking"
                thingName="revenue event"
                description={introductionDescription}
                isEmpty={events.length === 0 && dataWarehouseTables.length === 0}
                isEmpty={events.length === 0}
                actionElementOverride={
                    <>
                        <div className="flex flex-col gap-2">
@@ -54,7 +54,7 @@ export function RevenueEventsSettings(): JSX.Element {
                                Create revenue event
                            </LemonButton>

                            {featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_DATA_WAREHOUSE_REVENUE_SETTINGS] && (
                            {featureFlags[FEATURE_FLAGS.REVENUE_ANALYTICS] && (
                                <LemonButton
                                    type="primary"
                                    icon={<IconPlus />}
@@ -76,11 +76,7 @@ export function RevenueEventsSettings(): JSX.Element {

            <EventConfiguration buttonRef={eventsButtonRef} />

            {featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_DATA_WAREHOUSE_REVENUE_SETTINGS] && (
                <DataWarehouseTablesConfiguration buttonRef={dataWarehouseTablesButtonRef} />
            )}

            {featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_DATA_WAREHOUSE_REVENUE_SETTINGS] ? (
            {featureFlags[FEATURE_FLAGS.REVENUE_ANALYTICS] ? (
                <LemonTabs
                    activeKey={activeTab}
                    onChange={(key) => setActiveTab(key as Tab)}
@@ -10,10 +10,7 @@ import {
    DataTableNode,
    NodeKind,
    RevenueCurrencyPropertyConfig,
    RevenueExampleDataWarehouseTablesQuery,
    RevenueExampleEventsQuery,
    RevenueTrackingConfig,
    RevenueTrackingDataWarehouseTable,
    RevenueTrackingEventItem,
} from '~/queries/schema/schema-general'
import { Region } from '~/types'
@@ -22,7 +19,6 @@ import type { revenueEventsSettingsLogicType } from './revenueEventsSettingsLogi

const createEmptyConfig = (region: Region | null | undefined): RevenueTrackingConfig => ({
    events: [],
    dataWarehouseTables: [],

    // Region won't always be set because we might mount this before we mount preflightLogic,
    // so we default to USD if we can't determine the region
@@ -49,18 +45,6 @@ export const revenueEventsSettingsLogic = kea<revenueEventsSettingsLogicType>([
            revenueCurrencyProperty,
        }),

        addDataWarehouseTable: (dataWarehouseTable: RevenueTrackingDataWarehouseTable) => dataWarehouseTable,
        deleteDataWarehouseTable: (dataWarehouseTableName: string) => ({ dataWarehouseTableName }),
        updateDataWarehouseTableColumn: (
            dataWarehouseTableName: string,
            key: keyof RevenueTrackingDataWarehouseTable & ('timestampColumn' | 'revenueColumn' | 'distinctIdColumn'),
            newValue: string
        ) => ({ dataWarehouseTableName, key, newValue }),
        updateDataWarehouseTableRevenueCurrencyColumn: (
            dataWarehouseTableName: string,
            revenueCurrencyColumn: RevenueCurrencyPropertyConfig
        ) => ({ dataWarehouseTableName, revenueCurrencyColumn }),

        resetConfig: true,
    }),
    reducers(({ values }) => ({
@@ -137,67 +121,6 @@ export const revenueEventsSettingsLogic = kea<revenueEventsSettingsLogicType>([
                    }),
                }
            },
            addDataWarehouseTable: (state, newDataWarehouseTable) => {
                if (!state) {
                    return state
                }

                // Guarantee we've only got a single external data schema per table
                if (state.dataWarehouseTables.some((item) => item.tableName === newDataWarehouseTable.tableName)) {
                    return state
                }

                return {
                    ...state,
                    dataWarehouseTables: [...state.dataWarehouseTables, newDataWarehouseTable],
                }
            },
            deleteDataWarehouseTable: (state, { dataWarehouseTableName }) => {
                if (!state) {
                    return state
                }
                return {
                    ...state,
                    dataWarehouseTables: state.dataWarehouseTables.filter(
                        (item) => item.tableName !== dataWarehouseTableName
                    ),
                }
            },
            updateDataWarehouseTableColumn: (state, { dataWarehouseTableName, key, newValue }) => {
                if (!state) {
                    return state
                }

                return {
                    ...state,
                    dataWarehouseTables: state.dataWarehouseTables.map((item) => {
                        if (item.tableName === dataWarehouseTableName) {
                            return { ...item, [key]: newValue }
                        }

                        return item
                    }),
                }
            },
            updateDataWarehouseTableRevenueCurrencyColumn: (
                state,
                { dataWarehouseTableName, revenueCurrencyColumn }
            ) => {
                if (!state) {
                    return state
                }

                return {
                    ...state,
                    dataWarehouseTables: state.dataWarehouseTables.map((item) => {
                        if (item.tableName === dataWarehouseTableName) {
                            return { ...item, revenueCurrencyColumn }
                        }

                        return item
                    }),
                }
            },
            resetConfig: () => {
                return values.savedRevenueTrackingConfig
            },
@@ -219,20 +142,16 @@ export const revenueEventsSettingsLogic = kea<revenueEventsSettingsLogicType>([
        ],
    })),
    selectors({
        events: [
            (s) => [s.revenueTrackingConfig],
            (revenueTrackingConfig: RevenueTrackingConfig | null) => revenueTrackingConfig?.events || [],
        ],
        dataWarehouseTables: [
            (s) => [s.revenueTrackingConfig],
            (revenueTrackingConfig: RevenueTrackingConfig | null) => revenueTrackingConfig?.dataWarehouseTables || [],
        ],
        baseCurrency: [
            (s) => [s.revenueTrackingConfig],
            (revenueTrackingConfig: RevenueTrackingConfig | null) =>
                revenueTrackingConfig?.baseCurrency || CurrencyCode.USD,
        ],

        events: [
            (s) => [s.revenueTrackingConfig],
            (revenueTrackingConfig: RevenueTrackingConfig | null) => revenueTrackingConfig?.events || [],
        ],
        changesMadeToEvents: [
            (s) => [s.revenueTrackingConfig, s.savedRevenueTrackingConfig],
            (config, savedConfig): boolean => {
@@ -240,13 +159,6 @@ export const revenueEventsSettingsLogic = kea<revenueEventsSettingsLogicType>([
            },
        ],

        changesMadeToDataWarehouseTables: [
            (s) => [s.revenueTrackingConfig, s.savedRevenueTrackingConfig],
            (config, savedConfig): boolean => {
                return !!config && !objectsEqual(config.dataWarehouseTables, savedConfig.dataWarehouseTables)
            },
        ],

        saveEventsDisabledReason: [
            (s) => [s.revenueTrackingConfig, s.changesMadeToEvents],
            (config, changesMade): string | null => {
@@ -259,18 +171,6 @@ export const revenueEventsSettingsLogic = kea<revenueEventsSettingsLogicType>([
                return null
            },
        ],
        saveDataWarehouseTablesDisabledReason: [
            (s) => [s.revenueTrackingConfig, s.changesMadeToDataWarehouseTables],
            (config, changesMade): string | null => {
                if (!config) {
                    return 'Loading...'
                }
                if (!changesMade) {
                    return 'No changes to save'
                }
                return null
            },
        ],

        exampleEventsQuery: [
            (s) => [s.savedRevenueTrackingConfig],
@@ -279,16 +179,13 @@ export const revenueEventsSettingsLogic = kea<revenueEventsSettingsLogicType>([
                return null
            }

            const source: RevenueExampleEventsQuery = {
                kind: NodeKind.RevenueExampleEventsQuery,
                revenueTrackingConfig: revenueTrackingConfig,
            }

            const query: DataTableNode = {
                kind: NodeKind.DataTableNode,
                full: true,
                showPropertyFilter: false,
                source,
                source: {
                    kind: NodeKind.RevenueExampleEventsQuery,
                },
            }

            return query
@@ -301,16 +198,13 @@ export const revenueEventsSettingsLogic = kea<revenueEventsSettingsLogicType>([
                return null
            }

            const source: RevenueExampleDataWarehouseTablesQuery = {
                kind: NodeKind.RevenueExampleDataWarehouseTablesQuery,
                revenueTrackingConfig: revenueTrackingConfig,
            }

            const query: DataTableNode = {
                kind: NodeKind.DataTableNode,
                full: true,
                showPropertyFilter: false,
                source,
                source: {
                    kind: NodeKind.RevenueExampleDataWarehouseTablesQuery,
                },
            }

            return query
@@ -337,7 +231,7 @@ export const revenueEventsSettingsLogic = kea<revenueEventsSettingsLogicType>([
        },
    })),
    beforeUnload(({ actions, values }) => ({
        enabled: () => values.changesMadeToEvents || values.changesMadeToDataWarehouseTables,
        enabled: () => values.changesMadeToEvents,
        message: 'Changes you made will be discarded. Make sure you save your changes before leaving this page.',
        onConfirm: () => {
            actions.resetConfig()
30 products/revenue_analytics/manifest.tsx Normal file
@@ -0,0 +1,30 @@
import { IconPiggyBank } from '@posthog/icons'
import { urls } from 'scenes/urls'

import { ProductManifest } from '~/types'

export const manifest: ProductManifest = {
    name: 'Revenue Analytics',
    scenes: {
        RevenueAnalytics: {
            name: 'Revenue Analytics',
            import: () => import('./frontend/RevenueAnalyticsScene'),
            projectBased: true,
            defaultDocsPath: '/docs/revenue-analytics',
            activityScope: 'RevenueAnalytics',
        },
    },
    routes: {
        '/revenue_analytics': ['RevenueAnalytics', 'revenueAnalytics'],
    },
    urls: {
        revenueAnalytics: (): string => '/revenue_analytics',
    },
    treeItemsExplore: [
        {
            path: 'Explore/Revenue analytics',
            icon: <IconPiggyBank />,
            href: () => urls.revenueAnalytics(),
        },
    ],
}
14 products/revenue_analytics/package.json Normal file
@@ -0,0 +1,14 @@
{
    "name": "@posthog/products-revenue-analytics",
    "peerDependencies": {
        "@posthog/icons": "*",
        "@storybook/react": "*",
        "@types/react": "*",
        "clsx": "*",
        "kea": "*",
        "kea-forms": "*",
        "kea-loaders": "*",
        "kea-router": "*",
        "react": "*"
    }
}
40 tach.toml
@@ -1,26 +1,14 @@
interfaces = []
exclude = [
    ".*__pycache__",
    ".*egg-info",
    "docs",
    "tests",
]
source_roots = [
    ".",
]
exclude = [".*__pycache__", ".*egg-info", "docs", "tests"]
source_roots = ["."]

[[modules]]
path = "<root>"
depends_on = [
    "posthog",
]
depends_on = ["posthog"]

[[modules]]
path = "ee"
depends_on = [
    "common.hogvm.python",
    "posthog",
]
depends_on = ["common.hogvm.python", "posthog"]

[[modules]]
path = "common.hogql_parser"
@@ -28,9 +16,7 @@ depends_on = []

[[modules]]
path = "common.hogvm.python"
depends_on = [
    "posthog",
]
depends_on = ["posthog"]

[[modules]]
path = "posthog"
@@ -39,15 +25,21 @@ depends_on = [
    "ee",
    "common.hogql_parser",
    "common.hogvm.python",

    # NOTE: Add new product dependencies here and in settings/web.py PRODUCTS_APPS
    "products.early_access_features",
    "products.editor",
    "products.revenue_analytics",
]

[[modules]]
path = "products.early_access_features"
depends_on = [
    "posthog",
]
depends_on = ["posthog"]

[[modules]]
path = "products.messaging"
depends_on = []
path = "products.editor"
depends_on = ["posthog"]

[[modules]]
path = "products.revenue_analytics"
depends_on = ["posthog"]