chore: adjust ci for rust services, merge master

This commit is contained in:
Neil Kakkar
2024-06-11 16:30:21 +01:00
parent b43108df10
commit ee6cd6d9c2
243 changed files with 5345 additions and 3808 deletions

View File

@@ -7,6 +7,8 @@ on:
push:
branches:
- master
paths-ignore:
- 'rust/**'
jobs:
slack:

View File

@@ -51,7 +51,7 @@ jobs:
# If this run wasn't initiated by the bot (meaning: snapshot update) and we've determined
# there are backend changes, cancel previous runs
- uses: n1hility/cancel-previous-runs@v3
if: github.actor != 'posthog-bot' && needs.changes.outputs.backend == 'true'
if: github.actor != 'posthog-bot' && needs.changes.outputs.hog == 'true'
with:
token: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -13,6 +13,8 @@ on:
push:
branches:
- master
paths-ignore:
- 'rust/**'
workflow_dispatch:
jobs:
@@ -120,17 +122,22 @@ jobs:
- name: Trigger Batch Exports Temporal Worker Cloud deployment
if: steps.check_changes_batch_exports_temporal_worker.outputs.changed == 'true'
uses: mvasigh/dispatch-action@main
uses: peter-evans/repository-dispatch@v3
with:
token: ${{ steps.deployer.outputs.token }}
repo: charts
owner: PostHog
event_type: temporal_worker_deploy
message: |
repository: PostHog/charts
event-type: commit_state_update
client-payload: |
{
"image_tag": "${{ steps.build.outputs.digest }}",
"worker_name": "temporal-worker",
"context": ${{ toJson(github) }}
"values": {
"image": {
"sha": "${{ steps.build.outputs.digest }}"
}
},
"release": "temporal-worker",
"commit": ${{ toJson(github.event.head_commit) }},
"repository": ${{ toJson(github.repository) }},
"labels": ${{ steps.labels.outputs.labels }}
}
- name: Check for changes that affect general purpose temporal worker
@@ -140,17 +147,22 @@ jobs:
- name: Trigger General Purpose Temporal Worker Cloud deployment
if: steps.check_changes_general_purpose_temporal_worker.outputs.changed == 'true'
uses: mvasigh/dispatch-action@main
uses: peter-evans/repository-dispatch@v3
with:
token: ${{ steps.deployer.outputs.token }}
repo: charts
owner: PostHog
event_type: temporal_worker_deploy
message: |
repository: PostHog/charts
event-type: commit_state_update
client-payload: |
{
"image_tag": "${{ steps.build.outputs.digest }}",
"worker_name": "temporal-worker-general-purpose",
"context": ${{ toJson(github) }}
"values": {
"image": {
"sha": "${{ steps.build.outputs.digest }}"
}
},
"release": "temporal-worker-general-purpose",
"commit": ${{ toJson(github.event.head_commit) }},
"repository": ${{ toJson(github.repository) }},
"labels": ${{ steps.labels.outputs.labels }}
}
- name: Check for changes that affect data warehouse temporal worker
@@ -160,15 +172,20 @@ jobs:
- name: Trigger Data Warehouse Temporal Worker Cloud deployment
if: steps.check_changes_data_warehouse_temporal_worker.outputs.changed == 'true'
uses: mvasigh/dispatch-action@main
uses: peter-evans/repository-dispatch@v3
with:
token: ${{ steps.deployer.outputs.token }}
repo: charts
owner: PostHog
event_type: temporal_worker_deploy
message: |
repository: PostHog/charts
event-type: commit_state_update
client-payload: |
{
"image_tag": "${{ steps.build.outputs.digest }}",
"worker_name": "temporal-worker-data-warehouse",
"context": ${{ toJson(github) }}
"values": {
"image": {
"sha": "${{ steps.build.outputs.digest }}"
}
},
"release": "temporal-worker-data-warehouse",
"commit": ${{ toJson(github.event.head_commit) }},
"repository": ${{ toJson(github.repository) }},
"labels": ${{ steps.labels.outputs.labels }}
}

View File

@@ -3,8 +3,10 @@ name: Build container images
on:
workflow_dispatch:
push:
paths:
- 'rust/**'
branches:
- 'main'
- 'master'
jobs:
build:
@@ -22,9 +24,19 @@ jobs:
contents: read # allow reading the repo contents
packages: write # allow push to ghcr.io
defaults:
run:
working-directory: rust
steps:
- name: Check Out Repo
# Checkout project code
# Use sparse checkout to only select files in rust directory
# Turning off cone mode ensures that files in the project root are not included during checkout
uses: actions/checkout@v3
with:
sparse-checkout: 'rust/'
sparse-checkout-cone-mode: false
- name: Set up Depot CLI
uses: depot/setup-action@v1
@@ -41,6 +53,7 @@ jobs:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
logout: false
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
@@ -49,7 +62,7 @@ jobs:
id: meta
uses: docker/metadata-action@v4
with:
images: ghcr.io/posthog/hog-rs/${{ matrix.image }}
images: ghcr.io/posthog/posthog/${{ matrix.image }}
tags: |
type=ref,event=pr
type=ref,event=branch
@@ -65,8 +78,8 @@ jobs:
id: docker_build
uses: depot/build-push-action@v1
with:
context: ./
file: ./Dockerfile
context: ./rust/
file: ./rust/Dockerfile
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}

View File

@@ -3,8 +3,10 @@ name: Build hook-migrator docker image
on:
workflow_dispatch:
push:
paths:
- 'rust/**'
branches:
- 'main'
- 'master'
permissions:
packages: write
@@ -18,9 +20,19 @@ jobs:
contents: read # allow reading the repo contents
packages: write # allow push to ghcr.io
defaults:
run:
working-directory: rust
steps:
- name: Check Out Repo
# Checkout project code
# Use sparse checkout to only select files in rust directory
# Turning off cone mode ensures that files in the project root are not included during checkout
uses: actions/checkout@v3
with:
sparse-checkout: 'rust/'
sparse-checkout-cone-mode: false
- name: Set up Depot CLI
uses: depot/setup-action@v1
@@ -61,8 +73,8 @@ jobs:
id: docker_build_hook_migrator
uses: depot/build-push-action@v1
with:
context: ./
file: ./Dockerfile.migrate
context: ./rust/
file: ./rust/Dockerfile.migrate
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}

192
.github/workflows/rust.yml vendored Normal file
View File

@@ -0,0 +1,192 @@
name: Rust

on:
    workflow_dispatch:
    push:
        branches: [master, main]
    pull_request:

env:
    CARGO_TERM_COLOR: always

jobs:
    # Job to decide if we should run rust ci
    # See https://github.com/dorny/paths-filter#conditional-execution for more details
    changes:
        runs-on: ubuntu-latest
        timeout-minutes: 5
        if: github.repository == 'PostHog/posthog'
        name: Determine need to run rust checks
        # Set job outputs to values from filter step
        outputs:
            rust: ${{ steps.filter.outputs.rust }}
        steps:
            # For pull requests it's not necessary to checkout the code, but we
            # also want this to run on master so we need to checkout
            - uses: actions/checkout@v3
            - uses: dorny/paths-filter@v2
              id: filter
              with:
                  filters: |
                      rust:
                          # Avoid running rust tests for irrelevant changes
                          - 'rust/**'

    build:
        needs: changes
        runs-on: depot-ubuntu-22.04-4
        defaults:
            run:
                working-directory: rust
        steps:
            # Checkout project code
            # Use sparse checkout to only select files in rust directory
            # Turning off cone mode ensures that files in the project root are not included during checkout
            - uses: actions/checkout@v3
              if: needs.changes.outputs.rust == 'true'
              with:
                  sparse-checkout: 'rust/'
                  sparse-checkout-cone-mode: false
            - name: Install rust
              if: needs.changes.outputs.rust == 'true'
              uses: dtolnay/rust-toolchain@1.77
            - uses: actions/cache@v3
              if: needs.changes.outputs.rust == 'true'
              with:
                  path: |
                      ~/.cargo/registry
                      ~/.cargo/git
                      rust/target
                  key: ${{ runner.os }}-cargo-release-${{ hashFiles('**/Cargo.lock') }}
            - name: Run cargo build
              if: needs.changes.outputs.rust == 'true'
              run: cargo build --all --locked --release && find target/release/ -maxdepth 1 -executable -type f | xargs strip

    test:
        needs: changes
        runs-on: depot-ubuntu-22.04-4
        timeout-minutes: 10
        defaults:
            run:
                working-directory: rust
        steps:
            # Checkout project code
            # Use sparse checkout to only select files in rust directory
            # Turning off cone mode ensures that files in the project root are not included during checkout
            - uses: actions/checkout@v3
              if: needs.changes.outputs.rust == 'true'
              with:
                  sparse-checkout: 'rust/'
                  sparse-checkout-cone-mode: false
            - name: Login to DockerHub
              if: needs.changes.outputs.rust == 'true'
              uses: docker/login-action@v2
              with:
                  username: posthog
                  password: ${{ secrets.DOCKERHUB_TOKEN }}
            - name: Setup dependencies
              if: needs.changes.outputs.rust == 'true'
              run: |
                  docker compose up kafka redis db echo_server -d --wait
                  docker compose up setup_test_db
                  echo "127.0.0.1 kafka" | sudo tee -a /etc/hosts
            - name: Install rust
              if: needs.changes.outputs.rust == 'true'
              uses: dtolnay/rust-toolchain@1.77
            - uses: actions/cache@v3
              if: needs.changes.outputs.rust == 'true'
              with:
                  path: |
                      ~/.cargo/registry
                      ~/.cargo/git
                      rust/target
                  # FIX: was `${ runner.os }` (single braces), which is not GitHub Actions
                  # expression syntax — the literal string ended up in the cache key, so
                  # caches were not separated per OS and never matched the other jobs' keys.
                  key: ${{ runner.os }}-cargo-debug-${{ hashFiles('**/Cargo.lock') }}
            - name: Run cargo test
              if: needs.changes.outputs.rust == 'true'
              run: cargo test --all-features

    linting:
        needs: changes
        runs-on: depot-ubuntu-22.04-4
        defaults:
            run:
                working-directory: rust
        steps:
            # Checkout project code
            # Use sparse checkout to only select files in rust directory
            # Turning off cone mode ensures that files in the project root are not included during checkout
            - uses: actions/checkout@v3
              if: needs.changes.outputs.rust == 'true'
              with:
                  sparse-checkout: 'rust/'
                  sparse-checkout-cone-mode: false
            - name: Install rust
              if: needs.changes.outputs.rust == 'true'
              uses: dtolnay/rust-toolchain@1.77
              with:
                  components: clippy,rustfmt
            - uses: actions/cache@v3
              if: needs.changes.outputs.rust == 'true'
              with:
                  path: |
                      ~/.cargo/registry
                      ~/.cargo/git
                      rust/target
                  key: ${{ runner.os }}-cargo-debug-${{ hashFiles('**/Cargo.lock') }}
            - name: Check format
              if: needs.changes.outputs.rust == 'true'
              run: cargo fmt -- --check
            - name: Run clippy
              if: needs.changes.outputs.rust == 'true'
              run: cargo clippy -- -D warnings
            - name: Run cargo check
              if: needs.changes.outputs.rust == 'true'
              run: cargo check --all-features

    shear:
        needs: changes
        runs-on: depot-ubuntu-22.04-4
        defaults:
            run:
                working-directory: rust
        steps:
            # Checkout project code
            # Use sparse checkout to only select files in rust directory
            # Turning off cone mode ensures that files in the project root are not included during checkout
            - uses: actions/checkout@v3
              if: needs.changes.outputs.rust == 'true'
              with:
                  sparse-checkout: 'rust/'
                  sparse-checkout-cone-mode: false
            - name: Install cargo-binstall
              if: needs.changes.outputs.rust == 'true'
              uses: cargo-bins/cargo-binstall@main
            - name: Install cargo-shear
              if: needs.changes.outputs.rust == 'true'
              run: cargo binstall --no-confirm cargo-shear
            - run: cargo shear
              if: needs.changes.outputs.rust == 'true'

View File

@@ -19,13 +19,15 @@
<a href="https://www.youtube.com/watch?v=2jQco8hEvTI">
<img src="https://img.youtube.com/vi/2jQco8hEvTI/0.jpg" alt="PostHog Demonstration">
</a>
<em>See PostHog in action</em>
<br
<em><a href="https://www.youtube.com/watch?v=2jQco8hEvTI">See PostHog in action</a></em>
</p>
## PostHog is an all-in-one, open source platform for building better products
- Specify events manually, or use autocapture to get started quickly
- Analyze data with ready-made visualizations, or do it yourself with SQL
- Only capture properties on the people you want to track, save money when you don't
- Gather insights by capturing session replays, console logs, and network monitoring
- Improve your product with A/B testing that automatically analyzes performance
- Safely roll out features to select users or cohorts with feature flags
@@ -38,7 +40,7 @@ PostHog is available with hosting in the EU or US and is fully SOC 2 compliant.
- 1 million feature flag requests
- 250 survey responses
We're constantly adding new features, with web analytics and data warehouse now in beta!
We're constantly adding new features, with <a href="https://posthog.com/docs/web-analytics">web analytics</a> and <a href="https://posthog.com/docs/data-warehouse">data warehouse</a> now in beta!
## Table of Contents
@@ -73,7 +75,7 @@ PostHog brings all the tools and data you need to build better products.
### Analytics and optimization tools
- **Event-based analytics:** Capture your product's usage [automatically](https://posthog.com/docs/libraries/js#autocapture), or [customize](https://posthog.com/docs/getting-started/install) it to your needs
- **User and group tracking:** Understand the [people](https://posthog.com/manual/persons) and [groups](https://posthog.com/manual/group-analytics) behind the events and track properties about them
- **User and group tracking:** Understand the [people](https://posthog.com/manual/persons) and [groups](https://posthog.com/manual/group-analytics) behind the events and track properties about them when needed
- **Data visualizations:** Create and share [graphs](https://posthog.com/docs/features/trends), [funnels](https://posthog.com/docs/features/funnels), [paths](https://posthog.com/docs/features/paths), [retention](https://posthog.com/docs/features/retention), and [dashboards](https://posthog.com/docs/features/dashboards)
- **SQL access:** Use [SQL](https://posthog.com/docs/product-analytics/sql) to get a deeper understanding of your users, breakdown information and create completely tailored visualizations
- **Session replays:** [Watch videos](https://posthog.com/docs/features/session-recording) of your users' behavior, with fine-grained filters and privacy controls, as well as network monitoring and captured console logs

View File

@@ -9,7 +9,8 @@ datamodel-codegen \
--input frontend/src/queries/schema.json --input-file-type jsonschema \
--output posthog/schema.py --output-model-type pydantic_v2.BaseModel \
--custom-file-header "# mypy: disable-error-code=\"assignment\"" \
--set-default-enum-member
--set-default-enum-member --capitalise-enum-members \
--wrap-string-literal
# Format schema.py
ruff format posthog/schema.py

View File

@@ -115,6 +115,13 @@ services:
CLICKHOUSE_SECURE: 'false'
CLICKHOUSE_VERIFY: 'false'
livestream:
image: 'ghcr.io/posthog/livestream:main'
restart: on-failure
depends_on:
kafka:
condition: service_started
migrate:
<<: *worker
command: sh -c "

View File

@@ -71,6 +71,14 @@ services:
- '1080:1080'
- '1025:1025'
webhook-tester:
image: tarampampam/webhook-tester:1.1.0
restart: on-failure
ports:
- '2080:2080'
environment:
- PORT=2080
worker:
extends:
file: docker-compose.base.yml

View File

@@ -95,6 +95,14 @@ services:
- '1080:1080'
- '1025:1025'
webhook-tester:
image: tarampampam/webhook-tester:1.1.0
restart: on-failure
ports:
- '2080:2080'
environment:
- LISTEN_PORT=2080
# Optional capture
capture:
profiles: ['capture-rs']
@@ -109,6 +117,17 @@ services:
- redis
- kafka
livestream:
extends:
file: docker-compose.base.yml
service: livestream
environment:
- JWT.TOKEN=${SECRET_KEY}
ports:
- '8666:8080'
volumes:
- ./docker/livestream/configs-dev.yml:/configs/configs.yml
# Temporal containers
elasticsearch:
extends:

View File

@@ -142,9 +142,11 @@ class TestCohort(ClickhouseTestMixin, BaseTest):
query, params = parse_prop_grouped_clauses(
team_id=self.team.pk,
property_group=filter.property_groups,
person_properties_mode=PersonPropertiesMode.USING_SUBQUERY
if self.team.person_on_events_mode == PersonsOnEventsMode.disabled
else PersonPropertiesMode.DIRECT_ON_EVENTS,
person_properties_mode=(
PersonPropertiesMode.USING_SUBQUERY
if self.team.person_on_events_mode == PersonsOnEventsMode.DISABLED
else PersonPropertiesMode.DIRECT_ON_EVENTS
),
hogql_context=filter.hogql_context,
)
final_query = "SELECT uuid FROM events WHERE team_id = %(team_id)s {}".format(query)
@@ -197,9 +199,11 @@ class TestCohort(ClickhouseTestMixin, BaseTest):
query, params = parse_prop_grouped_clauses(
team_id=self.team.pk,
property_group=filter.property_groups,
person_properties_mode=PersonPropertiesMode.USING_SUBQUERY
if self.team.person_on_events_mode == PersonsOnEventsMode.disabled
else PersonPropertiesMode.DIRECT_ON_EVENTS,
person_properties_mode=(
PersonPropertiesMode.USING_SUBQUERY
if self.team.person_on_events_mode == PersonsOnEventsMode.DISABLED
else PersonPropertiesMode.DIRECT_ON_EVENTS
),
hogql_context=filter.hogql_context,
)
final_query = "SELECT uuid FROM events WHERE team_id = %(team_id)s {}".format(query)
@@ -222,9 +226,11 @@ class TestCohort(ClickhouseTestMixin, BaseTest):
query, params = parse_prop_grouped_clauses(
team_id=self.team.pk,
property_group=filter.property_groups,
person_properties_mode=PersonPropertiesMode.USING_SUBQUERY
if self.team.person_on_events_mode == PersonsOnEventsMode.disabled
else PersonPropertiesMode.DIRECT_ON_EVENTS,
person_properties_mode=(
PersonPropertiesMode.USING_SUBQUERY
if self.team.person_on_events_mode == PersonsOnEventsMode.DISABLED
else PersonPropertiesMode.DIRECT_ON_EVENTS
),
hogql_context=filter.hogql_context,
)
final_query = "SELECT uuid FROM events WHERE team_id = %(team_id)s {}".format(query)
@@ -273,9 +279,11 @@ class TestCohort(ClickhouseTestMixin, BaseTest):
query, params = parse_prop_grouped_clauses(
team_id=self.team.pk,
property_group=filter.property_groups,
person_properties_mode=PersonPropertiesMode.USING_SUBQUERY
if self.team.person_on_events_mode == PersonsOnEventsMode.disabled
else PersonPropertiesMode.DIRECT_ON_EVENTS,
person_properties_mode=(
PersonPropertiesMode.USING_SUBQUERY
if self.team.person_on_events_mode == PersonsOnEventsMode.DISABLED
else PersonPropertiesMode.DIRECT_ON_EVENTS
),
hogql_context=filter.hogql_context,
)
final_query = "SELECT uuid FROM events WHERE team_id = %(team_id)s {}".format(query)
@@ -294,9 +302,11 @@ class TestCohort(ClickhouseTestMixin, BaseTest):
query, params = parse_prop_grouped_clauses(
team_id=self.team.pk,
property_group=filter.property_groups,
person_properties_mode=PersonPropertiesMode.USING_SUBQUERY
if self.team.person_on_events_mode == PersonsOnEventsMode.disabled
else PersonPropertiesMode.DIRECT_ON_EVENTS,
person_properties_mode=(
PersonPropertiesMode.USING_SUBQUERY
if self.team.person_on_events_mode == PersonsOnEventsMode.DISABLED
else PersonPropertiesMode.DIRECT_ON_EVENTS
),
hogql_context=filter.hogql_context,
)
final_query = "SELECT uuid FROM events WHERE team_id = %(team_id)s {}".format(query)

View File

@@ -319,7 +319,7 @@ class EnterpriseCohortQuery(FOSSCohortQuery):
event_param_name = f"{self._cohort_pk}_event_ids"
if self.should_pushdown_persons and self._person_on_events_mode != PersonsOnEventsMode.disabled:
if self.should_pushdown_persons and self._person_on_events_mode != PersonsOnEventsMode.DISABLED:
person_prop_query, person_prop_params = self._get_prop_groups(
self._inner_property_groups,
person_properties_mode=PersonPropertiesMode.DIRECT_ON_EVENTS,

View File

@@ -37,7 +37,7 @@ class EnterpriseEventQuery(EventQuery):
extra_event_properties: Optional[list[PropertyName]] = None,
extra_person_fields: Optional[list[ColumnName]] = None,
override_aggregate_users_by_distinct_id: Optional[bool] = None,
person_on_events_mode: PersonsOnEventsMode = PersonsOnEventsMode.disabled,
person_on_events_mode: PersonsOnEventsMode = PersonsOnEventsMode.DISABLED,
**kwargs,
) -> None:
if extra_person_fields is None:

View File

@@ -152,7 +152,7 @@ class FunnelCorrelation:
def properties_to_include(self) -> list[str]:
props_to_include = []
if (
self._team.person_on_events_mode != PersonsOnEventsMode.disabled
self._team.person_on_events_mode != PersonsOnEventsMode.DISABLED
and self._filter.correlation_type == FunnelCorrelationType.PROPERTIES
):
# When dealing with properties, make sure funnel response comes with properties
@@ -432,7 +432,7 @@ class FunnelCorrelation:
return query, params
def _get_aggregation_target_join_query(self) -> str:
if self._team.person_on_events_mode == PersonsOnEventsMode.person_id_no_override_properties_on_events:
if self._team.person_on_events_mode == PersonsOnEventsMode.PERSON_ID_NO_OVERRIDE_PROPERTIES_ON_EVENTS:
aggregation_person_join = f"""
JOIN funnel_actors as actors
ON event.person_id = actors.actor_id
@@ -499,7 +499,7 @@ class FunnelCorrelation:
def _get_aggregation_join_query(self):
if self._filter.aggregation_group_type_index is None:
if self._team.person_on_events_mode != PersonsOnEventsMode.disabled and groups_on_events_querying_enabled():
if self._team.person_on_events_mode != PersonsOnEventsMode.DISABLED and groups_on_events_querying_enabled():
return "", {}
person_query, person_query_params = PersonQuery(
@@ -519,7 +519,7 @@ class FunnelCorrelation:
return GroupsJoinQuery(self._filter, self._team.pk, join_key="funnel_actors.actor_id").get_join_query()
def _get_properties_prop_clause(self):
if self._team.person_on_events_mode != PersonsOnEventsMode.disabled and groups_on_events_querying_enabled():
if self._team.person_on_events_mode != PersonsOnEventsMode.DISABLED and groups_on_events_querying_enabled():
group_properties_field = f"group{self._filter.aggregation_group_type_index}_properties"
aggregation_properties_alias = (
"person_properties" if self._filter.aggregation_group_type_index is None else group_properties_field
@@ -546,7 +546,7 @@ class FunnelCorrelation:
param_name = f"property_name_{index}"
if self._filter.aggregation_group_type_index is not None:
expression, _ = get_property_string_expr(
"groups" if self._team.person_on_events_mode == PersonsOnEventsMode.disabled else "events",
"groups" if self._team.person_on_events_mode == PersonsOnEventsMode.DISABLED else "events",
property_name,
f"%({param_name})s",
aggregation_properties_alias,
@@ -554,13 +554,15 @@ class FunnelCorrelation:
)
else:
expression, _ = get_property_string_expr(
"person" if self._team.person_on_events_mode == PersonsOnEventsMode.disabled else "events",
"person" if self._team.person_on_events_mode == PersonsOnEventsMode.DISABLED else "events",
property_name,
f"%({param_name})s",
aggregation_properties_alias,
materialised_table_column=aggregation_properties_alias
if self._team.person_on_events_mode != PersonsOnEventsMode.disabled
else "properties",
materialised_table_column=(
aggregation_properties_alias
if self._team.person_on_events_mode != PersonsOnEventsMode.DISABLED
else "properties"
),
)
person_property_params[param_name] = property_name
person_property_expressions.append(expression)

View File

@@ -27,7 +27,7 @@ class GroupsJoinQuery:
team_id: int,
column_optimizer: Optional[EnterpriseColumnOptimizer] = None,
join_key: Optional[str] = None,
person_on_events_mode: PersonsOnEventsMode = PersonsOnEventsMode.disabled,
person_on_events_mode: PersonsOnEventsMode = PersonsOnEventsMode.DISABLED,
) -> None:
self._filter = filter
self._team_id = team_id
@@ -38,7 +38,7 @@ class GroupsJoinQuery:
def get_join_query(self) -> tuple[str, dict]:
join_queries, params = [], {}
if self._person_on_events_mode != PersonsOnEventsMode.disabled and groups_on_events_querying_enabled():
if self._person_on_events_mode != PersonsOnEventsMode.DISABLED and groups_on_events_querying_enabled():
return "", {}
for group_type_index in self._column_optimizer.group_types_to_query:

View File

@@ -64,7 +64,7 @@ class TestClickhouseSessionRecordingsListFromSessionReplay(ClickhouseTestMixin,
True,
False,
False,
PersonsOnEventsMode.person_id_no_override_properties_on_events,
PersonsOnEventsMode.PERSON_ID_NO_OVERRIDE_PROPERTIES_ON_EVENTS,
{
"kperson_filter_pre__0": "rgInternal",
"kpersonquery_person_filter_fin__0": "rgInternal",
@@ -80,7 +80,7 @@ class TestClickhouseSessionRecordingsListFromSessionReplay(ClickhouseTestMixin,
False,
False,
False,
PersonsOnEventsMode.disabled,
PersonsOnEventsMode.DISABLED,
{
"kperson_filter_pre__0": "rgInternal",
"kpersonquery_person_filter_fin__0": "rgInternal",
@@ -96,7 +96,7 @@ class TestClickhouseSessionRecordingsListFromSessionReplay(ClickhouseTestMixin,
False,
True,
False,
PersonsOnEventsMode.person_id_override_properties_on_events,
PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS,
{
"event_names": [],
"event_start_time": mock.ANY,
@@ -112,7 +112,7 @@ class TestClickhouseSessionRecordingsListFromSessionReplay(ClickhouseTestMixin,
False,
True,
True,
PersonsOnEventsMode.person_id_override_properties_on_events,
PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS,
{
"event_end_time": mock.ANY,
"event_names": [],

View File

@@ -62,7 +62,7 @@ class TestClickhouseSessionRecordingsListFromSessionReplay(ClickhouseTestMixin,
True,
False,
False,
PersonsOnEventsMode.person_id_no_override_properties_on_events,
PersonsOnEventsMode.PERSON_ID_NO_OVERRIDE_PROPERTIES_ON_EVENTS,
{
"kperson_filter_pre__0": "rgInternal",
"kpersonquery_person_filter_fin__0": "rgInternal",
@@ -78,7 +78,7 @@ class TestClickhouseSessionRecordingsListFromSessionReplay(ClickhouseTestMixin,
False,
False,
False,
PersonsOnEventsMode.disabled,
PersonsOnEventsMode.DISABLED,
{
"kperson_filter_pre__0": "rgInternal",
"kpersonquery_person_filter_fin__0": "rgInternal",
@@ -94,7 +94,7 @@ class TestClickhouseSessionRecordingsListFromSessionReplay(ClickhouseTestMixin,
False,
True,
False,
PersonsOnEventsMode.person_id_override_properties_on_events,
PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS,
{
"event_names": [],
"event_start_time": mock.ANY,
@@ -110,7 +110,7 @@ class TestClickhouseSessionRecordingsListFromSessionReplay(ClickhouseTestMixin,
False,
True,
True,
PersonsOnEventsMode.person_id_override_properties_on_events,
PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS,
{
"event_end_time": mock.ANY,
"event_names": [],

Binary file not shown.

Before

Width:  |  Height:  |  Size: 175 KiB

After

Width:  |  Height:  |  Size: 173 KiB

View File

@@ -17,6 +17,7 @@ import {
IconServer,
IconTestTube,
IconToggle,
IconWarning,
} from '@posthog/icons'
import { lemonToast, Spinner } from '@posthog/lemon-ui'
import { captureException } from '@sentry/react'
@@ -450,6 +451,14 @@ export const navigation3000Logic = kea<navigation3000LogicType>([
icon: <IconRewindPlay />,
to: urls.replay(),
},
featureFlags[FEATURE_FLAGS.ERROR_TRACKING]
? {
identifier: Scene.ErrorTracking,
label: 'Error tracking',
icon: <IconWarning />,
to: urls.errorTracking(),
}
: null,
featureFlags[FEATURE_FLAGS.HEATMAPS_UI]
? {
identifier: Scene.Heatmaps,

View File

@@ -43,6 +43,7 @@ import {
FeatureFlagType,
Group,
GroupListParams,
HogFunctionType,
InsightModel,
IntegrationType,
ListOrganizationMembersParams,
@@ -320,6 +321,14 @@ class ApiRequest {
return this.pluginConfig(pluginConfigId, teamId).addPathComponent('logs')
}
public hogFunctions(teamId?: TeamType['id']): ApiRequest {
return this.projectsDetail(teamId).addPathComponent('hog_functions')
}
public hogFunction(id: HogFunctionType['id'], teamId?: TeamType['id']): ApiRequest {
return this.hogFunctions(teamId).addPathComponent(id)
}
// # Actions
public actions(teamId?: TeamType['id']): ApiRequest {
return this.projectsDetail(teamId).addPathComponent('actions')
@@ -1634,6 +1643,24 @@ const api = {
},
},
hogFunctions: {
async listTemplates(): Promise<PaginatedResponse<HogFunctionType>> {
return await new ApiRequest().hogFunctions().get()
},
async list(): Promise<PaginatedResponse<HogFunctionType>> {
return await new ApiRequest().hogFunctions().get()
},
async get(id: HogFunctionType['id']): Promise<HogFunctionType> {
return await new ApiRequest().hogFunction(id).get()
},
async create(data: Partial<HogFunctionType>): Promise<HogFunctionType> {
return await new ApiRequest().hogFunctions().create({ data })
},
async update(id: HogFunctionType['id'], data: Partial<HogFunctionType>): Promise<HogFunctionType> {
return await new ApiRequest().hogFunction(id).update({ data })
},
},
annotations: {
async get(annotationId: RawAnnotationType['id']): Promise<RawAnnotationType> {
return await new ApiRequest().annotation(annotationId).get()
@@ -1978,6 +2005,12 @@ const api = {
async reload(sourceId: ExternalDataStripeSource['id']): Promise<void> {
await new ApiRequest().externalDataSource(sourceId).withAction('reload').create()
},
async update(
sourceId: ExternalDataStripeSource['id'],
data: Partial<ExternalDataStripeSource>
): Promise<ExternalDataStripeSource> {
return await new ApiRequest().externalDataSource(sourceId).update({ data })
},
async database_schema(
source_type: ExternalDataSourceType,
payload: Record<string, any>

View File

@@ -51,6 +51,10 @@
border: none;
border-radius: 0;
}
.WebAnalyticsDashboard .InsightVizDisplay & {
min-height: var(--insight-viz-min-height);
}
}
.InsightDetails,

View File

@@ -5,7 +5,7 @@ import clsx from 'clsx'
import { useValues } from 'kea'
import { LemonButton } from 'lib/lemon-ui/LemonButton'
import { copyToClipboard } from 'lib/utils/copyToClipboard'
import { CSSProperties, useEffect, useState } from 'react'
import { useEffect, useState } from 'react'
import { PrismAsyncLight as SyntaxHighlighter } from 'react-syntax-highlighter'
import bash from 'react-syntax-highlighter/dist/esm/languages/prism/bash'
import dart from 'react-syntax-highlighter/dist/esm/languages/prism/dart'
@@ -81,7 +81,7 @@ export interface CodeSnippetProps {
wrap?: boolean
compact?: boolean
actions?: JSX.Element
style?: CSSProperties
className?: string
/** What is being copied. @example 'link' */
thing?: string
/** If set, the snippet becomes expandable when there's more than this number of lines. */
@@ -93,7 +93,7 @@ export function CodeSnippet({
language = Language.Text,
wrap = false,
compact = false,
style,
className,
actions,
thing = 'snippet',
maxLinesWithoutExpansion,
@@ -120,8 +120,7 @@ export function CodeSnippet({
}
return (
// eslint-disable-next-line react/forbid-dom-props
<div className={clsx('CodeSnippet', compact && 'CodeSnippet--compact')} style={style}>
<div className={clsx('CodeSnippet', compact && 'CodeSnippet--compact', className)}>
<div className="CodeSnippet__actions">
{actions}
<LemonButton

View File

@@ -153,7 +153,7 @@ function DebugCHQueries(): JSX.Element {
language={Language.SQL}
thing="query"
maxLinesWithoutExpansion={10}
style={{ fontSize: 12, maxWidth: '60vw' }}
className="text-sm max-w-[60vw]"
>
{item.query}
</CodeSnippet>
@@ -263,7 +263,7 @@ function DebugCHQueries(): JSX.Element {
language={Language.JSON}
maxLinesWithoutExpansion={0}
key={item.query_id}
style={{ fontSize: 12, marginBottom: '0.25rem' }}
className="text-sm mb-2"
>
{JSON.stringify(event, null, 2)}
</CodeSnippet>

View File

@@ -38,6 +38,7 @@ import {
IconTrends,
IconUnlock,
IconUserPaths,
IconWarning,
IconX,
} from '@posthog/icons'
import { Parser } from 'expr-eval'
@@ -581,6 +582,17 @@ export const commandPaletteLogic = kea<commandPaletteLogicType>([
},
]
: []),
...(values.featureFlags[FEATURE_FLAGS.ERROR_TRACKING]
? [
{
icon: IconWarning,
display: 'Go to Error tracking',
executor: () => {
push(urls.errorTracking())
},
},
]
: []),
{
display: 'Go to Session replay',
icon: IconRewindPlay,

View File

@@ -1,7 +1,7 @@
import { Meta } from '@storybook/react'
import { ErrorDisplay } from 'lib/components/Errors/ErrorDisplay'
import { EventType, RecordingEventType } from '~/types'
import { EventType } from '~/types'
const meta: Meta<typeof ErrorDisplay> = {
title: 'Components/Errors/Error Display',
@@ -9,104 +9,95 @@ const meta: Meta<typeof ErrorDisplay> = {
}
export default meta
function errorEvent(properties: Record<string, any>): EventType | RecordingEventType {
function errorProperties(properties: Record<string, any>): EventType['properties'] {
return {
id: '12345',
elements: [],
uuid: '018880b6-b781-0008-a2e5-629b2624fd2f',
event: '$exception',
properties: {
$os: 'Windows',
$os_version: '10.0',
$browser: 'Chrome',
$device_type: 'Desktop',
$current_url: 'https://app.posthog.com/home',
$host: 'app.posthog.com',
$pathname: '/home',
$browser_version: 113,
$browser_language: 'es-ES',
$screen_height: 1080,
$screen_width: 1920,
$viewport_height: 929,
$viewport_width: 1920,
$lib: 'web',
$lib_version: '1.63.3',
distinct_id: 'iOizUPH4RH65nZjvGVBz5zZUmwdHvq2mxzNySQqqYkG',
$device_id: '186144e7357245-0cfe8bf1b5b877-26021051-1fa400-186144e7358d3',
$active_feature_flags: ['are-the-flags', 'important-for-the-error'],
$feature_flag_payloads: {
'are-the-flags': '{\n "flag": "payload"\n}',
},
$user_id: 'iOizUPH4RH65nZjvGVBz5zZUmwdHvq2mxzNySQqqYkG',
$groups: {
project: '00000000-0000-0000-1847-88f0ffa23444',
organization: '00000000-0000-0000-a050-5d4557279956',
customer: 'the-customer',
instance: 'https://app.posthog.com',
},
$exception_message: 'ResizeObserver loop limit exceeded',
$exception_type: 'Error',
$exception_personURL: 'https://app.posthog.com/person/the-person-id',
$sentry_event_id: 'id-from-the-sentry-integration',
$sentry_exception: {
values: [
{
value: 'ResizeObserver loop limit exceeded',
type: 'Error',
mechanism: {
type: 'onerror',
handled: false,
synthetic: true,
},
stacktrace: {
frames: [
{
colno: 0,
filename: 'https://app.posthog.com/home',
function: '?',
in_app: true,
lineno: 0,
},
],
},
},
],
},
$sentry_exception_message: 'ResizeObserver loop limit exceeded',
$sentry_exception_type: 'Error',
$sentry_tags: {
'PostHog Person URL': 'https://app.posthog.com/person/the-person-id',
'PostHog Recording URL': 'https://app.posthog.com/replay/the-session-id?t=866',
},
$sentry_url:
'https://sentry.io/organizations/posthog/issues/?project=the-sentry-project-id&query=the-sentry-id',
$session_id: 'the-session-id',
$window_id: 'the-window-id',
$pageview_id: 'the-pageview-id',
$sent_at: '2023-06-03T10:03:57.787000+00:00',
$geoip_city_name: 'Whoville',
$geoip_country_name: 'Wholand',
$geoip_country_code: 'WH',
$geoip_continent_name: 'Mystery',
$geoip_continent_code: 'MY',
$geoip_latitude: -30.5023,
$geoip_longitude: -71.1545,
$geoip_time_zone: 'UTC',
$lib_version__major: 1,
$lib_version__minor: 63,
$lib_version__patch: 3,
...properties,
$os: 'Windows',
$os_version: '10.0',
$browser: 'Chrome',
$device_type: 'Desktop',
$current_url: 'https://app.posthog.com/home',
$host: 'app.posthog.com',
$pathname: '/home',
$browser_version: 113,
$browser_language: 'es-ES',
$screen_height: 1080,
$screen_width: 1920,
$viewport_height: 929,
$viewport_width: 1920,
$lib: 'web',
$lib_version: '1.63.3',
distinct_id: 'iOizUPH4RH65nZjvGVBz5zZUmwdHvq2mxzNySQqqYkG',
$device_id: '186144e7357245-0cfe8bf1b5b877-26021051-1fa400-186144e7358d3',
$active_feature_flags: ['are-the-flags', 'important-for-the-error'],
$feature_flag_payloads: {
'are-the-flags': '{\n "flag": "payload"\n}',
},
timestamp: '2023-06-03T03:03:57.316-07:00',
distinct_id: 'the-distinct-id',
elements_chain: '',
$user_id: 'iOizUPH4RH65nZjvGVBz5zZUmwdHvq2mxzNySQqqYkG',
$groups: {
project: '00000000-0000-0000-1847-88f0ffa23444',
organization: '00000000-0000-0000-a050-5d4557279956',
customer: 'the-customer',
instance: 'https://app.posthog.com',
},
$exception_message: 'ResizeObserver loop limit exceeded',
$exception_type: 'Error',
$exception_personURL: 'https://app.posthog.com/person/the-person-id',
$sentry_event_id: 'id-from-the-sentry-integration',
$sentry_exception: {
values: [
{
value: 'ResizeObserver loop limit exceeded',
type: 'Error',
mechanism: {
type: 'onerror',
handled: false,
synthetic: true,
},
stacktrace: {
frames: [
{
colno: 0,
filename: 'https://app.posthog.com/home',
function: '?',
in_app: true,
lineno: 0,
},
],
},
},
],
},
$sentry_exception_message: 'ResizeObserver loop limit exceeded',
$sentry_exception_type: 'Error',
$sentry_tags: {
'PostHog Person URL': 'https://app.posthog.com/person/the-person-id',
'PostHog Recording URL': 'https://app.posthog.com/replay/the-session-id?t=866',
},
$sentry_url:
'https://sentry.io/organizations/posthog/issues/?project=the-sentry-project-id&query=the-sentry-id',
$session_id: 'the-session-id',
$window_id: 'the-window-id',
$pageview_id: 'the-pageview-id',
$sent_at: '2023-06-03T10:03:57.787000+00:00',
$geoip_city_name: 'Whoville',
$geoip_country_name: 'Wholand',
$geoip_country_code: 'WH',
$geoip_continent_name: 'Mystery',
$geoip_continent_code: 'MY',
$geoip_latitude: -30.5023,
$geoip_longitude: -71.1545,
$geoip_time_zone: 'UTC',
$lib_version__major: 1,
$lib_version__minor: 63,
$lib_version__patch: 3,
...properties,
}
}
export function ResizeObserverLoopLimitExceeded(): JSX.Element {
return (
<ErrorDisplay
event={errorEvent({
eventProperties={errorProperties({
$exception_message: 'ResizeObserver loop limit exceeded',
$exception_type: 'Error',
$exception_personURL: 'https://app.posthog.com/person/the-person-id',
@@ -118,7 +109,7 @@ export function ResizeObserverLoopLimitExceeded(): JSX.Element {
export function SafariScriptError(): JSX.Element {
return (
<ErrorDisplay
event={errorEvent({
eventProperties={errorProperties({
$exception_type: 'Error',
$exception_message: 'Script error.',
$exception_is_synthetic: true,
@@ -130,7 +121,7 @@ export function SafariScriptError(): JSX.Element {
export function ImportingModule(): JSX.Element {
return (
<ErrorDisplay
event={errorEvent({
eventProperties={errorProperties({
$exception_type: 'UnhandledRejection',
$exception_message: "Importing module '/static/chunk-PIJHGO7Q.js' is not found.",
$exception_stack_trace_raw: '[]',
@@ -143,7 +134,7 @@ export function ImportingModule(): JSX.Element {
export function AnonymousErrorWithStackTrace(): JSX.Element {
return (
<ErrorDisplay
event={errorEvent({
eventProperties={errorProperties({
$exception_type: 'Error',
$exception_message: 'wat',
$exception_stack_trace_raw:

View File

@@ -5,7 +5,7 @@ import { LemonTag } from 'lib/lemon-ui/LemonTag/LemonTag'
import { Link } from 'lib/lemon-ui/Link'
import posthog from 'posthog-js'
import { EventType, RecordingEventType } from '~/types'
import { EventType } from '~/types'
interface StackFrame {
filename: string
@@ -156,11 +156,7 @@ export function getExceptionPropertiesFrom(eventProperties: Record<string, any>)
}
}
export function ErrorDisplay({ event }: { event: EventType | RecordingEventType }): JSX.Element {
if (event.event !== '$exception') {
return <>Unknown type of error</>
}
export function ErrorDisplay({ eventProperties }: { eventProperties: EventType['properties'] }): JSX.Element {
const {
$exception_type,
$exception_message,
@@ -175,7 +171,7 @@ export function ErrorDisplay({ event }: { event: EventType | RecordingEventType
$sentry_url,
$exception_stack_trace_raw,
$level,
} = getExceptionPropertiesFrom(event.properties)
} = getExceptionPropertiesFrom(eventProperties)
return (
<div className="flex flex-col space-y-2 pr-4 pb-2">

View File

@@ -128,7 +128,7 @@ export function PropertyValue({
loading={options[propertyKey]?.status === 'loading'}
value={formattedValues}
mode={isMultiSelect ? 'multiple' : 'single'}
allowCustomValues
allowCustomValues={options[propertyKey]?.allowCustomValues}
onChange={(nextVal) => (isMultiSelect ? setValue(nextVal) : setValue(nextVal[0]))}
onInputChange={onSearchTextChange}
placeholder={placeholder}

View File

@@ -27,7 +27,7 @@ import {
PropertyGroupFilterValue,
PropertyOperator,
PropertyType,
RecordingDurationFilter,
RecordingPropertyFilter,
SessionPropertyFilter,
} from '~/types'
@@ -89,22 +89,21 @@ export function convertPropertyGroupToProperties(
return properties
}
export const PROPERTY_FILTER_TYPE_TO_TAXONOMIC_FILTER_GROUP_TYPE: Omit<
Record<PropertyFilterType, TaxonomicFilterGroupType>,
PropertyFilterType.Recording // Recording filters are not part of the taxonomic filter, only Replay-specific UI
> = {
[PropertyFilterType.Meta]: TaxonomicFilterGroupType.Metadata,
[PropertyFilterType.Person]: TaxonomicFilterGroupType.PersonProperties,
[PropertyFilterType.Event]: TaxonomicFilterGroupType.EventProperties,
[PropertyFilterType.Feature]: TaxonomicFilterGroupType.EventFeatureFlags,
[PropertyFilterType.Cohort]: TaxonomicFilterGroupType.Cohorts,
[PropertyFilterType.Element]: TaxonomicFilterGroupType.Elements,
[PropertyFilterType.Session]: TaxonomicFilterGroupType.SessionProperties,
[PropertyFilterType.HogQL]: TaxonomicFilterGroupType.HogQLExpression,
[PropertyFilterType.Group]: TaxonomicFilterGroupType.GroupsPrefix,
[PropertyFilterType.DataWarehouse]: TaxonomicFilterGroupType.DataWarehouse,
[PropertyFilterType.DataWarehousePersonProperty]: TaxonomicFilterGroupType.DataWarehousePersonProperties,
}
// Maps each PropertyFilterType to the TaxonomicFilterGroupType used to select
// that kind of property in the taxonomic filter UI. The Record type keeps the
// mapping total: adding a new PropertyFilterType without an entry here becomes
// a compile error. Recording filters resolve to the Replay group.
export const PROPERTY_FILTER_TYPE_TO_TAXONOMIC_FILTER_GROUP_TYPE: Record<PropertyFilterType, TaxonomicFilterGroupType> =
    {
        [PropertyFilterType.Meta]: TaxonomicFilterGroupType.Metadata,
        [PropertyFilterType.Person]: TaxonomicFilterGroupType.PersonProperties,
        [PropertyFilterType.Event]: TaxonomicFilterGroupType.EventProperties,
        [PropertyFilterType.Feature]: TaxonomicFilterGroupType.EventFeatureFlags,
        [PropertyFilterType.Cohort]: TaxonomicFilterGroupType.Cohorts,
        [PropertyFilterType.Element]: TaxonomicFilterGroupType.Elements,
        [PropertyFilterType.Session]: TaxonomicFilterGroupType.SessionProperties,
        [PropertyFilterType.HogQL]: TaxonomicFilterGroupType.HogQLExpression,
        [PropertyFilterType.Group]: TaxonomicFilterGroupType.GroupsPrefix,
        [PropertyFilterType.DataWarehouse]: TaxonomicFilterGroupType.DataWarehouse,
        [PropertyFilterType.DataWarehousePersonProperty]: TaxonomicFilterGroupType.DataWarehousePersonProperties,
        [PropertyFilterType.Recording]: TaxonomicFilterGroupType.Replay,
    }
export function formatPropertyLabel(
item: Record<string, any>,
@@ -200,7 +199,7 @@ export function isElementPropertyFilter(filter?: AnyFilterLike | null): filter i
/** Type guard: true when the given filter is a session property filter. */
export function isSessionPropertyFilter(filter?: AnyFilterLike | null): filter is SessionPropertyFilter {
    if (!filter) {
        return false
    }
    return filter.type === PropertyFilterType.Session
}
export function isRecordingDurationFilter(filter?: AnyFilterLike | null): filter is RecordingDurationFilter {
/** Type guard: true when the given filter targets recording (Replay) properties. */
export function isRecordingPropertyFilter(filter?: AnyFilterLike | null): filter is RecordingPropertyFilter {
    return filter != null && filter.type === PropertyFilterType.Recording
}
export function isGroupPropertyFilter(filter?: AnyFilterLike | null): filter is GroupPropertyFilter {
@@ -223,7 +222,7 @@ export function isAnyPropertyfilter(filter?: AnyFilterLike | null): filter is An
isElementPropertyFilter(filter) ||
isSessionPropertyFilter(filter) ||
isCohortPropertyFilter(filter) ||
isRecordingDurationFilter(filter) ||
isRecordingPropertyFilter(filter) ||
isFeaturePropertyFilter(filter) ||
isGroupPropertyFilter(filter)
)
@@ -236,7 +235,7 @@ export function isPropertyFilterWithOperator(
| PersonPropertyFilter
| ElementPropertyFilter
| SessionPropertyFilter
| RecordingDurationFilter
| RecordingPropertyFilter
| FeaturePropertyFilter
| GroupPropertyFilter
| DataWarehousePropertyFilter {
@@ -246,7 +245,7 @@ export function isPropertyFilterWithOperator(
isPersonPropertyFilter(filter) ||
isElementPropertyFilter(filter) ||
isSessionPropertyFilter(filter) ||
isRecordingDurationFilter(filter) ||
isRecordingPropertyFilter(filter) ||
isFeaturePropertyFilter(filter) ||
isGroupPropertyFilter(filter) ||
isDataWarehousePropertyFilter(filter))
@@ -345,6 +344,10 @@ export function taxonomicFilterTypeToPropertyFilterType(
return PropertyFilterType.DataWarehousePersonProperty
}
if (filterType == TaxonomicFilterGroupType.Replay) {
return PropertyFilterType.Recording
}
return Object.entries(propertyFilterMapping).find(([, v]) => v === filterType)?.[0] as
| PropertyFilterType
| undefined

View File

@@ -39,10 +39,10 @@ function SocialLoginLink({ provider, extraQueryParams, children }: SocialLoginLi
interface SocialLoginButtonProps {
provider: SSOProvider
redirectQueryParams?: Record<string, string>
extraQueryParams?: Record<string, string>
}
export function SocialLoginButton({ provider, redirectQueryParams }: SocialLoginButtonProps): JSX.Element | null {
export function SocialLoginButton({ provider, extraQueryParams }: SocialLoginButtonProps): JSX.Element | null {
const { preflight } = useValues(preflightLogic)
if (!preflight?.available_social_auth_providers[provider]) {
@@ -50,7 +50,7 @@ export function SocialLoginButton({ provider, redirectQueryParams }: SocialLogin
}
return (
<SocialLoginLink provider={provider} extraQueryParams={redirectQueryParams}>
<SocialLoginLink provider={provider} extraQueryParams={extraQueryParams}>
<LemonButton size="medium" icon={<SocialLoginIcon provider={provider} />}>
<span className="text-default">{SSO_PROVIDER_NAMES[provider]}</span>
</LemonButton>
@@ -65,7 +65,7 @@ interface SocialLoginButtonsProps {
className?: string
topDivider?: boolean
bottomDivider?: boolean
redirectQueryParams?: Record<string, string>
extraQueryParams?: Record<string, string>
}
export function SocialLoginButtons({
@@ -109,14 +109,19 @@ export function SocialLoginButtons({
)
}
interface SSOEnforcedLoginButtonProps extends Partial<LemonButtonWithoutSideActionProps> {
provider: SSOProvider
email: string
}
/**
 * Props for SSOEnforcedLoginButton: the social-login props (provider plus
 * optional extraQueryParams) intersected with optional LemonButton props,
 * and the user's email, which is merged into the SSO link's query parameters.
 */
type SSOEnforcedLoginButtonProps = SocialLoginButtonProps &
    Partial<LemonButtonWithoutSideActionProps> & {
        email: string
    }
export function SSOEnforcedLoginButton({ provider, email, ...props }: SSOEnforcedLoginButtonProps): JSX.Element {
export function SSOEnforcedLoginButton({
provider,
email,
extraQueryParams,
...props
}: SSOEnforcedLoginButtonProps): JSX.Element {
return (
<SocialLoginLink provider={provider} extraQueryParams={{ email }}>
<SocialLoginLink provider={provider} extraQueryParams={{ ...extraQueryParams, email }}>
<LemonButton
className="btn-bridge"
data-attr="sso-login"

View File

@@ -253,6 +253,7 @@ export type SupportFormFields = {
target_area: SupportTicketTargetArea | null
severity_level: SupportTicketSeverityLevel | null
message: string
isEmailFormOpen?: boolean | 'true' | 'false'
}
export const supportLogic = kea<supportLogicType>([
@@ -340,13 +341,14 @@ export const supportLogic = kea<supportLogicType>([
values.sendSupportRequest.kind ?? '',
values.sendSupportRequest.target_area ?? '',
values.sendSupportRequest.severity_level ?? '',
values.isEmailFormOpen ?? 'false',
].join(':')
if (panelOptions !== ':') {
actions.setSidePanelOptions(panelOptions)
}
},
openSupportForm: async ({ name, email, kind, target_area, severity_level, message }) => {
openSupportForm: async ({ name, email, isEmailFormOpen, kind, target_area, severity_level, message }) => {
let area = target_area ?? getURLPathToTargetArea(window.location.pathname)
if (!userLogic.values.user) {
area = 'login'
@@ -361,6 +363,12 @@ export const supportLogic = kea<supportLogicType>([
message: message ?? '',
})
if (isEmailFormOpen === 'true' || isEmailFormOpen === true) {
actions.openEmailForm()
} else {
actions.closeEmailForm()
}
if (values.sidePanelAvailable) {
const panelOptions = [kind ?? '', area ?? ''].join(':')
actions.openSidePanel(SidePanelTab.Support, panelOptions === ':' ? undefined : panelOptions)
@@ -509,12 +517,13 @@ export const supportLogic = kea<supportLogicType>([
const [panel, ...panelOptions] = (hashParams['panel'] ?? '').split(':')
if (panel === SidePanelTab.Support) {
const [kind, area, severity] = panelOptions
const [kind, area, severity, isEmailFormOpen] = panelOptions
actions.openSupportForm({
kind: Object.keys(SUPPORT_KIND_TO_SUBJECT).includes(kind) ? kind : null,
target_area: Object.keys(TARGET_AREA_TO_NAME).includes(area) ? area : null,
severity_level: Object.keys(SEVERITY_LEVEL_TO_NAME).includes(severity) ? severity : null,
isEmailFormOpen: isEmailFormOpen ?? 'false',
})
return
}

View File

@@ -68,7 +68,7 @@ export function InfiniteSelectResults({
<RenderComponent
{...(activeTaxonomicGroup?.componentProps ?? {})}
value={value}
onChange={(newValue) => selectItem(activeTaxonomicGroup, newValue, newValue)}
onChange={(newValue, item) => selectItem(activeTaxonomicGroup, newValue, item)}
/>
) : (
<InfiniteList popupAnchorElement={popupAnchorElement} />

View File

@@ -5,7 +5,7 @@ import { AnyDataNode } from '~/queries/schema'
export interface InlineHogQLEditorProps {
value?: TaxonomicFilterValue
onChange: (value: TaxonomicFilterValue) => void
onChange: (value: TaxonomicFilterValue, item?: any) => void
metadataSource?: AnyDataNode
}

View File

@@ -22,6 +22,7 @@ import { dataWarehouseSceneLogic } from 'scenes/data-warehouse/external/dataWare
import { experimentsLogic } from 'scenes/experiments/experimentsLogic'
import { featureFlagsLogic } from 'scenes/feature-flags/featureFlagsLogic'
import { groupDisplayId } from 'scenes/persons/GroupActorDisplay'
import { ReplayTaxonomicFilters } from 'scenes/session-recordings/filters/ReplayTaxonomicFilters'
import { teamLogic } from 'scenes/teamLogic'
import { actionsModel } from '~/models/actionsModel'
@@ -506,6 +507,13 @@ export const taxonomicFilterLogic = kea<taxonomicFilterLogicType>([
getPopoverHeader: () => 'HogQL',
componentProps: { metadataSource },
},
{
name: 'Replay',
searchPlaceholder: 'Replay',
type: TaxonomicFilterGroupType.Replay,
render: ReplayTaxonomicFilters,
getPopoverHeader: () => 'Replay',
},
...groupAnalyticsTaxonomicGroups,
...groupAnalyticsTaxonomicGroupNames,
]

View File

@@ -47,7 +47,7 @@ export type TaxonomicFilterValue = string | number | null
export type TaxonomicFilterRender = (props: {
value?: TaxonomicFilterValue
onChange: (value: TaxonomicFilterValue) => void
onChange: (value: TaxonomicFilterValue, item: any) => void
}) => JSX.Element | null
export interface TaxonomicFilterGroup {
@@ -108,6 +108,8 @@ export enum TaxonomicFilterGroupType {
SessionProperties = 'session_properties',
HogQLExpression = 'hogql_expression',
Notebooks = 'notebooks',
// Misc
Replay = 'replay',
}
export interface InfiniteListLogicProps extends TaxonomicFilterLogicProps {

View File

@@ -21,6 +21,10 @@ export function TimeSensitiveAuthenticationModal(): JSX.Element {
const ssoEnforcement = precheckResponse?.sso_enforcement
const showPassword = !ssoEnforcement && user?.has_password
const extraQueryParams = {
next: window.location.pathname,
}
return (
<LemonModal
title="Re-authenticate account"
@@ -30,7 +34,12 @@ export function TimeSensitiveAuthenticationModal(): JSX.Element {
footer={
ssoEnforcement ? (
<span className="flex-1">
<SSOEnforcedLoginButton provider={ssoEnforcement} email={user!.email} size="medium" />
<SSOEnforcedLoginButton
provider={ssoEnforcement}
email={user!.email}
size="medium"
extraQueryParams={extraQueryParams}
/>
</span>
) : showPassword ? (
<LemonButton
@@ -84,12 +93,15 @@ export function TimeSensitiveAuthenticationModal(): JSX.Element {
<SocialLoginButtons
className="mt-4"
caption={showPassword ? 'Or re-authenticate with' : undefined}
redirectQueryParams={{
next: window.location.pathname,
}}
extraQueryParams={extraQueryParams}
/>
{precheckResponse?.saml_available ? (
<SSOEnforcedLoginButton provider="saml" email={user!.email} size="medium" />
<SSOEnforcedLoginButton
provider="saml"
email={user!.email}
size="medium"
extraQueryParams={extraQueryParams}
/>
) : null}
</div>
) : null}

View File

@@ -58,8 +58,9 @@ export const Default: StoryFn<typeof UniversalFilters> = ({ group }) => {
<UniversalFilters
rootKey={rootKey}
group={filterGroup}
taxonomicEntityFilterGroupTypes={[TaxonomicFilterGroupType.Events, TaxonomicFilterGroupType.Actions]}
taxonomicPropertyFilterGroupTypes={[
taxonomicGroupTypes={[
TaxonomicFilterGroupType.Events,
TaxonomicFilterGroupType.Actions,
TaxonomicFilterGroupType.Cohorts,
TaxonomicFilterGroupType.PersonProperties,
TaxonomicFilterGroupType.SessionProperties,

View File

@@ -26,8 +26,7 @@ type UniversalFiltersProps = {
rootKey: string
group: UniversalFiltersGroup | null
onChange: (group: UniversalFiltersGroup) => void
taxonomicEntityFilterGroupTypes: TaxonomicFilterGroupType[]
taxonomicPropertyFilterGroupTypes: TaxonomicFilterGroupType[]
taxonomicGroupTypes: TaxonomicFilterGroupType[]
children?: React.ReactNode
}
@@ -35,8 +34,7 @@ function UniversalFilters({
rootKey,
group = null,
onChange,
taxonomicEntityFilterGroupTypes,
taxonomicPropertyFilterGroupTypes,
taxonomicGroupTypes,
children,
}: UniversalFiltersProps): JSX.Element {
return (
@@ -46,8 +44,7 @@ function UniversalFilters({
rootKey,
group,
onChange,
taxonomicEntityFilterGroupTypes,
taxonomicPropertyFilterGroupTypes,
taxonomicGroupTypes,
}}
>
{children}
@@ -64,8 +61,7 @@ function Group({
index: number
children: React.ReactNode
}): JSX.Element {
const { rootKey, taxonomicEntityFilterGroupTypes, taxonomicPropertyFilterGroupTypes } =
useValues(universalFiltersLogic)
const { rootKey, taxonomicGroupTypes } = useValues(universalFiltersLogic)
const { replaceGroupValue } = useActions(universalFiltersLogic)
return (
@@ -74,8 +70,7 @@ function Group({
rootKey={`${rootKey}.group_${index}`}
group={group}
onChange={(group) => replaceGroupValue(index, group)}
taxonomicEntityFilterGroupTypes={taxonomicEntityFilterGroupTypes}
taxonomicPropertyFilterGroupTypes={taxonomicPropertyFilterGroupTypes}
taxonomicGroupTypes={taxonomicGroupTypes}
>
{children}
</UniversalFilters>

View File

@@ -32,8 +32,9 @@ describe('universalFiltersLogic', () => {
logic = universalFiltersLogic({
rootKey: 'test',
group: defaultFilter,
taxonomicEntityFilterGroupTypes: [TaxonomicFilterGroupType.Events, TaxonomicFilterGroupType.Actions],
taxonomicPropertyFilterGroupTypes: [
taxonomicGroupTypes: [
TaxonomicFilterGroupType.Events,
TaxonomicFilterGroupType.Actions,
TaxonomicFilterGroupType.EventProperties,
TaxonomicFilterGroupType.PersonProperties,
],

View File

@@ -6,7 +6,7 @@ import {
import { taxonomicFilterGroupTypeToEntityType } from 'scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow'
import { propertyDefinitionsModel } from '~/models/propertyDefinitionsModel'
import { ActionFilter, FilterLogicalOperator } from '~/types'
import { ActionFilter, FilterLogicalOperator, PropertyFilterType } from '~/types'
import { TaxonomicFilterGroup, TaxonomicFilterGroupType, TaxonomicFilterValue } from '../TaxonomicFilter/types'
import { UniversalFiltersGroup, UniversalFiltersGroupValue } from './UniversalFilters'
@@ -26,8 +26,7 @@ export type UniversalFiltersLogicProps = {
rootKey: string
group: UniversalFiltersGroup | null
onChange: (group: UniversalFiltersGroup) => void
taxonomicEntityFilterGroupTypes: TaxonomicFilterGroupType[]
taxonomicPropertyFilterGroupTypes: TaxonomicFilterGroupType[]
taxonomicGroupTypes: TaxonomicFilterGroupType[]
}
export const universalFiltersLogic = kea<universalFiltersLogicType>([
@@ -50,7 +49,11 @@ export const universalFiltersLogic = kea<universalFiltersLogicType>([
}),
removeGroupValue: (index: number) => ({ index }),
addGroupFilter: (taxonomicGroup: TaxonomicFilterGroup, propertyKey: TaxonomicFilterValue, item: any) => ({
addGroupFilter: (
taxonomicGroup: TaxonomicFilterGroup,
propertyKey: TaxonomicFilterValue,
item: { propertyFilterType?: PropertyFilterType; name?: string }
) => ({
taxonomicGroup,
propertyKey,
item,
@@ -83,11 +86,20 @@ export const universalFiltersLogic = kea<universalFiltersLogicType>([
selectors({
rootKey: [(_, p) => [p.rootKey], (rootKey) => rootKey],
taxonomicEntityFilterGroupTypes: [(_, p) => [p.taxonomicEntityFilterGroupTypes], (types) => types],
taxonomicPropertyFilterGroupTypes: [(_, p) => [p.taxonomicPropertyFilterGroupTypes], (types) => types],
taxonomicGroupTypes: [
(_, p) => [p.taxonomicEntityFilterGroupTypes, p.taxonomicPropertyFilterGroupTypes],
(entityTypes, propertyTypes) => [...entityTypes, ...propertyTypes],
taxonomicGroupTypes: [(_, p) => [p.taxonomicGroupTypes], (types) => types],
taxonomicPropertyFilterGroupTypes: [
(_, p) => [p.taxonomicGroupTypes],
(types) =>
types.filter((t) =>
[
TaxonomicFilterGroupType.EventProperties,
TaxonomicFilterGroupType.PersonProperties,
TaxonomicFilterGroupType.EventFeatureFlags,
TaxonomicFilterGroupType.Cohorts,
TaxonomicFilterGroupType.Elements,
TaxonomicFilterGroupType.HogQLExpression,
].includes(t)
),
],
}),
@@ -112,7 +124,7 @@ export const universalFiltersLogic = kea<universalFiltersLogicType>([
newValues.push(newPropertyFilter)
} else {
const entityType = item.PropertyFilterType ?? taxonomicFilterGroupTypeToEntityType(taxonomicGroup.type)
const entityType = taxonomicFilterGroupTypeToEntityType(taxonomicGroup.type)
if (entityType) {
const newEntityFilter: ActionFilter = {
id: propertyKey,

View File

@@ -167,6 +167,7 @@ export const FEATURE_FLAGS = {
APPS_AND_EXPORTS_UI: 'apps-and-exports-ui', // owner: @benjackwhite
HOGQL_INSIGHT_LIVE_COMPARE: 'hogql-insight-live-compare', // owner: @mariusandra
HOGQL_DASHBOARD_CARDS: 'hogql-dashboard-cards', // owner: @thmsobrmlr
HOGQL_DASHBOARD_ASYNC: 'hogql-dashboard-async', // owner: @webjunkie
WEBHOOKS_DENYLIST: 'webhooks-denylist', // owner: #team-pipeline
PERSONS_HOGQL_QUERY: 'persons-hogql-query', // owner: @mariusandra
PIPELINE_UI: 'pipeline-ui', // owner: #team-pipeline
@@ -205,9 +206,13 @@ export const FEATURE_FLAGS = {
SESSION_REPLAY_NETWORK_VIEW: 'session-replay-network-view', // owner: #team-replay
SETTINGS_PERSONS_JOIN_MODE: 'settings-persons-join-mode', // owner: @robbie-c
HOG: 'hog', // owner: @mariusandra
HOG_FUNCTIONS: 'hog-functions', // owner: #team-cdp
PERSONLESS_EVENTS_NOT_SUPPORTED: 'personless-events-not-supported', // owner: @raquelmsmith
SESSION_REPLAY_UNIVERSAL_FILTERS: 'session-replay-universal-filters', // owner: #team-replay
ALERTS: 'alerts', // owner: github.com/nikitaevg
ERROR_TRACKING: 'error-tracking', // owner: #team-replay
SETTINGS_BOUNCE_RATE_PAGE_VIEW_MODE: 'settings-bounce-rate-page-view-mode', // owner: @robbie-c
SURVEYS_BRANCHING_LOGIC: 'surveys-branching-logic', // owner: @jurajmajerik #team-feature-success
} as const
export type FeatureFlagKey = (typeof FEATURE_FLAGS)[keyof typeof FEATURE_FLAGS]

View File

@@ -1075,6 +1075,17 @@ export const CORE_FILTER_DEFINITIONS_BY_GROUP = {
description: 'Specified group key',
},
},
replay: {
console_log_level: {
label: 'Log level',
description: 'Level of console logs captured',
examples: ['info', 'warn', 'error'],
},
console_log_query: {
label: 'Console log',
description: 'Text of console logs captured',
},
},
} satisfies Partial<Record<TaxonomicFilterGroupType, Record<string, CoreFilterDefinition>>>
CORE_FILTER_DEFINITIONS_BY_GROUP.numerical_event_properties = CORE_FILTER_DEFINITIONS_BY_GROUP.event_properties

View File

@@ -15,6 +15,5 @@ export function liveEventsHostOrigin(): string | null {
} else if (appOrigin === 'https://eu.posthog.com') {
return 'https://live.eu.posthog.com'
}
// TODO(@zach): add dev and local env support
return null
return 'http://localhost:8666'
}

View File

@@ -55,6 +55,7 @@ export type Option = {
label?: string
name?: string
status?: 'loading' | 'loaded'
allowCustomValues?: boolean
values?: PropValue[]
}
@@ -149,7 +150,11 @@ export const propertyDefinitionsModel = kea<propertyDefinitionsModelType>([
eventNames?: string[]
}) => payload,
setOptionsLoading: (key: string) => ({ key }),
setOptions: (key: string, values: PropValue[]) => ({ key, values }),
setOptions: (key: string, values: PropValue[], allowCustomValues: boolean) => ({
key,
values,
allowCustomValues,
}),
// internal
fetchAllPendingDefinitions: true,
abortAnyRunningQuery: true,
@@ -170,11 +175,12 @@ export const propertyDefinitionsModel = kea<propertyDefinitionsModelType>([
{} as Record<string, Option>,
{
setOptionsLoading: (state, { key }) => ({ ...state, [key]: { ...state[key], status: 'loading' } }),
setOptions: (state, { key, values }) => ({
setOptions: (state, { key, values, allowCustomValues }) => ({
...state,
[key]: {
values: [...Array.from(new Set(values))],
status: 'loaded',
allowCustomValues,
},
}),
},
@@ -317,6 +323,19 @@ export const propertyDefinitionsModel = kea<propertyDefinitionsModelType>([
if (!propertyKey || values.currentTeamId === null) {
return
}
if (propertyKey === 'console_log_level') {
actions.setOptions(
propertyKey,
[
// id is not used so can be arbitrarily chosen
{ id: 0, name: 'info' },
{ id: 1, name: 'warn' },
{ id: 2, name: 'error' },
],
false
)
return
}
const start = performance.now()
@@ -334,7 +353,7 @@ export const propertyDefinitionsModel = kea<propertyDefinitionsModelType>([
methodOptions
)
breakpoint()
actions.setOptions(propertyKey, propValues)
actions.setOptions(propertyKey, propValues, true)
cache.abortController = null
await captureTimeToSeeData(teamLogic.values.currentTeamId, {

View File

@@ -29,7 +29,8 @@
flex-direction: column;
.NotebookNode &,
.InsightCard & {
.InsightCard &,
.WebAnalyticsDashboard & {
flex: 1;
height: 100%;
@@ -102,7 +103,8 @@
}
.NotebookNode &,
.InsightCard & {
.InsightCard &,
.WebAnalyticsDashboard & {
.LineGraph {
position: relative;
min-height: 100px;
@@ -119,7 +121,8 @@
margin: 0.5rem;
.NotebookNode &,
.InsightCard & {
.InsightCard &,
.WebAnalyticsDashboard & {
min-height: auto;
}
@@ -149,7 +152,8 @@
min-height: var(--insight-viz-min-height);
.NotebookNode &,
.InsightCard & {
.InsightCard &,
.WebAnalyticsDashboard & {
min-height: auto;
}
}

View File

@@ -1,4 +1,4 @@
import { LemonSelect } from '@posthog/lemon-ui'
import { LemonButtonProps, LemonSelect } from '@posthog/lemon-ui'
import { FilterLogicalOperator } from '~/types'
@@ -8,6 +8,7 @@ interface AndOrFilterSelectProps {
topLevelFilter?: boolean
prefix?: React.ReactNode
suffix?: [singular: string, plural: string]
disabledReason?: LemonButtonProps['disabledReason']
}
export function AndOrFilterSelect({
@@ -16,6 +17,7 @@ export function AndOrFilterSelect({
topLevelFilter,
prefix = 'Match',
suffix = ['filter in this group', 'filters in this group'],
disabledReason,
}: AndOrFilterSelectProps): JSX.Element {
return (
<div className="flex items-center font-medium">
@@ -25,6 +27,7 @@ export function AndOrFilterSelect({
size="small"
value={value}
onChange={(type) => onChange(type as FilterLogicalOperator)}
disabledReason={disabledReason}
options={[
{
label: 'all',

View File

@@ -167,7 +167,7 @@ export function getQueryBasedInsightModel<T extends InputInsightModel>(insight:
let query
if (insight.query) {
query = insight.query
} else if (insight.filters && Object.keys(insight.filters).length > 0) {
} else if (insight.filters && Object.keys(insight.filters).filter((k) => k != 'filter_test_accounts').length > 0) {
query = { kind: NodeKind.InsightVizNode, source: filtersToQueryNode(insight.filters) } as InsightVizNode
} else {
query = null

View File

@@ -279,7 +279,7 @@
"$ref": "#/definitions/CohortPropertyFilter"
},
{
"$ref": "#/definitions/RecordingDurationFilter"
"$ref": "#/definitions/RecordingPropertyFilter"
},
{
"$ref": "#/definitions/GroupPropertyFilter"
@@ -2639,6 +2639,10 @@
"Day": {
"type": "integer"
},
"DurationType": {
"enum": ["duration", "active_seconds", "inactive_seconds"],
"type": "string"
},
"ElementPropertyFilter": {
"additionalProperties": false,
"description": "Sync with plugin-server/src/types.ts",
@@ -6964,12 +6968,23 @@
"required": ["k", "t"],
"type": "object"
},
"RecordingDurationFilter": {
"RecordingPropertyFilter": {
"additionalProperties": false,
"properties": {
"key": {
"const": "duration",
"type": "string"
"anyOf": [
{
"$ref": "#/definitions/DurationType"
},
{
"const": "console_log_level",
"type": "string"
},
{
"const": "console_log_query",
"type": "string"
}
]
},
"label": {
"type": "string"
@@ -6982,10 +6997,10 @@
"type": "string"
},
"value": {
"type": "number"
"$ref": "#/definitions/PropertyFilterValue"
}
},
"required": ["key", "operator", "type", "value"],
"required": ["key", "operator", "type"],
"type": "object"
},
"RefreshType": {

View File

@@ -11,7 +11,6 @@ import { NodeKind } from '~/queries/schema'
import { ActionType } from '~/types'
import { ActionEdit } from './ActionEdit'
import { ActionPlugins } from './ActionPlugins'
export const scene: SceneExport = {
logic: actionLogic,
@@ -47,8 +46,6 @@ export function Action({ id }: { id?: ActionType['id'] } = {}): JSX.Element {
<ActionEdit id={id} action={action} />
{id && (
<>
<ActionPlugins />
{isComplete ? (
<div className="mt-8">
<h2 className="subtitle">Matching events</h2>

View File

@@ -1,67 +0,0 @@
import { LemonButton, LemonTable } from '@posthog/lemon-ui'
import { useActions, useValues } from 'kea'
import { LemonTableLink } from 'lib/lemon-ui/LemonTable/LemonTableLink'
import { useEffect } from 'react'
import { actionLogic } from 'scenes/actions/actionLogic'
import { PluginImage } from 'scenes/plugins/plugin/PluginImage'
import { urls } from 'scenes/urls'
import { PipelineNodeTab, PipelineStage } from '~/types'
export function ActionPlugins(): JSX.Element | null {
const { matchingPluginConfigs } = useValues(actionLogic)
const { loadMatchingPluginConfigs } = useActions(actionLogic)
useEffect(() => {
loadMatchingPluginConfigs()
}, [])
if (!matchingPluginConfigs?.length) {
return null
}
return (
<>
<h2 className="subtitle">Connected data pipelines</h2>
<LemonTable
dataSource={matchingPluginConfigs}
columns={[
{
title: 'Data pipeline',
render: (_, config) => (
<div className="flex items-center gap-2">
<PluginImage plugin={config.plugin_info} size="small" />
<LemonTableLink
title={config.name ?? config.plugin_info.name}
to={urls.pipelineNode(
PipelineStage.Destination,
config.id,
PipelineNodeTab.Configuration
)}
/>
</div>
),
},
{
title: '',
width: 0,
render: (_, config) => (
<LemonButton
type="secondary"
size="small"
to={urls.pipelineNode(
PipelineStage.Destination,
config.id,
PipelineNodeTab.Configuration
)}
>
Configure
</LemonButton>
),
},
]}
/>
</>
)
}

View File

@@ -93,7 +93,7 @@ export function EventDetails({ event, tableProps }: EventDetailsProps): JSX.Elem
label: 'Exception',
content: (
<div className="ml-10 my-2">
<ErrorDisplay event={event} />
<ErrorDisplay eventProperties={event.properties} />
</div>
),
})

View File

@@ -62,7 +62,7 @@ export function LiveEventsTable(): JSX.Element {
<span className="relative inline-flex rounded-full h-2.5 w-2.5 bg-danger" />
</span>
<span className="text-sm cursor-default">
Active users: <b>{stats?.users_on_product ?? '—'}</b>
Users active right now: <b>{stats?.users_on_product ?? '—'}</b>
</span>
</div>
</Tooltip>

View File

@@ -18,7 +18,6 @@ export const liveEventsTableLogic = kea<liveEventsTableLogicType>([
addEvents: (events) => ({ events }),
clearEvents: true,
setFilters: (filters) => ({ filters }),
updateEventsSource: (source) => ({ source }),
updateEventsConnection: true,
pauseStream: true,
resumeStream: true,
@@ -54,12 +53,6 @@ export const liveEventsTableLogic = kea<liveEventsTableLogicType>([
setClientSideFilters: (_, { clientSideFilters }) => clientSideFilters,
},
],
eventsSource: [
null as EventSource | null,
{
updateEventsSource: (_, { source }) => source,
},
],
streamPaused: [
false,
{
@@ -110,8 +103,8 @@ export const liveEventsTableLogic = kea<liveEventsTableLogicType>([
actions.updateEventsConnection()
},
updateEventsConnection: async () => {
if (values.eventsSource) {
values.eventsSource.close()
if (cache.eventsSource) {
cache.eventsSource.close()
}
if (values.streamPaused) {
@@ -124,14 +117,13 @@ export const liveEventsTableLogic = kea<liveEventsTableLogicType>([
const { eventType } = values.filters
const url = new URL(`${liveEventsHostOrigin()}/events`)
url.searchParams.append('teamId', values.currentTeam.id.toString())
if (eventType) {
url.searchParams.append('eventType', eventType)
}
const source = new window.EventSourcePolyfill(url.toString(), {
headers: {
Authorization: `Bearer ${values.currentTeam?.live_events_token}`,
Authorization: `Bearer ${values.currentTeam.live_events_token}`,
},
})
@@ -158,11 +150,11 @@ export const liveEventsTableLogic = kea<liveEventsTableLogicType>([
}
}
actions.updateEventsSource(source)
cache.eventsSource = source
},
pauseStream: () => {
if (values.eventsSource) {
values.eventsSource.close()
if (cache.eventsSource) {
cache.eventsSource.close()
}
},
resumeStream: () => {
@@ -174,14 +166,11 @@ export const liveEventsTableLogic = kea<liveEventsTableLogicType>([
return
}
const response = await fetch(
`${liveEventsHostOrigin()}/stats?teamId=${values.currentTeam.id.toString()}`,
{
headers: {
Authorization: `Bearer ${values.currentTeam?.live_events_token}`,
},
}
)
const response = await fetch(`${liveEventsHostOrigin()}/stats`, {
headers: {
Authorization: `Bearer ${values.currentTeam.live_events_token}`,
},
})
const data = await response.json()
actions.setStats(data)
} catch (error) {
@@ -189,21 +178,19 @@ export const liveEventsTableLogic = kea<liveEventsTableLogicType>([
}
},
})),
events(({ actions, values }) => ({
events(({ actions, cache }) => ({
afterMount: () => {
if (!liveEventsHostOrigin()) {
return
}
actions.updateEventsConnection()
const interval = setInterval(() => {
cache.statsInterval = setInterval(() => {
actions.pollStats()
}, 1500)
return () => {
if (values.eventsSource) {
values.eventsSource.close()
}
clearInterval(interval)
},
beforeUnmount: () => {
if (cache.eventsSource) {
cache.eventsSource.close()
}
if (cache.statsInterval) {
clearInterval(cache.statsInterval)
}
},
})),

View File

@@ -37,6 +37,7 @@ export const appScenes: Record<Scene, () => any> = {
[Scene.FeatureFlag]: () => import('./feature-flags/FeatureFlag'),
[Scene.EarlyAccessFeatures]: () => import('./early-access-features/EarlyAccessFeatures'),
[Scene.EarlyAccessFeature]: () => import('./early-access-features/EarlyAccessFeature'),
[Scene.ErrorTracking]: () => import('./error-tracking/ErrorTrackingScene'),
[Scene.Surveys]: () => import('./surveys/Surveys'),
[Scene.Survey]: () => import('./surveys/Survey'),
[Scene.SurveyTemplates]: () => import('./surveys/SurveyTemplates'),

View File

@@ -285,7 +285,7 @@ function UnauthenticatedAcceptInvite({ invite }: { invite: PrevalidatedInvite })
caption={`Remember to log in with ${invite?.target_email}`}
captionLocation="bottom"
topDivider
redirectQueryParams={invite ? { invite_id: invite.id } : undefined}
extraQueryParams={invite ? { invite_id: invite.id } : undefined}
/>
</BridgePage>
)

View File

@@ -97,7 +97,7 @@ export const BillingProductAddon = ({ addon }: { addon: BillingProductV2AddonTyp
{is_enhanced_persons_og_customer && (
<p className="mt-2 mb-0">
<Link
to="https://posthog.com/changelog/2024#person-profiles-addon"
to="https://posthog.com/changelog/2024#person-profiles-launched-posthog-now-up-to-80percent-cheaper"
className="italic"
target="_blank"
targetBlankIcon

View File

@@ -18,6 +18,7 @@ import api, { ApiMethodOptions, getJSONOrNull } from 'lib/api'
import {
AUTO_REFRESH_DASHBOARD_THRESHOLD_HOURS,
DashboardPrivilegeLevel,
FEATURE_FLAGS,
OrganizationMembershipLevel,
} from 'lib/constants'
import { Dayjs, dayjs, now } from 'lib/dayjs'
@@ -834,7 +835,7 @@ export const dashboardLogic = kea<dashboardLogicType>([
},
],
})),
events(({ actions, cache, props }) => ({
events(({ actions, cache, props, values }) => ({
afterMount: () => {
if (props.id) {
if (props.dashboard) {
@@ -843,7 +844,7 @@ export const dashboardLogic = kea<dashboardLogicType>([
actions.loadDashboardSuccess(props.dashboard)
} else {
actions.loadDashboard({
refresh: 'force_cache',
refresh: values.featureFlags[FEATURE_FLAGS.HOGQL_DASHBOARD_ASYNC] ? 'async' : 'force_cache',
action: 'initial_load',
})
}
@@ -966,7 +967,7 @@ export const dashboardLogic = kea<dashboardLogicType>([
const insightsToRefresh = values
.sortTilesByLayout(tiles || values.insightTiles || [])
.filter((t) => {
if (!initialLoad || !t.last_refresh) {
if (!initialLoad || !t.last_refresh || !!t.insight?.query_status) {
return true
}
@@ -1016,7 +1017,13 @@ export const dashboardLogic = kea<dashboardLogicType>([
const queryId = `${dashboardQueryId}::${uuid()}`
const queryStartTime = performance.now()
const apiUrl = `api/projects/${values.currentTeamId}/insights/${insight.id}/?${toParams({
refresh: hardRefreshWithoutCache ? 'force_blocking' : 'blocking',
refresh: values.featureFlags[FEATURE_FLAGS.HOGQL_DASHBOARD_ASYNC]
? hardRefreshWithoutCache
? 'force_async'
: 'async'
: hardRefreshWithoutCache
? 'force_blocking'
: 'blocking',
from_dashboard: dashboardId, // needed to load insight in correct context
client_query_id: queryId,
session_id: currentSessionId(),
@@ -1056,29 +1063,22 @@ export const dashboardLogic = kea<dashboardLogicType>([
}
if (refreshedInsight.query_status) {
pollForResults(refreshedInsight.query_status.id, false, methodOptions)
.then(async () => {
const apiUrl = `api/projects/${values.currentTeamId}/insights/${insight.id}/?${toParams(
{
refresh: 'async',
from_dashboard: dashboardId, // needed to load insight in correct context
client_query_id: queryId,
session_id: currentSessionId(),
}
)}`
// TODO: We get the insight again here to get everything in the right format (e.g. because of result vs results)
const refreshedInsightResponse: Response = await api.getResponse(apiUrl, methodOptions)
const refreshedInsight: InsightModel = await getJSONOrNull(refreshedInsightResponse)
dashboardsModel.actions.updateDashboardInsight(
refreshedInsight,
[],
props.id ? [props.id] : undefined
)
actions.setRefreshStatus(insight.short_id)
})
.catch(() => {
actions.setRefreshError(insight.short_id)
})
await pollForResults(refreshedInsight.query_status.id, false, methodOptions)
const apiUrl = `api/projects/${values.currentTeamId}/insights/${insight.id}/?${toParams({
refresh: 'async',
from_dashboard: dashboardId, // needed to load insight in correct context
client_query_id: queryId,
session_id: currentSessionId(),
})}`
// TODO: We get the insight again here to get everything in the right format (e.g. because of result vs results)
const polledInsightResponse: Response = await api.getResponse(apiUrl, methodOptions)
const polledInsight: InsightModel = await getJSONOrNull(polledInsightResponse)
dashboardsModel.actions.updateDashboardInsight(
polledInsight,
[],
props.id ? [props.id] : undefined
)
actions.setRefreshStatus(insight.short_id)
} else {
actions.setRefreshStatus(insight.short_id)
}
@@ -1184,6 +1184,7 @@ export const dashboardLogic = kea<dashboardLogicType>([
// Initial load of actual data for dashboard items after general dashboard is fetched
if (
!values.featureFlags[FEATURE_FLAGS.HOGQL_DASHBOARD_ASYNC] && // with async we straight up want to loop through all items
values.oldestRefreshed &&
values.oldestRefreshed.isBefore(now().subtract(AUTO_REFRESH_DASHBOARD_THRESHOLD_HOURS, 'hours')) &&
!process.env.STORYBOOK // allow mocking of date in storybook without triggering refresh
@@ -1191,11 +1192,12 @@ export const dashboardLogic = kea<dashboardLogicType>([
actions.refreshAllDashboardItems({ action: 'refresh', initialLoad, dashboardQueryId })
allLoaded = false
} else {
const tilesWithNoResults = values.tiles?.filter((t) => !!t.insight && !t.insight.result) || []
const tilesWithNoOrQueuedResults =
values.tiles?.filter((t) => !!t.insight && (!t.insight.result || !!t.insight.query_status)) || []
if (tilesWithNoResults.length) {
if (tilesWithNoOrQueuedResults.length) {
actions.refreshAllDashboardItems({
tiles: tilesWithNoResults,
tiles: tilesWithNoOrQueuedResults,
action: 'load_missing',
initialLoad,
dashboardQueryId,

View File

@@ -184,7 +184,7 @@ export function ViewLinkForm(): JSX.Element {
</div>
</div>
<div className="mt-4 flex w-full">
<CodeSnippet style={{ width: '100%' }} language={Language.SQL}>
<CodeSnippet className="w-full" language={Language.SQL}>
{sqlCodeSnippet}
</CodeSnippet>
</div>

View File

@@ -1,5 +1,15 @@
import { TZLabel } from '@posthog/apps-common'
import { LemonButton, LemonDialog, LemonSwitch, LemonTable, LemonTag, Link, Spinner, Tooltip } from '@posthog/lemon-ui'
import {
LemonButton,
LemonDialog,
LemonSelect,
LemonSwitch,
LemonTable,
LemonTag,
Link,
Spinner,
Tooltip,
} from '@posthog/lemon-ui'
import { useActions, useValues } from 'kea'
import { router } from 'kea-router'
import { ProductIntroduction } from 'lib/components/ProductIntroduction/ProductIntroduction'
@@ -14,10 +24,10 @@ import { urls } from 'scenes/urls'
import { DataTableNode, NodeKind } from '~/queries/schema'
import {
DataWarehouseSyncInterval,
ExternalDataSourceSchema,
ExternalDataSourceType,
ExternalDataStripeSource,
PipelineInterval,
ProductKey,
} from '~/types'
@@ -33,7 +43,7 @@ const StatusTagSetting = {
export function DataWarehouseSourcesTable(): JSX.Element {
const { dataWarehouseSources, dataWarehouseSourcesLoading, sourceReloadingById } =
useValues(dataWarehouseSettingsLogic)
const { deleteSource, reloadSource } = useActions(dataWarehouseSettingsLogic)
const { deleteSource, reloadSource, updateSource } = useActions(dataWarehouseSettingsLogic)
const renderExpandable = (source: ExternalDataStripeSource): JSX.Element => {
return (
@@ -90,8 +100,20 @@ export function DataWarehouseSourcesTable(): JSX.Element {
{
title: 'Sync Frequency',
key: 'frequency',
render: function RenderFrequency() {
return 'day' as PipelineInterval
render: function RenderFrequency(_, source) {
return (
<LemonSelect
value={source.sync_frequency || 'day'}
onChange={(value) =>
updateSource({ ...source, sync_frequency: value as DataWarehouseSyncInterval })
}
options={[
{ value: 'day' as DataWarehouseSyncInterval, label: 'Daily' },
{ value: 'week' as DataWarehouseSyncInterval, label: 'Weekly' },
{ value: 'month' as DataWarehouseSyncInterval, label: 'Monthly' },
]}
/>
)
},
},
{

View File

@@ -25,7 +25,7 @@ export const dataWarehouseSettingsLogic = kea<dataWarehouseSettingsLogicType>([
updateSchema: (schema: ExternalDataSourceSchema) => ({ schema }),
abortAnyRunningQuery: true,
}),
loaders(({ cache, actions }) => ({
loaders(({ cache, actions, values }) => ({
dataWarehouseSources: [
null as PaginatedResponse<ExternalDataStripeSource> | null,
{
@@ -45,6 +45,15 @@ export const dataWarehouseSettingsLogic = kea<dataWarehouseSettingsLogicType>([
return res
},
updateSource: async (source: ExternalDataStripeSource) => {
const updatedSource = await api.externalDataSources.update(source.id, source)
return {
...values.dataWarehouseSources,
results:
values.dataWarehouseSources?.results.map((s) => (s.id === updatedSource.id ? source : s)) ||
[],
}
},
},
],
})),

View File

@@ -1,4 +1,4 @@
import { IconCollapse } from '@posthog/icons'
import { IconCollapse, IconGear } from '@posthog/icons'
import { LemonBanner, LemonButton, LemonInputSelect, LemonSkeleton, Spinner, Tooltip } from '@posthog/lemon-ui'
import { BindLogic, useActions, useValues } from 'kea'
import { AuthorizedUrlList } from 'lib/components/AuthorizedUrlList/AuthorizedUrlList'
@@ -10,6 +10,9 @@ import { DetectiveHog } from 'lib/components/hedgehogs'
import { useResizeObserver } from 'lib/hooks/useResizeObserver'
import { IconChevronRight, IconOpenInNew } from 'lib/lemon-ui/icons'
import React, { useEffect, useRef } from 'react'
import { teamLogic } from 'scenes/teamLogic'
import { sidePanelSettingsLogic } from '~/layout/navigation-3000/sidepanel/panels/sidePanelSettingsLogic'
import { heatmapsBrowserLogic } from './heatmapsBrowserLogic'
@@ -260,6 +263,28 @@ function EmbeddedHeatmapBrowser({
) : null
}
function Warnings(): JSX.Element | null {
const { currentTeam } = useValues(teamLogic)
const heatmapsEnabled = currentTeam?.heatmaps_opt_in
const { openSettingsPanel } = useActions(sidePanelSettingsLogic)
return !heatmapsEnabled ? (
<LemonBanner
type="warning"
action={{
type: 'secondary',
icon: <IconGear />,
onClick: () => openSettingsPanel({ settingId: 'heatmaps' }),
children: 'Configure',
}}
dismissKey="heatmaps-might-be-disabled-warning"
>
You aren't collecting heatmaps data. Enable heatmaps in your project.
</LemonBanner>
) : null
}
export function HeatmapsBrowser(): JSX.Element {
const iframeRef = useRef<HTMLIFrameElement | null>(null)
@@ -271,7 +296,8 @@ export function HeatmapsBrowser(): JSX.Element {
return (
<BindLogic logic={heatmapsBrowserLogic} props={logicProps}>
<div className="flex flex-wrap gap-2">
<div className="flex flex-col gap-2">
<Warnings />
<div className="flex flex-col overflow-hidden w-full h-[90vh] rounded border">
<UrlSearchHeader />

View File

@@ -35,7 +35,7 @@ import posthog from 'posthog-js'
export default defineNuxtPlugin(nuxtApp => {
const runtimeConfig = useRuntimeConfig();
const posthogClient = posthog.init(runtimeConfig.public.posthogPublicKey, {
api_host: runtimeConfig.public.posthogHost',
api_host: runtimeConfig.public.posthogHost,
${
isPersonProfilesDisabled
? ``

View File

@@ -57,10 +57,14 @@ export function DestinationsTable({ inOverview = false }: { inOverview?: boolean
title: 'App',
width: 0,
render: function RenderAppInfo(_, destination) {
if (destination.backend === 'plugin') {
return <RenderApp plugin={destination.plugin} />
switch (destination.backend) {
case 'plugin':
return <RenderApp plugin={destination.plugin} />
case 'batch_export':
return <RenderBatchExportIcon type={destination.service.type} />
default:
return null
}
return <RenderBatchExportIcon type={destination.service.type} />
},
},
{

View File

@@ -1,6 +1,7 @@
import { useValues } from 'kea'
import { NotFound } from 'lib/components/NotFound'
import { PipelineHogFunctionConfiguration } from './hogfunctions/PipelineHogFunctionConfiguration'
import { PipelineBatchExportConfiguration } from './PipelineBatchExportConfiguration'
import { pipelineNodeLogic } from './pipelineNodeLogic'
import { PipelinePluginConfiguration } from './PipelinePluginConfiguration'
@@ -15,7 +16,9 @@ export function PipelineNodeConfiguration(): JSX.Element {
return (
<div className="space-y-3">
{node.backend === PipelineBackend.Plugin ? (
{node.backend === PipelineBackend.HogFunction ? (
<PipelineHogFunctionConfiguration id={node.id} />
) : node.backend === PipelineBackend.Plugin ? (
<PipelinePluginConfiguration stage={stage} pluginConfigId={node.id} />
) : (
<PipelineBatchExportConfiguration id={node.id} />

View File

@@ -1,17 +1,20 @@
import { IconPlusSmall } from '@posthog/icons'
import { useValues } from 'kea'
import { combineUrl, router } from 'kea-router'
import { NotFound } from 'lib/components/NotFound'
import { PayGateMini } from 'lib/components/PayGateMini/PayGateMini'
import { useFeatureFlag } from 'lib/hooks/useFeatureFlag'
import { LemonButton } from 'lib/lemon-ui/LemonButton'
import { LemonTable } from 'lib/lemon-ui/LemonTable'
import { LemonTableLink } from 'lib/lemon-ui/LemonTable/LemonTableLink'
import { SceneExport } from 'scenes/sceneTypes'
import { urls } from 'scenes/urls'
import { AvailableFeature, BatchExportService, PipelineStage, PluginType } from '~/types'
import { AvailableFeature, BatchExportService, HogFunctionTemplateType, PipelineStage, PluginType } from '~/types'
import { pipelineDestinationsLogic } from './destinationsLogic'
import { frontendAppsLogic } from './frontendAppsLogic'
import { PipelineHogFunctionConfiguration } from './hogfunctions/PipelineHogFunctionConfiguration'
import { PipelineBatchExportConfiguration } from './PipelineBatchExportConfiguration'
import { PIPELINE_TAB_TO_NODE_STAGE } from './PipelineNode'
import { pipelineNodeNewLogic, PipelineNodeNewLogicProps } from './pipelineNodeNewLogic'
@@ -21,21 +24,20 @@ import { PipelineBackend } from './types'
import { getBatchExportUrl, RenderApp, RenderBatchExportIcon } from './utils'
const paramsToProps = ({
params: { stage, pluginIdOrBatchExportDestination },
params: { stage, id },
}: {
params: { stage?: string; pluginIdOrBatchExportDestination?: string }
params: { stage?: string; id?: string }
}): PipelineNodeNewLogicProps => {
const numericId =
pluginIdOrBatchExportDestination && /^\d+$/.test(pluginIdOrBatchExportDestination)
? parseInt(pluginIdOrBatchExportDestination)
: undefined
const numericId = id && /^\d+$/.test(id) ? parseInt(id) : undefined
const pluginId = numericId && !isNaN(numericId) ? numericId : null
const batchExportDestination = pluginId ? null : pluginIdOrBatchExportDestination ?? null
const hogFunctionId = pluginId ? null : id?.startsWith('hog-') ? id.slice(4) : null
const batchExportDestination = hogFunctionId ? null : id ?? null
return {
stage: PIPELINE_TAB_TO_NODE_STAGE[stage + 's'] || null, // pipeline tab has stage plural here we have singular
pluginId: pluginId,
batchExportDestination: batchExportDestination,
pluginId,
batchExportDestination,
hogFunctionId,
}
}
@@ -45,32 +47,22 @@ export const scene: SceneExport = {
paramsToProps,
}
type PluginEntry = {
backend: PipelineBackend.Plugin
id: number
type TableEntry = {
backend: PipelineBackend
id: string | number
name: string
description: string
plugin: PluginType
url?: string
icon: JSX.Element
}
type BatchExportEntry = {
backend: PipelineBackend.BatchExport
id: BatchExportService['type']
name: string
description: string
url: string
}
type TableEntry = PluginEntry | BatchExportEntry
function convertPluginToTableEntry(plugin: PluginType): TableEntry {
return {
backend: PipelineBackend.Plugin,
id: plugin.id,
name: plugin.name,
description: plugin.description || '',
plugin: plugin,
icon: <RenderApp plugin={plugin} />,
// TODO: ideally we'd link to docs instead of GitHub repo, so it can open in panel
// Same for transformations and destinations tables
url: plugin.url,
@@ -80,17 +72,26 @@ function convertPluginToTableEntry(plugin: PluginType): TableEntry {
function convertBatchExportToTableEntry(service: BatchExportService['type']): TableEntry {
return {
backend: PipelineBackend.BatchExport,
id: service,
id: service as string,
name: service,
description: `${service} batch export`,
icon: <RenderBatchExportIcon type={service} />,
url: getBatchExportUrl(service),
}
}
export function PipelineNodeNew(
params: { stage?: string; pluginIdOrBatchExportDestination?: string } = {}
): JSX.Element {
const { stage, pluginId, batchExportDestination } = paramsToProps({ params })
function convertHogFunctionToTableEntry(hogFunction: HogFunctionTemplateType): TableEntry {
return {
backend: PipelineBackend.HogFunction,
id: `hog-${hogFunction.id}`, // TODO: This weird identifier thing isn't great
name: hogFunction.name,
description: hogFunction.description,
icon: <span>🦔</span>,
}
}
export function PipelineNodeNew(params: { stage?: string; id?: string } = {}): JSX.Element {
const { stage, pluginId, batchExportDestination, hogFunctionId } = paramsToProps({ params })
if (!stage) {
return <NotFound object="pipeline app stage" />
@@ -103,6 +104,7 @@ export function PipelineNodeNew(
}
return res
}
if (batchExportDestination) {
if (stage !== PipelineStage.Destination) {
return <NotFound object={batchExportDestination} />
@@ -114,6 +116,14 @@ export function PipelineNodeNew(
)
}
if (hogFunctionId) {
const res = <PipelineHogFunctionConfiguration templateId={hogFunctionId} />
if (stage === PipelineStage.Destination) {
return <PayGateMini feature={AvailableFeature.DATA_PIPELINES}>{res}</PayGateMini>
}
return res
}
if (stage === PipelineStage.Transformation) {
return <TransformationOptionsTable />
} else if (stage === PipelineStage.Destination) {
@@ -135,11 +145,15 @@ function TransformationOptionsTable(): JSX.Element {
}
function DestinationOptionsTable(): JSX.Element {
const hogFunctionsEnabled = !!useFeatureFlag('HOG_FUNCTIONS')
const { batchExportServiceNames } = useValues(pipelineNodeNewLogic)
const { plugins, loading } = useValues(pipelineDestinationsLogic)
const { plugins, loading, hogFunctionTemplates } = useValues(pipelineDestinationsLogic)
const pluginTargets = Object.values(plugins).map(convertPluginToTableEntry)
const batchExportTargets = Object.values(batchExportServiceNames).map(convertBatchExportToTableEntry)
const targets = [...batchExportTargets, ...pluginTargets]
const hogFunctionTargets = hogFunctionsEnabled
? Object.values(hogFunctionTemplates).map(convertHogFunctionToTableEntry)
: []
const targets = [...batchExportTargets, ...pluginTargets, ...hogFunctionTargets]
return <NodeOptionsTable stage={PipelineStage.Destination} targets={targets} loading={loading} />
}
@@ -158,6 +172,7 @@ function NodeOptionsTable({
targets: TableEntry[]
loading: boolean
}): JSX.Element {
const { hashParams } = useValues(router)
return (
<>
<LemonTable
@@ -169,10 +184,7 @@ function NodeOptionsTable({
title: 'App',
width: 0,
render: function RenderAppInfo(_, target) {
if (target.backend === PipelineBackend.Plugin) {
return <RenderApp plugin={target.plugin} />
}
return <RenderBatchExportIcon type={target.id} />
return target.icon
},
},
{
@@ -198,7 +210,8 @@ function NodeOptionsTable({
type="primary"
data-attr={`new-${stage}-${target.id}`}
icon={<IconPlusSmall />}
to={urls.pipelineNodeNew(stage, target.id)}
// Preserve hash params to pass config in
to={combineUrl(urls.pipelineNodeNew(stage, target.id), {}, hashParams).url}
>
Create
</LemonButton>

View File

@@ -8,6 +8,8 @@ import { userLogic } from 'scenes/userLogic'
import {
BatchExportConfiguration,
HogFunctionTemplateType,
HogFunctionType,
PipelineStage,
PluginConfigTypeNew,
PluginConfigWithPluginInfoNew,
@@ -16,6 +18,7 @@ import {
} from '~/types'
import type { pipelineDestinationsLogicType } from './destinationsLogicType'
import { HOG_FUNCTION_TEMPLATES } from './hogfunctions/templates/hog-templates'
import { pipelineAccessLogic } from './pipelineAccessLogic'
import { BatchExportDestination, convertToPipelineNode, Destination, PipelineBackend } from './types'
import { captureBatchExportEvent, capturePluginEvent, loadPluginsFromUrl } from './utils'
@@ -116,28 +119,68 @@ export const pipelineDestinationsLogic = kea<pipelineDestinationsLogicType>([
},
},
],
hogFunctionTemplates: [
{} as Record<string, HogFunctionTemplateType>,
{
loadHogFunctionTemplates: async () => {
return HOG_FUNCTION_TEMPLATES.reduce((acc, template) => {
acc[template.id] = template
return acc
}, {} as Record<string, HogFunctionTemplateType>)
},
},
],
hogFunctions: [
[] as HogFunctionType[],
{
loadHogFunctions: async () => {
// TODO: Support pagination?
return (await api.hogFunctions.list()).results
},
},
],
})),
selectors({
loading: [
(s) => [s.pluginsLoading, s.pluginConfigsLoading, s.batchExportConfigsLoading],
(pluginsLoading, pluginConfigsLoading, batchExportConfigsLoading) =>
pluginsLoading || pluginConfigsLoading || batchExportConfigsLoading,
(s) => [
s.pluginsLoading,
s.pluginConfigsLoading,
s.batchExportConfigsLoading,
s.hogFunctionTemplatesLoading,
s.hogFunctionsLoading,
],
(
pluginsLoading,
pluginConfigsLoading,
batchExportConfigsLoading,
hogFunctionTemplatesLoading,
hogFunctionsLoading
) =>
pluginsLoading ||
pluginConfigsLoading ||
batchExportConfigsLoading ||
hogFunctionTemplatesLoading ||
hogFunctionsLoading,
],
destinations: [
(s) => [s.pluginConfigs, s.plugins, s.batchExportConfigs, s.user],
(pluginConfigs, plugins, batchExportConfigs, user): Destination[] => {
(s) => [s.pluginConfigs, s.plugins, s.batchExportConfigs, s.hogFunctions, s.user],
(pluginConfigs, plugins, batchExportConfigs, hogFunctions, user): Destination[] => {
// Migrations are shown only in impersonation mode, for us to be able to trigger them.
const rawBatchExports = Object.values(batchExportConfigs).filter(
(config) => config.destination.type !== 'HTTP' || user?.is_impersonated
)
const rawDestinations: (PluginConfigWithPluginInfoNew | BatchExportConfiguration)[] = Object.values(
pluginConfigs
)
.map<PluginConfigWithPluginInfoNew | BatchExportConfiguration>((pluginConfig) => ({
...pluginConfig,
plugin_info: plugins[pluginConfig.plugin] || null,
}))
.concat(rawBatchExports)
const rawDestinations: (PluginConfigWithPluginInfoNew | BatchExportConfiguration | HogFunctionType)[] =
Object.values(pluginConfigs)
.map<PluginConfigWithPluginInfoNew | BatchExportConfiguration | HogFunctionType>(
(pluginConfig) => ({
...pluginConfig,
plugin_info: plugins[pluginConfig.plugin] || null,
})
)
.concat(rawBatchExports)
.concat(hogFunctions)
const convertedDestinations = rawDestinations.map((d) =>
convertToPipelineNode(d, PipelineStage.Destination)
)
@@ -183,5 +226,7 @@ export const pipelineDestinationsLogic = kea<pipelineDestinationsLogicType>([
actions.loadPlugins()
actions.loadPluginConfigs()
actions.loadBatchExports()
actions.loadHogFunctionTemplates()
actions.loadHogFunctions()
}),
])

View File

@@ -24,7 +24,11 @@ type BatchExportNodeId = {
backend: PipelineBackend.BatchExport
id: string
}
export type PipelineNodeLimitedType = PluginNodeId | BatchExportNodeId
type HogFunctionNodeId = {
backend: PipelineBackend.HogFunction
id: string
}
export type PipelineNodeLimitedType = PluginNodeId | BatchExportNodeId | HogFunctionNodeId
export const pipelineNodeLogic = kea<pipelineNodeLogicType>([
props({} as PipelineNodeLogicProps),
@@ -61,18 +65,23 @@ export const pipelineNodeLogic = kea<pipelineNodeLogicType>([
},
],
],
nodeBackend: [
(s) => [s.node],
(node): PipelineBackend => {
return node.backend
},
],
node: [
(_, p) => [p.id],
(id): PipelineNodeLimitedType => {
return typeof id === 'string'
? { backend: PipelineBackend.BatchExport, id: id }
: { backend: PipelineBackend.Plugin, id: id }
? id.indexOf('hog-') === 0
? { backend: PipelineBackend.HogFunction, id: `${id}`.replace('hog-', '') }
: { backend: PipelineBackend.BatchExport, id }
: { backend: PipelineBackend.Plugin, id }
},
],
nodeBackend: [
(_, p) => [p.id],
(id): PipelineBackend => (typeof id === 'string' ? PipelineBackend.BatchExport : PipelineBackend.Plugin),
],
tabs: [
(s) => [s.nodeBackend],
(nodeBackend) => {

View File

@@ -18,6 +18,7 @@ export interface PipelineNodeNewLogicProps {
stage: PipelineStage | null
pluginId: number | null
batchExportDestination: string | null
hogFunctionId: string | null
}
export const pipelineNodeNewLogic = kea<pipelineNodeNewLogicType>([
@@ -25,12 +26,7 @@ export const pipelineNodeNewLogic = kea<pipelineNodeNewLogicType>([
connect({
values: [userLogic, ['user']],
}),
path((pluginIdOrBatchExportDestination) => [
'scenes',
'pipeline',
'pipelineNodeNewLogic',
pluginIdOrBatchExportDestination,
]),
path((id) => ['scenes', 'pipeline', 'pipelineNodeNewLogic', id]),
actions({
createNewButtonPressed: (stage: PipelineStage, id: number | BatchExportService['type']) => ({ stage, id }),
}),

View File

@@ -1,6 +1,7 @@
import {
BatchExportConfiguration,
BatchExportService,
HogFunctionType,
PipelineStage,
PluginConfigWithPluginInfoNew,
PluginType,
@@ -9,6 +10,7 @@ import {
export enum PipelineBackend {
BatchExport = 'batch_export',
Plugin = 'plugin',
HogFunction = 'hog_function',
}
// Base - we're taking a discriminated union approach here, so that TypeScript can discern types for free
@@ -39,6 +41,11 @@ export interface BatchExportBasedNode extends PipelineNodeBase {
interval: BatchExportConfiguration['interval']
}
export interface HogFunctionBasedNode extends PipelineNodeBase {
backend: PipelineBackend.HogFunction
id: string
}
// Stage: Transformations
export interface Transformation extends PluginBasedNode {
@@ -55,7 +62,11 @@ export interface WebhookDestination extends PluginBasedNode {
export interface BatchExportDestination extends BatchExportBasedNode {
stage: PipelineStage.Destination
}
export type Destination = BatchExportDestination | WebhookDestination
export interface FunctionDestination extends HogFunctionBasedNode {
stage: PipelineStage.Destination
interval: 'realtime'
}
export type Destination = BatchExportDestination | WebhookDestination | FunctionDestination
export interface DataImportApp extends PluginBasedNode {
stage: PipelineStage.DataImport
@@ -84,7 +95,7 @@ function isPluginConfig(
}
export function convertToPipelineNode<S extends PipelineStage>(
candidate: PluginConfigWithPluginInfoNew | BatchExportConfiguration,
candidate: PluginConfigWithPluginInfoNew | BatchExportConfiguration | HogFunctionType,
stage: S
): S extends PipelineStage.Transformation
? Transformation
@@ -98,7 +109,20 @@ export function convertToPipelineNode<S extends PipelineStage>(
? ImportApp
: never {
let node: PipelineNode
if (isPluginConfig(candidate)) {
// check if type is a hog function
if ('hog' in candidate) {
node = {
stage: stage as PipelineStage.Destination,
backend: PipelineBackend.HogFunction,
interval: 'realtime',
id: `hog-${candidate.id}`,
name: candidate.name,
description: candidate.description,
enabled: candidate.enabled,
created_at: candidate.created_at,
updated_at: candidate.created_at,
}
} else if (isPluginConfig(candidate)) {
const almostNode: Omit<
Transformation | WebhookDestination | SiteApp | ImportApp | DataImportApp,
'frequency' | 'order'

View File

@@ -9,6 +9,7 @@ export enum Scene {
Error404 = '404',
ErrorNetwork = '4xx',
ErrorProjectUnavailable = 'ProjectUnavailable',
ErrorTracking = 'ErrorTracking',
Dashboards = 'Dashboards',
Dashboard = 'Dashboard',
Insight = 'Insight',

View File

@@ -53,6 +53,10 @@ export const sceneConfigurations: Record<Scene, SceneConfig> = {
activityScope: ActivityScope.DASHBOARD,
defaultDocsPath: '/docs/product-analytics/dashboards',
},
[Scene.ErrorTracking]: {
projectBased: true,
name: 'Error tracking',
},
[Scene.Insight]: {
projectBased: true,
name: 'Insights',
@@ -408,7 +412,6 @@ export const sceneConfigurations: Record<Scene, SceneConfig> = {
[Scene.Heatmaps]: {
projectBased: true,
name: 'Heatmaps',
hideProjectNotice: true,
},
}
@@ -529,7 +532,7 @@ export const routes: Record<string, Scene> = {
[urls.persons()]: Scene.PersonsManagement,
[urls.pipelineNodeDataWarehouseNew()]: Scene.pipelineNodeDataWarehouseNew,
[urls.pipelineNodeNew(':stage')]: Scene.PipelineNodeNew,
[urls.pipelineNodeNew(':stage', ':pluginIdOrBatchExportDestination')]: Scene.PipelineNodeNew,
[urls.pipelineNodeNew(':stage', ':id')]: Scene.PipelineNodeNew,
[urls.pipeline(':tab')]: Scene.Pipeline,
[urls.pipelineNode(':stage', ':id', ':nodeTab')]: Scene.PipelineNode,
[urls.groups(':groupTypeIndex')]: Scene.PersonsManagement,
@@ -541,6 +544,7 @@ export const routes: Record<string, Scene> = {
[urls.experiment(':id')]: Scene.Experiment,
[urls.earlyAccessFeatures()]: Scene.EarlyAccessFeatures,
[urls.earlyAccessFeature(':id')]: Scene.EarlyAccessFeature,
[urls.errorTracking()]: Scene.ErrorTracking,
[urls.surveys()]: Scene.Surveys,
[urls.survey(':id')]: Scene.Survey,
[urls.surveyTemplates()]: Scene.SurveyTemplates,

View File

@@ -47,7 +47,7 @@
.PlayerSeekbar__currentbar {
z-index: 3;
background-color: var(--recording-seekbar-red);
background-color: var(--primary-3000);
border-radius: var(--bar-height) 0 0 var(--bar-height);
}
@@ -76,7 +76,7 @@
width: var(--thumb-size);
height: var(--thumb-size);
margin-top: calc(var(--thumb-size) / 2 * -1);
background-color: var(--recording-seekbar-red);
background-color: var(--primary-3000);
border: 2px solid var(--bg-light);
border-radius: 50%;
transition: top 150ms ease-in-out;

View File

@@ -68,7 +68,7 @@ export function ItemEvent({ item, expanded, setExpanded }: ItemEventProps): JSX.
{item.data.fullyLoaded ? (
item.data.event === '$exception' ? (
<ErrorDisplay event={item.data} />
<ErrorDisplay eventProperties={item.data.properties} />
) : (
<SimpleKeyValueList item={item.data.properties} />
)

View File

@@ -1,5 +1,6 @@
import { actions, kea, listeners, path, reducers, selectors } from 'kea'
import { actions, connect, kea, listeners, path, reducers, selectors } from 'kea'
import { eventUsageLogic } from 'lib/utils/eventUsageLogic'
import { teamLogic } from 'scenes/teamLogic'
import { AutoplayDirection, DurationType, SessionRecordingPlayerTab } from '~/types'
@@ -191,7 +192,10 @@ export const playerSettingsLogic = kea<playerSettingsLogicType>([
setQuickFilterProperties: (properties: string[]) => ({ properties }),
setTimestampFormat: (format: TimestampFormat) => ({ format }),
}),
reducers(() => ({
connect({
values: [teamLogic, ['currentTeam']],
}),
reducers(({ values }) => ({
showFilters: [
true,
{
@@ -211,7 +215,7 @@ export const playerSettingsLogic = kea<playerSettingsLogicType>([
},
],
quickFilterProperties: [
['$geoip_country_name'] as string[],
['$geoip_country_name', ...(values.currentTeam?.person_display_name_properties || [])] as string[],
{
persist: true,
},

View File

@@ -23,6 +23,7 @@ import { urls } from 'scenes/urls'
import { ReplayTabs, SessionRecordingType } from '~/types'
import { RecordingsUniversalFilters } from '../filters/RecordingsUniversalFilters'
import { SessionRecordingsFilters } from '../filters/SessionRecordingsFilters'
import { SessionRecordingPlayer } from '../player/SessionRecordingPlayer'
import { SessionRecordingPreview, SessionRecordingPreviewSkeleton } from './SessionRecordingPreview'
@@ -118,6 +119,7 @@ function RecordingsLists(): JSX.Element {
recordingsCount,
isRecordingsListCollapsed,
sessionSummaryLoading,
useUniversalFiltering,
} = useValues(sessionRecordingsPlaylistLogic)
const {
setSelectedRecordingId,
@@ -205,25 +207,27 @@ function RecordingsLists(): JSX.Element {
</span>
</Tooltip>
</span>
<LemonButton
tooltip="Filter recordings"
size="small"
active={showFilters}
icon={
<IconWithCount count={totalFiltersCount}>
<IconFilter />
</IconWithCount>
}
onClick={() => {
if (notebookNode) {
notebookNode.actions.toggleEditing()
} else {
setShowFilters(!showFilters)
{(!useUniversalFiltering || notebookNode) && (
<LemonButton
tooltip="Filter recordings"
size="small"
active={showFilters}
icon={
<IconWithCount count={totalFiltersCount}>
<IconFilter />
</IconWithCount>
}
}}
>
Filter
</LemonButton>
onClick={() => {
if (notebookNode) {
notebookNode.actions.toggleEditing()
} else {
setShowFilters(!showFilters)
}
}}
>
Filter
</LemonButton>
)}
<LemonButton
tooltip="Playlist settings"
size="small"
@@ -352,6 +356,7 @@ export function SessionRecordingsPlaylist(props: SessionRecordingPlaylistLogicPr
matchingEventsMatchType,
pinnedRecordings,
isRecordingsListCollapsed,
useUniversalFiltering,
} = useValues(logic)
const { toggleRecordingsListCollapsed } = useActions(logic)
@@ -363,8 +368,10 @@ export function SessionRecordingsPlaylist(props: SessionRecordingPlaylistLogicPr
const notebookNode = useNotebookNode()
return (
<>
<BindLogic logic={sessionRecordingsPlaylistLogic} props={logicProps}>
<BindLogic logic={sessionRecordingsPlaylistLogic} props={logicProps}>
<div className="h-full space-y-2">
{useUniversalFiltering && <RecordingsUniversalFilters />}
<div
ref={playlistRef}
data-attr="session-recordings-playlist"
@@ -422,7 +429,7 @@ export function SessionRecordingsPlaylist(props: SessionRecordingPlaylistLogicPr
)}
</div>
</div>
</BindLogic>
</>
</div>
</BindLogic>
)
}

View File

@@ -4,6 +4,10 @@ import { loaders } from 'kea-loaders'
import { actionToUrl, router, urlToAction } from 'kea-router'
import { subscriptions } from 'kea-subscriptions'
import api from 'lib/api'
import { isAnyPropertyfilter } from 'lib/components/PropertyFilters/utils'
import { UniversalFiltersGroup, UniversalFilterValue } from 'lib/components/UniversalFilters/UniversalFilters'
import { DEFAULT_UNIVERSAL_GROUP_FILTER } from 'lib/components/UniversalFilters/universalFiltersLogic'
import { isActionFilter, isEventFilter } from 'lib/components/UniversalFilters/utils'
import { FEATURE_FLAGS } from 'lib/constants'
import { now } from 'lib/dayjs'
import { featureFlagLogic } from 'lib/logic/featureFlagLogic'
@@ -12,11 +16,15 @@ import { eventUsageLogic } from 'lib/utils/eventUsageLogic'
import posthog from 'posthog-js'
import {
AnyPropertyFilter,
DurationType,
FilterableLogLevel,
FilterType,
PropertyFilterType,
PropertyOperator,
RecordingDurationFilter,
RecordingFilters,
RecordingUniversalFilters,
ReplayTabs,
SessionRecordingId,
SessionRecordingsResponse,
@@ -85,6 +93,14 @@ export const DEFAULT_RECORDING_FILTERS: RecordingFilters = {
console_search_query: '',
}
export const DEFAULT_RECORDING_UNIVERSAL_FILTERS: RecordingUniversalFilters = {
live_mode: false,
filter_test_accounts: false,
date_from: '-3d',
filter_group: { ...DEFAULT_UNIVERSAL_GROUP_FILTER },
duration: [defaultRecordingDurationFilter],
}
const DEFAULT_PERSON_RECORDING_FILTERS: RecordingFilters = {
...DEFAULT_RECORDING_FILTERS,
date_from: '-30d',
@@ -106,6 +122,47 @@ const capturePartialFilters = (filters: Partial<RecordingFilters>): void => {
...partialFilters,
})
}
function convertUniversalFiltersToLegacyFilters(universalFilters: RecordingUniversalFilters): RecordingFilters {
const nestedFilters = universalFilters.filter_group.values[0] as UniversalFiltersGroup
const filters = nestedFilters.values as UniversalFilterValue[]
const properties: AnyPropertyFilter[] = []
const events: FilterType['events'] = []
const actions: FilterType['actions'] = []
let console_logs: FilterableLogLevel[] = []
let console_search_query = ''
filters.forEach((f) => {
if (isEventFilter(f)) {
events.push(f)
} else if (isActionFilter(f)) {
actions.push(f)
} else if (isAnyPropertyfilter(f)) {
if (f.type === PropertyFilterType.Recording) {
if (f.key === 'console_log_level') {
console_logs = f.value as FilterableLogLevel[]
} else if (f.key === 'console_log_query') {
console_search_query = (f.value || '') as string
}
} else {
properties.push(f)
}
}
})
const durationFilter = universalFilters.duration[0]
return {
...universalFilters,
properties,
events,
actions,
session_recording_duration: { ...durationFilter, key: 'duration' },
duration_type_filter: durationFilter.key,
console_search_query,
console_logs,
}
}
export interface SessionRecordingPlaylistLogicProps {
logicKey?: string
@@ -113,6 +170,7 @@ export interface SessionRecordingPlaylistLogicProps {
updateSearchParams?: boolean
autoPlay?: boolean
hideSimpleFilters?: boolean
universalFilters?: RecordingUniversalFilters
advancedFilters?: RecordingFilters
simpleFilters?: RecordingFilters
onFiltersChange?: (filters: RecordingFilters) => void
@@ -148,6 +206,7 @@ export const sessionRecordingsPlaylistLogic = kea<sessionRecordingsPlaylistLogic
],
}),
actions({
setUniversalFilters: (filters: Partial<RecordingUniversalFilters>) => ({ filters }),
setAdvancedFilters: (filters: Partial<RecordingFilters>) => ({ filters }),
setSimpleFilters: (filters: SimpleFiltersType) => ({ filters }),
setShowFilters: (showFilters: boolean) => ({ showFilters }),
@@ -355,6 +414,18 @@ export const sessionRecordingsPlaylistLogic = kea<sessionRecordingsPlaylistLogic
resetFilters: () => getDefaultFilters(props.personUUID),
},
],
universalFilters: [
props.universalFilters ?? DEFAULT_RECORDING_UNIVERSAL_FILTERS,
{
setUniversalFilters: (state, { filters }) => {
return {
...state,
...filters,
}
},
resetFilters: () => DEFAULT_RECORDING_UNIVERSAL_FILTERS,
},
],
showFilters: [
true,
{
@@ -465,6 +536,12 @@ export const sessionRecordingsPlaylistLogic = kea<sessionRecordingsPlaylistLogic
capturePartialFilters(filters)
actions.loadEventsHaveSessionId()
},
setUniversalFilters: ({ filters }) => {
actions.loadSessionRecordings()
props.onFiltersChange?.(values.filters)
capturePartialFilters(filters)
actions.loadEventsHaveSessionId()
},
setOrderBy: () => {
actions.loadSessionRecordings()
@@ -512,12 +589,20 @@ export const sessionRecordingsPlaylistLogic = kea<sessionRecordingsPlaylistLogic
(s) => [s.featureFlags],
(featureFlags) => !!featureFlags[FEATURE_FLAGS.SESSION_REPLAY_HOG_QL_FILTERING],
],
useUniversalFiltering: [
(s) => [s.featureFlags],
(featureFlags) => !!featureFlags[FEATURE_FLAGS.SESSION_REPLAY_UNIVERSAL_FILTERS],
],
logicProps: [() => [(_, props) => props], (props): SessionRecordingPlaylistLogicProps => props],
filters: [
(s) => [s.simpleFilters, s.advancedFilters],
(simpleFilters, advancedFilters): RecordingFilters => {
(s) => [s.simpleFilters, s.advancedFilters, s.universalFilters, s.featureFlags],
(simpleFilters, advancedFilters, universalFilters, featureFlags): RecordingFilters => {
if (featureFlags[FEATURE_FLAGS.SESSION_REPLAY_UNIVERSAL_FILTERS]) {
return convertUniversalFiltersToLegacyFilters(universalFilters)
}
return {
...advancedFilters,
events: [...(simpleFilters?.events || []), ...(advancedFilters?.events || [])],

View File

@@ -306,7 +306,9 @@ export const personalAPIKeysLogic = kea<personalAPIKeysLogicType>([
<>
<p className="mb-4">You can now use key "{key.label}" for authentication:</p>
<CodeSnippet thing="personal API key">{value}</CodeSnippet>
<CodeSnippet className="ph-no-capture" thing="personal API key">
{value}
</CodeSnippet>
<LemonBanner type="warning" className="mt-4">
For security reasons the value above <em>will never be shown again</em>.

View File

@@ -7,12 +7,15 @@ import { IconPlusSmall, IconTrash } from '@posthog/icons'
import { LemonButton, LemonCheckbox, LemonInput, LemonSelect } from '@posthog/lemon-ui'
import { useActions, useValues } from 'kea'
import { Group } from 'kea-forms'
import { FEATURE_FLAGS } from 'lib/constants'
import { SortableDragIcon } from 'lib/lemon-ui/icons'
import { LemonField } from 'lib/lemon-ui/LemonField'
import { featureFlagLogic as enabledFeaturesLogic } from 'lib/logic/featureFlagLogic'
import { Survey, SurveyQuestionType } from '~/types'
import { defaultSurveyFieldValues, NewSurvey, SurveyQuestionLabel } from './constants'
import { QuestionBranchingInput } from './QuestionBranchingInput'
import { HTMLEditor } from './SurveyAppearanceUtils'
import { surveyLogic } from './surveyLogic'
@@ -85,6 +88,10 @@ export function SurveyEditQuestionHeader({
export function SurveyEditQuestionGroup({ index, question }: { index: number; question: any }): JSX.Element {
const { survey, descriptionContentType } = useValues(surveyLogic)
const { setDefaultForQuestionType, setSurveyValue } = useActions(surveyLogic)
const { featureFlags } = useValues(enabledFeaturesLogic)
const hasBranching =
featureFlags[FEATURE_FLAGS.SURVEYS_BRANCHING_LOGIC] &&
(question.type === SurveyQuestionType.Rating || question.type === SurveyQuestionType.SingleChoice)
const initialDescriptionContentType = descriptionContentType(index) ?? 'text'
@@ -332,6 +339,7 @@ export function SurveyEditQuestionGroup({ index, question }: { index: number; qu
}
/>
</LemonField>
{hasBranching && <QuestionBranchingInput questionIndex={index} question={question} />}
</div>
</Group>
)

View File

@@ -16,10 +16,13 @@ import { hogql } from '~/queries/utils'
import {
Breadcrumb,
FeatureFlagFilters,
MultipleSurveyQuestion,
PropertyFilterType,
PropertyOperator,
RatingSurveyQuestion,
Survey,
SurveyQuestionBase,
SurveyQuestionBranchingType,
SurveyQuestionType,
SurveyUrlMatchType,
} from '~/types'
@@ -154,6 +157,7 @@ export const surveyLogic = kea<surveyLogicType>([
isEditingDescription,
isEditingThankYouMessage,
}),
setQuestionBranching: (questionIndex, value) => ({ questionIndex, value }),
archiveSurvey: true,
setWritingHTMLDescription: (writingHTML: boolean) => ({ writingHTML }),
setSurveyTemplateValues: (template: any) => ({ template }),
@@ -657,6 +661,44 @@ export const surveyLogic = kea<surveyLogicType>([
const newTemplateSurvey = { ...NEW_SURVEY, ...template }
return newTemplateSurvey
},
setQuestionBranching: (state, { questionIndex, value }) => {
const newQuestions = [...state.questions]
const question = newQuestions[questionIndex]
if (
question.type !== SurveyQuestionType.Rating &&
question.type !== SurveyQuestionType.SingleChoice
) {
throw new Error(
`Survey question type must be ${SurveyQuestionType.Rating} or ${SurveyQuestionType.SingleChoice}`
)
}
if (value === SurveyQuestionBranchingType.NextQuestion) {
delete question.branching
} else if (value === SurveyQuestionBranchingType.ConfirmationMessage) {
question.branching = {
type: SurveyQuestionBranchingType.ConfirmationMessage,
}
} else if (value === SurveyQuestionBranchingType.ResponseBased) {
question.branching = {
type: SurveyQuestionBranchingType.ResponseBased,
responseValue: {},
}
} else if (value.startsWith(SurveyQuestionBranchingType.SpecificQuestion)) {
const nextQuestionIndex = parseInt(value.split(':')[1])
question.branching = {
type: SurveyQuestionBranchingType.SpecificQuestion,
index: nextQuestionIndex,
}
}
newQuestions[questionIndex] = question
return {
...state,
questions: newQuestions,
}
},
},
],
selectedPageIndex: [
@@ -882,6 +924,28 @@ export const surveyLogic = kea<surveyLogicType>([
}
},
],
getBranchingDropdownValue: [
(s) => [s.survey],
(survey) => (questionIndex: number, question: RatingSurveyQuestion | MultipleSurveyQuestion) => {
if (question.branching?.type) {
const { type } = question.branching
if (type === SurveyQuestionBranchingType.SpecificQuestion) {
const nextQuestionIndex = question.branching.index
return `${SurveyQuestionBranchingType.SpecificQuestion}:${nextQuestionIndex}`
}
return type
}
// No branching specified, default to Next question / Confirmation message
if (questionIndex < survey.questions.length - 1) {
return SurveyQuestionBranchingType.NextQuestion
}
return SurveyQuestionBranchingType.ConfirmationMessage
},
],
}),
forms(({ actions, props, values }) => ({
survey: {

View File

@@ -120,13 +120,13 @@ export const urls = {
encode ? `/persons/${encodeURIComponent(uuid)}` : `/persons/${uuid}`,
persons: (): string => '/persons',
pipelineNodeDataWarehouseNew: (): string => `/pipeline/new/data-warehouse`,
pipelineNodeNew: (stage: PipelineStage | ':stage', pluginIdOrBatchExportDestination?: string | number): string => {
pipelineNodeNew: (stage: PipelineStage | ':stage', id?: string | number): string => {
if (stage === PipelineStage.DataImport) {
// should match 'pipelineNodeDataWarehouseNew'
return `/pipeline/new/data-warehouse`
}
return `/pipeline/new/${stage}${pluginIdOrBatchExportDestination ? `/${pluginIdOrBatchExportDestination}` : ''}`
return `/pipeline/new/${stage}${id ? `/${id}` : ''}`
},
pipeline: (tab?: PipelineTab | ':tab'): string => `/pipeline/${tab ? tab : PipelineTab.Overview}`,
/** @param id 'new' for new, uuid for batch exports and numbers for plugins */
@@ -149,6 +149,8 @@ export const urls = {
earlyAccessFeatures: (): string => '/early_access_features',
/** @param id A UUID or 'new'. ':id' for routing. */
earlyAccessFeature: (id: string): string => `/early_access_features/${id}`,
errorTracking: (): string => '/error_tracking',
errorTrackingGroup: (id: string): string => `/error_tracking/${id}`,
surveys: (): string => '/surveys',
/** @param id A UUID or 'new'. ':id' for routing. */
survey: (id: string): string => `/surveys/${id}`,

View File

@@ -23,14 +23,14 @@ export const WebAnalyticsNotice = (): JSX.Element => {
<LemonButton
type="secondary"
icon={<IconBug />}
onClick={() => openSupportForm({ kind: 'bug' })}
onClick={() => openSupportForm({ kind: 'bug', isEmailFormOpen: true })}
>
Report a bug
</LemonButton>
<LemonButton
type="secondary"
icon={<IconFeedback />}
onClick={() => openSupportForm({ kind: 'feedback' })}
onClick={() => openSupportForm({ kind: 'feedback', isEmailFormOpen: true })}
>
Give feedback
</LemonButton>

View File

@@ -330,7 +330,7 @@ export const WebStatsTrendTile = ({
}, [onWorldMapClick, insightProps])
return (
<div className="border rounded bg-bg-light flex-1">
<div className="border rounded bg-bg-light flex-1 flex flex-col">
{showIntervalTile && (
<div className="flex flex-row items-center justify-end m-2 mr-4">
<div className="flex flex-row items-center">

View File

@@ -156,13 +156,11 @@ $colors: (
// These vars are modified via SCSS for legacy reasons (e.g. darken/lighten), so keeping as SCSS vars for now.
$_primary: map.get($colors, 'primary');
$_success: map.get($colors, 'success');
$_danger: map.get($colors, 'danger');
$_primary_bg_hover: rgba($_primary, 0.1);
$_primary_bg_active: rgba($_primary, 0.2);
$_lifecycle_new: $_primary;
$_lifecycle_returning: $_success;
$_lifecycle_resurrecting: #a56eff; // --data-lilac
$_lifecycle_dormant: $_danger;
$_lifecycle_dormant: map.get($colors, 'danger');
// root variables are defined as a mixin here because
// the toolbar needs them attached to :host not :root
@@ -193,9 +191,6 @@ $_lifecycle_dormant: $_danger;
--green: var(--success);
--black: var(--default);
// Tag colors
--purple-light: #dcb1e3;
//// Data colors (e.g. insight series). Note: colors.ts relies on these values being hexadecimal
--data-color-1: #1d4aff;
--data-color-2: #621da6;
@@ -227,22 +222,6 @@ $_lifecycle_dormant: $_danger;
// TODO: unify with lib/colors.ts, getGraphColors()
--funnel-axis: var(--border);
--funnel-grid: #ddd;
--antd-table-background-dark: #fafafa;
// Session Recording
--recording-spacing: calc(2rem / 3);
--recording-player-container-bg: #797973;
--recording-buffer-bg: #faaf8c;
--recording-seekbar-red: var(--brand-red);
--recording-hover-event: var(--primary-bg-hover);
--recording-hover-event-mid: var(--primary-bg-active);
--recording-hover-event-dark: var(--primary-3000);
--recording-current-event: #eef2ff;
--recording-current-event-dark: var(--primary-alt);
--recording-failure-event: #fee9e2;
--recording-failure-event-dark: #cd3000;
--recording-highlight-event: var(--mark);
--recording-highlight-event-dark: #946508;
// Z-indexes
--z-bottom-notice: 5100;

View File

@@ -5,6 +5,7 @@ import { ChartDataset, ChartType, InteractionItem } from 'chart.js'
import { LogicWrapper } from 'kea'
import { DashboardCompatibleScenes } from 'lib/components/SceneDashboardChoice/sceneDashboardChoiceModalLogic'
import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types'
import { UniversalFiltersGroup } from 'lib/components/UniversalFilters/UniversalFilters'
import {
BIN_COUNT_AUTO,
DashboardPrivilegeLevel,
@@ -777,7 +778,7 @@ export type AnyPropertyFilter =
| ElementPropertyFilter
| SessionPropertyFilter
| CohortPropertyFilter
| RecordingDurationFilter
| RecordingPropertyFilter
| GroupPropertyFilter
| FeaturePropertyFilter
| HogQLPropertyFilter
@@ -946,13 +947,17 @@ export type ActionStepProperties =
| ElementPropertyFilter
| CohortPropertyFilter
export interface RecordingDurationFilter extends BasePropertyFilter {
export interface RecordingPropertyFilter extends BasePropertyFilter {
type: PropertyFilterType.Recording
key: 'duration'
value: number
key: DurationType | 'console_log_level' | 'console_log_query'
operator: PropertyOperator
}
export interface RecordingDurationFilter extends RecordingPropertyFilter {
key: DurationType
value: number
}
export type DurationType = 'duration' | 'active_seconds' | 'inactive_seconds'
export type FilterableLogLevel = 'info' | 'warn' | 'error'
@@ -973,6 +978,18 @@ export interface RecordingFilters {
filter_test_accounts?: boolean
}
export interface RecordingUniversalFilters {
/**
* live mode is front end only, sets date_from and date_to to the last hour
*/
live_mode?: boolean
date_from?: string | null
date_to?: string | null
duration: RecordingDurationFilter[]
filter_test_accounts?: boolean
filter_group: UniversalFiltersGroup
}
export interface SessionRecordingsResponse {
results: SessionRecordingType[]
has_next: boolean
@@ -989,6 +1006,13 @@ export type ErrorCluster = {
}
export type ErrorClusterResponse = ErrorCluster[] | null
export type ErrorTrackingGroup = {
title: string
sampleEventProperties: EventType['properties']
occurrences: number
uniqueSessions: number
}
export type EntityType = 'actions' | 'events' | 'data_warehouse' | 'new_entity'
export interface Entity {
@@ -2649,6 +2673,11 @@ export interface RatingSurveyQuestion extends SurveyQuestionBase {
scale: number
lowerBoundLabel: string
upperBoundLabel: string
branching?:
| NextQuestionBranching
| ConfirmationMessageBranching
| ResponseBasedBranching
| SpecificQuestionBranching
}
export interface MultipleSurveyQuestion extends SurveyQuestionBase {
@@ -2656,6 +2685,11 @@ export interface MultipleSurveyQuestion extends SurveyQuestionBase {
choices: string[]
shuffleOptions?: boolean
hasOpenChoice?: boolean
branching?:
| NextQuestionBranching
| ConfirmationMessageBranching
| ResponseBasedBranching
| SpecificQuestionBranching
}
export type SurveyQuestion = BasicSurveyQuestion | LinkSurveyQuestion | RatingSurveyQuestion | MultipleSurveyQuestion
@@ -2668,6 +2702,31 @@ export enum SurveyQuestionType {
Link = 'link',
}
export enum SurveyQuestionBranchingType {
NextQuestion = 'next_question',
ConfirmationMessage = 'confirmation_message',
ResponseBased = 'response_based',
SpecificQuestion = 'specific_question',
}
interface NextQuestionBranching {
type: SurveyQuestionBranchingType.NextQuestion
}
interface ConfirmationMessageBranching {
type: SurveyQuestionBranchingType.ConfirmationMessage
}
interface ResponseBasedBranching {
type: SurveyQuestionBranchingType.ResponseBased
responseValue: Record<string, any>
}
interface SpecificQuestionBranching {
type: SurveyQuestionBranchingType.SpecificQuestion
index: number
}
export interface FeatureFlagGroupType {
properties?: AnyPropertyFilter[]
rollout_percentage?: number | null
@@ -3746,6 +3805,7 @@ export interface ExternalDataStripeSource {
prefix: string
last_run_at?: Dayjs
schemas: ExternalDataSourceSchema[]
sync_frequency: DataWarehouseSyncInterval
}
export interface SimpleExternalDataSourceSchema {
id: string
@@ -3879,6 +3939,8 @@ export type BatchExportService =
export type PipelineInterval = 'hour' | 'day' | 'every 5 minutes'
export type DataWarehouseSyncInterval = 'day' | 'week' | 'month'
export type BatchExportConfiguration = {
// User provided data for the export. This is the data that the user
// provides when creating the export.
@@ -4088,6 +4150,44 @@ export type OnboardingProduct = {
scene: Scene
}
export type HogFunctionInputSchemaType = {
type: 'string' | 'boolean' | 'dictionary' | 'choice' | 'json'
key: string
label: string
choices?: { value: string; label: string }[]
required?: boolean
default?: any
secret?: boolean
description?: string
}
export type HogFunctionType = {
id: string
name: string
description: string
created_by: UserBasicType | null
created_at: string
updated_at: string
enabled: boolean
hog: string
inputs_schema: HogFunctionInputSchemaType[]
inputs: Record<
string,
{
value: any
bytecode?: any
}
>
filters?: PluginConfigFilters | null
template?: HogFunctionTemplateType
}
export type HogFunctionTemplateType = Pick<
HogFunctionType,
'id' | 'name' | 'description' | 'hog' | 'inputs_schema' | 'filters'
>
export interface AnomalyCondition {
absoluteThreshold: {
lower?: number

File diff suppressed because it is too large Load Diff

View File

@@ -1439,9 +1439,13 @@ public:
std::vector<IdentifierContext *> identifier();
IdentifierContext* identifier(size_t i);
antlr4::tree::TerminalNode *OVER();
antlr4::tree::TerminalNode *LPAREN();
antlr4::tree::TerminalNode *RPAREN();
std::vector<antlr4::tree::TerminalNode *> LPAREN();
antlr4::tree::TerminalNode* LPAREN(size_t i);
std::vector<antlr4::tree::TerminalNode *> RPAREN();
antlr4::tree::TerminalNode* RPAREN(size_t i);
ColumnExprListContext *columnExprList();
antlr4::tree::TerminalNode *DISTINCT();
ColumnArgListContext *columnArgList();
virtual std::any accept(antlr4::tree::ParseTreeVisitor *visitor) override;
};
@@ -1635,6 +1639,8 @@ public:
std::vector<antlr4::tree::TerminalNode *> RPAREN();
antlr4::tree::TerminalNode* RPAREN(size_t i);
ColumnExprListContext *columnExprList();
antlr4::tree::TerminalNode *DISTINCT();
ColumnArgListContext *columnArgList();
virtual std::any accept(antlr4::tree::ParseTreeVisitor *visitor) override;
};

File diff suppressed because one or more lines are too long

View File

@@ -1622,27 +1622,42 @@ class HogQLParseTreeConverter : public HogQLParserBaseVisitor {
auto column_expr_list_ctx = ctx->columnExprList();
string name = visitAsString(ctx->identifier(0));
string over_identifier = visitAsString(ctx->identifier(1));
PyObject* args = visitAsPyObjectOrEmptyList(column_expr_list_ctx);
PyObject* exprs = visitAsPyObjectOrEmptyList(column_expr_list_ctx);
PyObject* args;
try {
args = visitAsPyObjectOrEmptyList(ctx->columnArgList());
} catch (...) {
Py_DECREF(exprs);
throw;
}
RETURN_NEW_AST_NODE(
"WindowFunction", "{s:s#,s:N,s:s#}", "name", name.data(), name.size(), "args", args, "over_identifier",
over_identifier.data(), over_identifier.size()
"WindowFunction", "{s:s#,s:N,s:N,s:s#}", "name", name.data(), name.size(), "exprs", exprs, "args", args,
"over_identifier", over_identifier.data(), over_identifier.size()
);
}
VISIT(ColumnExprWinFunction) {
string identifier = visitAsString(ctx->identifier());
auto column_expr_list_ctx = ctx->columnExprList();
PyObject* args = visitAsPyObjectOrEmptyList(column_expr_list_ctx);
PyObject* exprs = visitAsPyObjectOrEmptyList(column_expr_list_ctx);
PyObject* args;
try {
args = visitAsPyObjectOrEmptyList(ctx->columnArgList());
} catch (...) {
Py_DECREF(exprs);
throw;
}
PyObject* over_expr;
try {
over_expr = visitAsPyObjectOrNone(ctx->windowExpr());
} catch (...) {
Py_DECREF(exprs);
Py_DECREF(args);
throw;
}
RETURN_NEW_AST_NODE(
"WindowFunction", "{s:s#,s:N,s:N}", "name", identifier.data(), identifier.size(), "args", args, "over_expr",
over_expr
"WindowFunction", "{s:s#,s:N,s:N,s:N}", "name", identifier.data(), identifier.size(), "exprs", exprs,
"args", args, "over_expr", over_expr
);
}

View File

@@ -32,7 +32,7 @@ module = Extension(
setup(
name="hogql_parser",
version="1.0.11",
version="1.0.12",
url="https://github.com/PostHog/posthog/tree/master/hogql_parser",
author="PostHog Inc.",
author_email="hey@posthog.com",

View File

@@ -1,9 +1,9 @@
{
"name": "@posthog/hogvm",
"version": "1.0.10",
"description": "PostHog HogQL Virtual Machine",
"types": "dist/execute.d.ts",
"main": "dist/execute.js",
"version": "1.0.11",
"description": "PostHog Hog Virtual Machine",
"types": "dist/index.d.ts",
"main": "dist/index.js",
"packageManager": "pnpm@8.3.1",
"scripts": {
"test": "jest --runInBand --forceExit",

View File

@@ -1,6 +1,6 @@
import { Operation } from './operation'
import { ASYNC_STL, STL } from './stl/stl'
import { convertJSToHog, getNestedValue, like, setNestedValue } from './utils'
import { convertHogToJS, convertJSToHog, getNestedValue, like, setNestedValue } from './utils'
const DEFAULT_MAX_ASYNC_STEPS = 100
const DEFAULT_TIMEOUT = 5 // seconds
@@ -58,8 +58,10 @@ export async function execAsync(bytecode: any[], options?: ExecOptions): Promise
if (response.state && response.asyncFunctionName && response.asyncFunctionArgs) {
vmState = response.state
if (options?.asyncFunctions && response.asyncFunctionName in options.asyncFunctions) {
const result = await options?.asyncFunctions[response.asyncFunctionName](...response.asyncFunctionArgs)
vmState.stack.push(result)
const result = await options?.asyncFunctions[response.asyncFunctionName](
...response.asyncFunctionArgs.map(convertHogToJS)
)
vmState.stack.push(convertJSToHog(result))
} else if (response.asyncFunctionName in ASYNC_STL) {
const result = await ASYNC_STL[response.asyncFunctionName](
response.asyncFunctionArgs,
@@ -333,7 +335,7 @@ export function exec(code: any[] | VMState, options?: ExecOptions): ExecResult {
.fill(null)
.map(() => popStack())
if (options?.functions && options.functions[name] && name !== 'toString') {
stack.push(options.functions[name](...args))
stack.push(convertJSToHog(options.functions[name](...args.map(convertHogToJS))))
} else if (
name !== 'toString' &&
((options?.asyncFunctions && options.asyncFunctions[name]) || name in ASYNC_STL)

View File

@@ -5,7 +5,7 @@ contenttypes: 0002_remove_content_type_name
ee: 0016_rolemembership_organization_member
otp_static: 0002_throttling
otp_totp: 0002_auto_20190420_0723
posthog: 0424_survey_current_iteration_and_more
posthog: 0426_externaldatasource_sync_frequency
sessions: 0001_initial
social_django: 0010_uid_db_index
two_factor: 0007_auto_20201201_1019

View File

@@ -3,6 +3,45 @@ posthog/temporal/common/utils.py:0: note: This is likely because "from_activity"
posthog/temporal/common/utils.py:0: error: Argument 2 to "__get__" of "classmethod" has incompatible type "type[HeartbeatType]"; expected "type[Never]" [arg-type]
posthog/warehouse/models/ssh_tunnel.py:0: error: Incompatible types in assignment (expression has type "NoEncryption", variable has type "BestAvailableEncryption") [assignment]
posthog/temporal/data_imports/pipelines/zendesk/talk_api.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "str") [assignment]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Dict entry 2 has incompatible type "Literal['auto']": "None"; expected "Literal['json_response', 'header_link', 'auto', 'single_page', 'cursor', 'offset', 'page_number']": "type[BasePaginator]" [dict-item]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "AuthConfigBase") [assignment]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Argument 1 to "get_auth_class" has incompatible type "Literal['bearer', 'api_key', 'http_basic'] | None"; expected "Literal['bearer', 'api_key', 'http_basic']" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Need type annotation for "dependency_graph" [var-annotated]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "None", target has type "ResolvedParam") [assignment]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible return value type (got "tuple[TopologicalSorter[Any], dict[str, EndpointResource], dict[str, ResolvedParam]]", expected "tuple[Any, dict[str, EndpointResource], dict[str, ResolvedParam | None]]") [return-value]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("str | Endpoint | None") [operator]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type variable "StrOrLiteralStr" of "parse" of "Formatter" cannot be "str | None" [type-var]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None") [operator]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None") [operator]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" is not indexable [index]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" has no attribute "pop" [union-attr]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" is not indexable [index]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "str | None" has no attribute "format" [union-attr]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Argument 1 to "single_entity_path" has incompatible type "str | None"; expected "str" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" has no attribute "items" [union-attr]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str") [assignment]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str") [assignment]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Statement is unreachable [unreachable]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 0 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, Any]" [dict-item]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, Any]" [dict-item]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 0 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item]
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Not all union combinations were tried because there are too many unions [misc]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 2 to "source" has incompatible type "str | None"; expected "str" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 3 to "source" has incompatible type "str | None"; expected "str" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 4 to "source" has incompatible type "int | None"; expected "int" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 6 to "source" has incompatible type "Schema | None"; expected "Schema" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 7 to "source" has incompatible type "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | None"; expected "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 8 to "source" has incompatible type "type[BaseConfiguration] | None"; expected "type[BaseConfiguration]" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "build_resource_dependency_graph" has incompatible type "EndpointResourceBase | None"; expected "EndpointResourceBase" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Need type annotation for "resources" (hint: "resources: dict[<type>, <type>] = ...") [var-annotated]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible types in assignment (expression has type "ResolvedParam | None", variable has type "ResolvedParam") [assignment]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible types in assignment (expression has type "list[str] | None", variable has type "list[str]") [assignment]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "setup_incremental_object" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "dict[str, Any]" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Statement is unreachable [unreachable]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "exclude_keys" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "Mapping[str, Any]" [arg-type]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible default for argument "incremental_param" (default has type "IncrementalParam | None", argument has type "IncrementalParam") [assignment]
posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument "module" to "SourceInfo" has incompatible type Module | None; expected Module [arg-type]
posthog/hogql/database/schema/numbers.py:0: error: Incompatible types in assignment (expression has type "dict[str, IntegerDatabaseField]", variable has type "dict[str, FieldOrTable]") [assignment]
posthog/hogql/database/schema/numbers.py:0: note: "Dict" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance
posthog/hogql/database/schema/numbers.py:0: note: Consider using "Mapping" instead, which is covariant in the value type
@@ -700,7 +739,6 @@ posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0:
posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0: error: List item 0 has incompatible type "tuple[str, str, int, int, int, int, str, int]"; expected "tuple[str, str, int, int, str, str, str, str]" [list-item]
posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py:0: error: "tuple[Any, ...]" has no attribute "last_uploaded_part_timestamp" [attr-defined]
posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py:0: error: "tuple[Any, ...]" has no attribute "upload_state" [attr-defined]
posthog/temporal/data_imports/pipelines/test/test_pipeline.py:0: error: Argument "run_id" to "PipelineInputs" has incompatible type "UUID"; expected "str" [arg-type]
posthog/migrations/0237_remove_timezone_from_teams.py:0: error: Argument 2 to "RunPython" has incompatible type "Callable[[Migration, Any], None]"; expected "_CodeCallable | None" [arg-type]
posthog/migrations/0228_fix_tile_layouts.py:0: error: Argument 2 to "RunPython" has incompatible type "Callable[[Migration, Any], None]"; expected "_CodeCallable | None" [arg-type]
posthog/api/plugin_log_entry.py:0: error: Name "timezone.datetime" is not defined [name-defined]
@@ -709,11 +747,6 @@ posthog/api/plugin_log_entry.py:0: error: Name "timezone.datetime" is not define
posthog/api/plugin_log_entry.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined]
posthog/api/action.py:0: error: Argument 1 to <tuple> has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type]
posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py:0: error: Incompatible types in assignment (expression has type "str | int", variable has type "int") [assignment]
posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Argument "run_id" to "ImportDataActivityInputs" has incompatible type "UUID"; expected "str" [arg-type]
posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Argument "run_id" to "ImportDataActivityInputs" has incompatible type "UUID"; expected "str" [arg-type]
posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Argument "run_id" to "ImportDataActivityInputs" has incompatible type "UUID"; expected "str" [arg-type]
posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Argument "run_id" to "ImportDataActivityInputs" has incompatible type "UUID"; expected "str" [arg-type]
posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Argument "run_id" to "ImportDataActivityInputs" has incompatible type "UUID"; expected "str" [arg-type]
posthog/api/test/batch_exports/conftest.py:0: error: Argument "activities" to "ThreadedWorker" has incompatible type "list[function]"; expected "Sequence[Callable[..., Any]]" [arg-type]
posthog/api/test/test_team.py:0: error: "HttpResponse" has no attribute "json" [attr-defined]
posthog/api/test/test_team.py:0: error: "HttpResponse" has no attribute "json" [attr-defined]

View File

@@ -146,7 +146,7 @@
"pmtiles": "^2.11.0",
"postcss": "^8.4.31",
"postcss-preset-env": "^9.3.0",
"posthog-js": "1.138.2",
"posthog-js": "1.139.0",
"posthog-js-lite": "3.0.0",
"prettier": "^2.8.8",
"prop-types": "^15.7.2",

View File

@@ -50,6 +50,7 @@
"@google-cloud/storage": "^5.8.5",
"@maxmind/geoip2-node": "^3.4.0",
"@posthog/clickhouse": "^1.7.0",
"@posthog/hogvm": "^1.0.11",
"@posthog/plugin-scaffold": "1.4.4",
"@sentry/node": "^7.49.0",
"@sentry/profiling-node": "^0.3.0",

View File

@@ -43,6 +43,9 @@ dependencies:
'@posthog/clickhouse':
specifier: ^1.7.0
version: 1.7.0
'@posthog/hogvm':
specifier: ^1.0.11
version: 1.0.11
'@posthog/plugin-scaffold':
specifier: 1.4.4
version: 1.4.4
@@ -3104,6 +3107,10 @@ packages:
engines: {node: '>=12'}
dev: false
/@posthog/hogvm@1.0.11:
resolution: {integrity: sha512-W1m4UPmpaNwm9+Rwpb3rjuZd3z+/gO9MsxibCnxdTndrFgIrNjGOas2ZEpZqJblV3sgubFbGq6IXdORbM+nv5w==}
dev: false
/@posthog/plugin-scaffold@1.4.4:
resolution: {integrity: sha512-3z1ENm1Ys5lEQil0H7TVOqHvD24+ydiZFk5hggpbHRx1iOxAK+Eu5qFyAROwPUcCo7NOYjmH2xL1C4B1vaHilg==}
dependencies:

View File

@@ -23,6 +23,7 @@ export function getPluginServerCapabilities(config: PluginsServerConfig): Plugin
personOverrides: true,
appManagementSingleton: true,
preflightSchedules: true,
cdpProcessedEvents: true,
...sharedCapabilities,
}
case PluginServerMode.ingestion:
@@ -87,5 +88,11 @@ export function getPluginServerCapabilities(config: PluginsServerConfig): Plugin
personOverrides: true,
...sharedCapabilities,
}
case PluginServerMode.cdp_processed_events:
return {
cdpProcessedEvents: true,
...sharedCapabilities,
}
}
}

View File

@@ -137,7 +137,6 @@ export function getDefaultConfig(): PluginsServerConfig {
RUSTY_HOOK_ROLLOUT_PERCENTAGE: 0,
RUSTY_HOOK_URL: '',
CAPTURE_CONFIG_REDIS_HOST: null,
LAZY_PERSON_CREATION_TEAMS: '',
STARTUP_PROFILE_DURATION_SECONDS: 300, // 5 minutes
STARTUP_PROFILE_CPU: false,

View File

@@ -333,11 +333,13 @@ export class SessionRecordingIngester {
}
public async handleEachBatch(messages: Message[], heartbeat: () => void): Promise<void> {
status.info('🔁', `blob_ingester_consumer - handling batch`, {
size: messages.length,
partitionsInBatch: [...new Set(messages.map((x) => x.partition))],
assignedPartitions: this.assignedPartitions,
})
if (messages.length !== 0) {
status.info('🔁', `blob_ingester_consumer - handling batch`, {
size: messages.length,
partitionsInBatch: [...new Set(messages.map((x) => x.partition))],
assignedPartitions: this.assignedPartitions,
})
}
await runInstrumentedFunction({
statsKey: `recordingingester.handleEachBatch`,
sendTimeoutGuardToSentry: false,

View File

@@ -10,7 +10,8 @@ import { Counter } from 'prom-client'
import v8Profiler from 'v8-profiler-next'
import { getPluginServerCapabilities } from '../capabilities'
import { buildIntegerMatcher, defaultConfig, sessionRecordingConsumerConfig } from '../config/config'
import { CdpProcessedEventsConsumer } from '../cdp/cdp-processed-events-consumer'
import { defaultConfig, sessionRecordingConsumerConfig } from '../config/config'
import { Hub, PluginServerCapabilities, PluginsServerConfig } from '../types'
import { createHub, createKafkaClient, createKafkaProducerWrapper } from '../utils/db/hub'
import { PostgresRouter } from '../utils/db/postgres'
@@ -105,6 +106,8 @@ export async function startPluginsServer(
let onEventHandlerConsumer: KafkaJSIngestionConsumer | undefined
let stopWebhooksHandlerConsumer: () => Promise<void> | undefined
const shutdownCallbacks: (() => Promise<void>)[] = []
// Kafka consumer. Handles events that we couldn't find an existing person
// to associate. The buffer handles delaying the ingestion of these events
// (default 60 seconds) to allow for the person to be created in the
@@ -157,6 +160,7 @@ export async function startPluginsServer(
stopSessionRecordingBlobOverflowConsumer?.(),
schedulerTasksConsumer?.disconnect(),
personOverridesPeriodicTask?.stop(),
...shutdownCallbacks.map((cb) => cb()),
])
if (piscina) {
@@ -370,14 +374,7 @@ export async function startPluginsServer(
const teamManager = hub?.teamManager ?? new TeamManager(postgres, serverConfig)
const organizationManager = hub?.organizationManager ?? new OrganizationManager(postgres, teamManager)
const KafkaProducerWrapper = hub?.kafkaProducer ?? (await createKafkaProducerWrapper(serverConfig))
const rustyHook =
hub?.rustyHook ??
new RustyHook(
buildIntegerMatcher(serverConfig.RUSTY_HOOK_FOR_TEAMS, true),
serverConfig.RUSTY_HOOK_ROLLOUT_PERCENTAGE,
serverConfig.RUSTY_HOOK_URL,
serverConfig.EXTERNAL_REQUEST_TIMEOUT_MS
)
const rustyHook = hub?.rustyHook ?? new RustyHook(serverConfig)
const appMetrics =
hub?.appMetrics ??
new AppMetrics(
@@ -494,6 +491,21 @@ export async function startPluginsServer(
}
}
if (capabilities.cdpProcessedEvents) {
;[hub, closeHub] = hub ? [hub, closeHub] : await createHub(serverConfig, capabilities)
const consumer = new CdpProcessedEventsConsumer(serverConfig, hub)
await consumer.start()
if (consumer.batchConsumer) {
shutdownOnConsumerExit(consumer.batchConsumer)
}
shutdownCallbacks.push(async () => {
await consumer.stop()
})
healthChecks['cdp-processed-events'] = () => consumer.isHealthy() ?? false
}
if (capabilities.personOverrides) {
const postgres = hub?.postgres ?? new PostgresRouter(serverConfig)
const kafkaProducer = hub?.kafkaProducer ?? (await createKafkaProducerWrapper(serverConfig))

Some files were not shown because too many files have changed in this diff Show More