feat: bump clickhouse version to 23.4 for really exciting features (#15574)

* feature: bump clickhouse version to 23.4 for really exciting features

* remove default for kafka table

* Update query snapshots

* don't use defaults on kafka tables

* Update query snapshots

* clickhouse formatDateTime format-string syntax has changed (minute/second are now %i/%s instead of %M/%S)

* run both 22.8 and 23.4 for now

---------

Co-authored-by: github-actions <41898282+github-actions[bot]@users.noreply.github.com>
This commit is contained in:
James Greenhill
2023-05-17 11:00:08 -07:00
committed by GitHub
parent a82ae71478
commit 9e5b92e7d8
5 changed files with 13 additions and 15 deletions

View File

@@ -236,7 +236,7 @@ jobs:
fail-fast: false
matrix:
python-version: ['3.10.10']
clickhouse-server-image: ['clickhouse/clickhouse-server:22.8']
clickhouse-server-image: ['clickhouse/clickhouse-server:22.8', 'clickhouse/clickhouse-server:23.4']
segment: ['FOSS', 'EE']
person-on-events: [false, true]
# :NOTE: Keep concurrency and groups in sync

View File

@@ -24,7 +24,7 @@ services:
# Note: please keep the default version in sync across
# `posthog` and the `charts-clickhouse` repos
#
image: ${CLICKHOUSE_SERVER_IMAGE:-clickhouse/clickhouse-server:22.8}
image: ${CLICKHOUSE_SERVER_IMAGE:-clickhouse/clickhouse-server:23.4}
restart: on-failure
depends_on:
- kafka

View File

@@ -60,7 +60,7 @@ def query_events() -> List[Dict]:
FROM events
ORDER BY distinct_id
""",
{"format": "%Y-%m-%dT%H:%M:%SZ"},
{"format": "%Y-%m-%dT%H:%i:%sZ"},
)
@@ -502,7 +502,6 @@ class Test0007PersonsAndGroupsOnEventsBackfill(AsyncMigrationBaseTest, Clickhous
MIGRATION_DEFINITION._check_person_data() # type: ignore
def test_check_person_data_failure(self):
for i in range(100):
_uuid = UUIDT()
@@ -559,7 +558,6 @@ class Test0007PersonsAndGroupsOnEventsBackfill(AsyncMigrationBaseTest, Clickhous
MIGRATION_DEFINITION._check_person_data() # type: ignore
def test_check_groups_data_success(self):
# don't run the backfill so we can test the postcheck based only on the data we create
old_fn = MIGRATION_DEFINITION.operations[-4].fn
MIGRATION_DEFINITION.operations[-4].fn = lambda *args: None

View File

@@ -212,7 +212,7 @@
team_id Int64,
properties VARCHAR,
is_identified Int8,
is_deleted Int8 DEFAULT 0,
is_deleted Int8,
version UInt64
) ENGINE = Kafka('test.kafka.broker:9092', 'clickhouse_person_test', 'group1', 'JSONEachRow')
@@ -228,7 +228,7 @@
distinct_id VARCHAR,
person_id UUID,
is_deleted Int8,
version Int64 DEFAULT 1
version Int64
) ENGINE = Kafka('test.kafka.broker:9092', 'clickhouse_person_distinct_id_test', 'group1', 'JSONEachRow')
@@ -793,7 +793,7 @@
team_id Int64,
properties VARCHAR,
is_identified Int8,
is_deleted Int8 DEFAULT 0,
is_deleted Int8,
version UInt64
) ENGINE = Kafka('kafka:9092', 'clickhouse_person_test', 'group1', 'JSONEachRow')
@@ -809,7 +809,7 @@
distinct_id VARCHAR,
person_id UUID,
is_deleted Int8,
version Int64 DEFAULT 1
version Int64
) ENGINE = Kafka('kafka:9092', 'clickhouse_person_distinct_id_test', 'group1', 'JSONEachRow')
@@ -1001,7 +1001,7 @@
team_id Int64,
properties VARCHAR,
is_identified Int8,
is_deleted Int8 DEFAULT 0,
is_deleted Int8,
version UInt64
@@ -1025,7 +1025,7 @@
distinct_id VARCHAR,
person_id UUID,
is_deleted Int8,
version Int64 DEFAULT 1
version Int64
, _timestamp DateTime
@@ -1805,7 +1805,7 @@
team_id Int64,
properties VARCHAR,
is_identified Int8,
is_deleted Int8 DEFAULT 0,
is_deleted Int8,
version UInt64
@@ -1829,7 +1829,7 @@
distinct_id VARCHAR,
person_id UUID,
is_deleted Int8,
version Int64 DEFAULT 1
version Int64
, _timestamp DateTime

View File

@@ -24,7 +24,7 @@ CREATE TABLE IF NOT EXISTS {table_name} ON CLUSTER '{cluster}'
team_id Int64,
properties VARCHAR,
is_identified Int8,
is_deleted Int8 DEFAULT 0,
is_deleted Int8,
version UInt64
{extra_fields}
) ENGINE = {engine}
@@ -170,7 +170,7 @@ CREATE TABLE IF NOT EXISTS {table_name} ON CLUSTER '{cluster}'
distinct_id VARCHAR,
person_id UUID,
is_deleted Int8,
version Int64 DEFAULT 1
version Int64
{extra_fields}
) ENGINE = {engine}
"""