feat(logs): allow second intervals for logs time buckets (#40459)

Co-authored-by: github-actions <41898282+github-actions[bot]@users.noreply.github.com>
Author: Frank Hamand
Date: 2025-10-30 08:39:05 +00:00
Committed by: GitHub
Parent: 8f2b2ca700
Commit: e70ae69c5c
17 changed files with 201 additions and 19 deletions


@@ -30,6 +30,7 @@ import { useAnnotationsPositioning } from './useAnnotationsPositioning'
/** User-facing format for annotation groups. */
const INTERVAL_UNIT_TO_HUMAN_DAYJS_FORMAT: Record<IntervalType, string> = {
second: 'MMMM D, YYYY H:mm:ss',
minute: 'MMMM D, YYYY H:mm:00',
hour: 'MMMM D, YYYY H:00',
day: 'MMMM D, YYYY',


@@ -401,6 +401,29 @@ describe('annotationsOverlayLogic', () => {
Record<IntervalType, Record<string, AnnotationType[]>> // All IntervalType variants should be covered
> = {
UTC: {
second: {
'2022-08-10 04:00:00+0000': [MOCK_ANNOTATION_ORG_SCOPED, MOCK_ANNOTATION_DASHBOARD_SCOPED].map(
(annotation) => deserializeAnnotation(annotation, 'UTC')
),
'2022-08-10 04:00:01+0000': [MOCK_ANNOTATION_ORG_SCOPED_FROM_INSIGHT_3].map((annotation) =>
deserializeAnnotation(annotation, 'UTC')
),
'2022-08-10 04:01:00+0000': [MOCK_ANNOTATION_PROJECT_SCOPED].map((annotation) =>
deserializeAnnotation(annotation, 'UTC')
),
'2022-08-10 05:00:00+0000': [MOCK_ANNOTATION_INSIGHT_1_SCOPED].map((annotation) =>
deserializeAnnotation(annotation, 'UTC')
),
'2022-08-11 04:00:00+0000': [MOCK_ANNOTATION_PROJECT_SCOPED_FROM_INSIGHT_1].map((annotation) =>
deserializeAnnotation(annotation, 'UTC')
),
'2022-08-17 04:00:00+0000': [MOCK_ANNOTATION_ORG_SCOPED_FROM_INSIGHT_1].map((annotation) =>
deserializeAnnotation(annotation, 'UTC')
),
'2022-09-10 04:00:00+0000': [MOCK_ANNOTATION_PROJECT_SCOPED_FROM_INSIGHT_3].map((annotation) =>
deserializeAnnotation(annotation, 'UTC')
),
},
minute: {
'2022-08-10 04:00:00+0000': [
MOCK_ANNOTATION_ORG_SCOPED,
@@ -494,6 +517,29 @@ describe('annotationsOverlayLogic', () => {
},
'America/Phoenix': {
// Purposefully using Phoenix for test determinism - Arizona does NOT observe DST
second: {
'2022-08-09 21:00:00-0700': [MOCK_ANNOTATION_ORG_SCOPED, MOCK_ANNOTATION_DASHBOARD_SCOPED].map(
(annotation) => deserializeAnnotation(annotation, 'America/Phoenix')
),
'2022-08-09 21:00:01-0700': [MOCK_ANNOTATION_ORG_SCOPED_FROM_INSIGHT_3].map((annotation) =>
deserializeAnnotation(annotation, 'America/Phoenix')
),
'2022-08-09 21:01:00-0700': [MOCK_ANNOTATION_PROJECT_SCOPED].map((annotation) =>
deserializeAnnotation(annotation, 'America/Phoenix')
),
'2022-08-09 22:00:00-0700': [MOCK_ANNOTATION_INSIGHT_1_SCOPED].map((annotation) =>
deserializeAnnotation(annotation, 'America/Phoenix')
),
'2022-08-10 21:00:00-0700': [MOCK_ANNOTATION_PROJECT_SCOPED_FROM_INSIGHT_1].map((annotation) =>
deserializeAnnotation(annotation, 'America/Phoenix')
),
'2022-08-16 21:00:00-0700': [MOCK_ANNOTATION_ORG_SCOPED_FROM_INSIGHT_1].map((annotation) =>
deserializeAnnotation(annotation, 'America/Phoenix')
),
'2022-09-09 21:00:00-0700': [MOCK_ANNOTATION_PROJECT_SCOPED_FROM_INSIGHT_3].map((annotation) =>
deserializeAnnotation(annotation, 'America/Phoenix')
),
},
minute: {
'2022-08-09 21:00:00-0700': [
MOCK_ANNOTATION_ORG_SCOPED,
@@ -587,6 +633,29 @@ describe('annotationsOverlayLogic', () => {
},
'Europe/Moscow': {
// Purposefully using Moscow for test determinism - Russia does NOT observe DST
second: {
'2022-08-10 07:00:00+0300': [MOCK_ANNOTATION_ORG_SCOPED, MOCK_ANNOTATION_DASHBOARD_SCOPED].map(
(annotation) => deserializeAnnotation(annotation, 'Europe/Moscow')
),
'2022-08-10 07:00:01+0300': [MOCK_ANNOTATION_ORG_SCOPED_FROM_INSIGHT_3].map((annotation) =>
deserializeAnnotation(annotation, 'Europe/Moscow')
),
'2022-08-10 07:01:00+0300': [MOCK_ANNOTATION_PROJECT_SCOPED].map((annotation) =>
deserializeAnnotation(annotation, 'Europe/Moscow')
),
'2022-08-10 08:00:00+0300': [MOCK_ANNOTATION_INSIGHT_1_SCOPED].map((annotation) =>
deserializeAnnotation(annotation, 'Europe/Moscow')
),
'2022-08-11 07:00:00+0300': [MOCK_ANNOTATION_PROJECT_SCOPED_FROM_INSIGHT_1].map((annotation) =>
deserializeAnnotation(annotation, 'Europe/Moscow')
),
'2022-08-17 07:00:00+0300': [MOCK_ANNOTATION_ORG_SCOPED_FROM_INSIGHT_1].map((annotation) =>
deserializeAnnotation(annotation, 'Europe/Moscow')
),
'2022-09-10 07:00:00+0300': [MOCK_ANNOTATION_PROJECT_SCOPED_FROM_INSIGHT_3].map((annotation) =>
deserializeAnnotation(annotation, 'Europe/Moscow')
),
},
minute: {
'2022-08-10 07:00:00+0300': [
MOCK_ANNOTATION_ORG_SCOPED,


@@ -17,6 +17,7 @@ interface DateDisplayProps {
}
const DISPLAY_DATE_FORMAT: Record<IntervalType, string> = {
second: 'HH:mm:ss',
minute: 'HH:mm:00',
hour: 'HH:00',
day: 'D MMM',
@@ -26,6 +27,8 @@ const DISPLAY_DATE_FORMAT: Record<IntervalType, string> = {
const dateHighlight = (parsedDate: dayjs.Dayjs, interval: IntervalType): string => {
switch (interval) {
case 'second':
return parsedDate.format('MMM D')
case 'minute':
return parsedDate.format('MMM D')
case 'hour':


@@ -10,6 +10,7 @@ import { Tooltip } from 'lib/lemon-ui/Tooltip'
import { DateOption, rollingDateRangeFilterLogic } from './rollingDateRangeFilterLogic'
const dateOptions: LemonSelectOptionLeaf<DateOption>[] = [
{ value: 'seconds', label: 'seconds' },
{ value: 'minutes', label: 'minutes' },
{ value: 'hours', label: 'hours' },
{ value: 'days', label: 'days' },


@@ -15,6 +15,7 @@ const dateOptionsMap = {
d: 'days',
h: 'hours',
M: 'minutes',
s: 'seconds',
} as const
export type DateOption = (typeof dateOptionsMap)[keyof typeof dateOptionsMap]
@@ -115,6 +116,8 @@ export const rollingDateRangeFilterLogic = kea<rollingDateRangeFilterLogicType>(
return `-${counter}h`
case 'minutes':
return `-${counter}M`
case 'seconds':
return `-${counter}s`
default:
return `-${counter}d`
}


@@ -4,6 +4,16 @@ import { IntervalType, SmoothingType } from '~/types'
// the typing should catch if we update IntervalType but do not add an explicit
// option to this lookup
export const smoothingOptions: Record<IntervalType, { label: string; value: SmoothingType }[]> = {
second: [
{
label: 'No smoothing',
value: 1,
},
{
label: '1-minute average',
value: 60,
},
],
minute: [
{
label: 'No smoothing',


@@ -1042,6 +1042,7 @@ const dateOptionsMap = {
d: 'day',
h: 'hour',
M: 'minute',
s: 'second',
} as const
export function dateFilterToText(
@@ -1108,6 +1109,9 @@ export function dateFilterToText(
case 'minute':
date = dayjs().subtract(counter, 'm')
break
case 'second':
date = dayjs().subtract(counter, 's')
break
default:
date = dayjs().subtract(counter, 'd')
break
@@ -1184,6 +1188,9 @@ export function componentsToDayJs(
case 'minute':
response = dayjsInstance.add(amount, 'minute')
break
case 'second':
response = dayjsInstance.add(amount, 'second')
break
default:
throw new UnexpectedNeverError(unit)
}
@@ -1316,6 +1323,11 @@ export const areDatesValidForInterval = (
parsedOldDateTo.diff(parsedOldDateFrom, 'minute') >= 2 &&
parsedOldDateTo.diff(parsedOldDateFrom, 'minute') < 60 * 12 // 12 hours. picked based on max graph resolution
)
} else if (interval === 'second') {
return (
parsedOldDateTo.diff(parsedOldDateFrom, 'second') >= 2 &&
parsedOldDateTo.diff(parsedOldDateFrom, 'second') < 60 * 60 // 1 hour
)
}
throw new UnexpectedNeverError(interval)
}


@@ -16203,7 +16203,7 @@
"type": "string"
},
"IntervalType": {
- "enum": ["minute", "hour", "day", "week", "month"],
+ "enum": ["second", "minute", "hour", "day", "week", "month"],
"type": "string"
},
"LLMTrace": {


@@ -95,6 +95,7 @@ export function getTooltipTitle(
}
export const INTERVAL_UNIT_TO_DAYJS_FORMAT: Record<IntervalType, string> = {
second: 'D MMM YYYY HH:mm:ss',
minute: 'D MMM YYYY HH:mm:00',
hour: 'D MMM YYYY HH:00',
day: 'D MMM YYYY',


@@ -53,6 +53,7 @@ const POSSIBLY_FRACTIONAL_MATH_TYPES: Set<MathType> = new Set(
)
export const INTERVAL_TO_DEFAULT_MOVING_AVERAGE_PERIOD: Record<IntervalType, number> = {
second: 300,
minute: 10,
hour: 6,
day: 7,


@@ -2465,7 +2465,7 @@ export type BreakdownType =
| 'data_warehouse'
| 'data_warehouse_person_property'
| 'revenue_analytics'
- export type IntervalType = 'minute' | 'hour' | 'day' | 'week' | 'month'
+ export type IntervalType = 'second' | 'minute' | 'hour' | 'day' | 'week' | 'month'
export type SimpleIntervalType = 'day' | 'month'
export type SmoothingType = number
export type InsightSceneSource = 'web-analytics' | 'llm-analytics'


@@ -13,8 +13,15 @@ from posthog.models.team import Team, WeekStartDay
from posthog.queries.util import get_earliest_timestamp, get_trunc_func_ch
from posthog.utils import DEFAULT_DATE_FROM_DAYS, relative_date_parse, relative_date_parse_with_delta_mapping
- IntervalLiteral = Literal["minute", "hour", "day", "week", "month"]
- ORDERED_INTERVALS = [IntervalType.MINUTE, IntervalType.HOUR, IntervalType.DAY, IntervalType.WEEK, IntervalType.MONTH]
+ IntervalLiteral = Literal["second", "minute", "hour", "day", "week", "month"]
+ ORDERED_INTERVALS = [
+ IntervalType.SECOND,
+ IntervalType.MINUTE,
+ IntervalType.HOUR,
+ IntervalType.DAY,
+ IntervalType.WEEK,
+ IntervalType.MONTH,
+ ]
def compare_interval_length(
@@ -95,15 +102,15 @@ class QueryDateRange:
if not self._date_range or not self._date_range.explicitDate:
is_relative = not self._date_range or not self._date_range.date_to or delta_mapping is not None
- if self.interval_name not in ("hour", "minute"):
+ if compare_interval_length(self.interval_type, ">", IntervalType.HOUR):
date_to = date_to.replace(hour=23, minute=59, second=59, microsecond=999999)
elif is_relative:
- if self.interval_name == "hour":
+ if self.interval_type == IntervalType.HOUR:
date_to = date_to.replace(minute=59, second=59, microsecond=999999)
- else:
+ elif self.interval_type == IntervalType.MINUTE:
date_to = date_to.replace(second=59, microsecond=999999)
+ elif self.interval_type == IntervalType.SECOND:
+ date_to = (date_to - timedelta(seconds=1)).replace(microsecond=999999)
return date_to
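
To make the new branch concrete, here is a minimal standalone sketch of how a relative upper bound is capped per interval, including the new second case (the function name and signature are illustrative, not the real QueryDateRange API):

import datetime as dt

def cap_relative_date_to(date_to: dt.datetime, interval: str) -> dt.datetime:
    # Coarser-than-hour intervals snap to the end of the day; hour/minute fill
    # the remainder of the bucket; second steps back one full second so the
    # bound is the last completed second.
    if interval == "hour":
        return date_to.replace(minute=59, second=59, microsecond=999999)
    if interval == "minute":
        return date_to.replace(second=59, microsecond=999999)
    if interval == "second":
        return (date_to - dt.timedelta(seconds=1)).replace(microsecond=999999)
    return date_to.replace(hour=23, minute=59, second=59, microsecond=999999)

cap_relative_date_to(dt.datetime(2021, 8, 25, 0, 0, 0), "second")
# -> 2021-08-24 23:59:59.999999, matching test_second further down
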
def get_earliest_timestamp(self) -> datetime:
@@ -123,7 +130,7 @@ class QueryDateRange:
now=self.now_with_timezone,
# this makes sure we truncate date_from to the start of the day, when looking at last N days by hour
# when we look at graphs by minute (last hour or last three hours), don't truncate
- always_truncate=not (self.interval_name == "minute" or self._exact_timerange),
+ always_truncate=not (self.interval_name in ("second", "minute") or self._exact_timerange),
)
else:
date_from = self.now_with_timezone.replace(hour=0, minute=0, second=0, microsecond=0) - relativedelta(
@@ -184,6 +191,8 @@ class QueryDateRange:
def align_with_interval(self, start: datetime, *, interval_name: Optional[IntervalLiteral] = None) -> datetime:
interval_name = interval_name or self.interval_name
if interval_name == "second":
return start.replace(microsecond=0)
if interval_name == "minute":
return start.replace(second=0, microsecond=0)
if interval_name == "hour":
@@ -207,6 +216,7 @@ class QueryDateRange:
months=self.interval_count if self.interval_name == "month" else 0,
hours=self.interval_count if self.interval_name == "hour" else 0,
minutes=self.interval_count if self.interval_name == "minute" else 0,
seconds=self.interval_count if self.interval_name == "second" else 0,
)
def all_values(self, *, interval_name: Optional[IntervalLiteral] = None) -> list[datetime]:
@@ -364,6 +374,7 @@ class QueryDateRange:
PERIOD_MAP: dict[str, timedelta | relativedelta] = {
"second": timedelta(seconds=1),
"minute": timedelta(minutes=1),
"hour": timedelta(hours=1),
"day": timedelta(days=1),


@@ -127,10 +127,70 @@ class TestQueryDateRange(APIBaseTest):
date_range = DateRange(date_from="-2M", date_to="-1M", explicitDate=False)
query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.MINUTE, now=now)
self.assertEqual(query_date_range.date_from(), parser.isoparse("2021-08-24T23:58:00.000000Z"))
self.assertEqual(query_date_range.date_to(), parser.isoparse("2021-08-24T23:59:59.999999Z"))
def test_second(self):
now = parser.isoparse("2021-08-25T00:00:00.000Z")
date_range = DateRange(date_from="-120s", date_to="-60s", explicitDate=False)
query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.SECOND, now=now)
self.assertEqual(query_date_range.date_from(), parser.isoparse("2021-08-24T23:58:00.000000Z"))
self.assertEqual(query_date_range.date_to(), parser.isoparse("2021-08-24T23:58:59.999999Z"))
def test_parsed_date_second(self):
now = parser.isoparse("2021-08-25T12:30:00.000Z")
date_range = DateRange(date_from="-60s")
query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.SECOND, now=now)
self.assertEqual(query_date_range.date_from(), parser.isoparse("2021-08-25T12:29:00Z"))
self.assertEqual(query_date_range.date_to(), parser.isoparse("2021-08-25T12:29:59.999999Z"))
def test_second_interval_with_explicit_date(self):
now = parser.isoparse("2021-08-25T12:30:45.000Z")
date_range = DateRange(
date_from="2021-08-25T12:29:30.000Z", date_to="2021-08-25T12:30:15.000Z", explicitDate=True
)
query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.SECOND, now=now)
self.assertEqual(query_date_range.date_from(), parser.isoparse("2021-08-25T12:29:30.000Z"))
self.assertEqual(query_date_range.date_to(), parser.isoparse("2021-08-25T12:30:15.000Z"))
def test_align_with_interval_second(self):
now = parser.isoparse("2021-08-25T12:30:45.123456Z")
query_date_range = QueryDateRange(team=self.team, date_range=None, interval=IntervalType.SECOND, now=now)
aligned = query_date_range.align_with_interval(now)
self.assertEqual(aligned, parser.isoparse("2021-08-25T12:30:45.000000Z"))
def test_second_interval_count(self):
now = parser.isoparse("2021-08-25T12:30:00.000Z")
date_range = DateRange(date_from="-300s", date_to="-0s", explicitDate=False)
query_date_range = QueryDateRange(
team=self.team, date_range=date_range, interval=IntervalType.SECOND, interval_count=10, now=now
)
values = query_date_range.all_values()
self.assertEqual(values[0], parser.isoparse("2021-08-25T12:25:00.000000Z"))
self.assertEqual(values[1], parser.isoparse("2021-08-25T12:25:10.000000Z"))
self.assertEqual(values[2], parser.isoparse("2021-08-25T12:25:20.000000Z"))
self.assertEqual(values[-1], parser.isoparse("2021-08-25T12:29:50.000000Z"))
self.assertEqual(len(values), 30)
def test_all_values_second_interval(self):
now = parser.isoparse("2021-08-25T12:30:00.000Z")
query_date_range = QueryDateRange(
team=self.team, date_range=DateRange(date_from="-10s"), interval=IntervalType.SECOND, now=now
)
values = query_date_range.all_values()
self.assertEqual(values[0], parser.isoparse("2021-08-25T12:29:50.000000Z"))
self.assertEqual(values[1], parser.isoparse("2021-08-25T12:29:51.000000Z"))
self.assertEqual(values[-1], parser.isoparse("2021-08-25T12:29:59.000000Z"))
self.assertEqual(len(values), 10)
def test_interval_count(self):
now = parser.isoparse("2021-08-25T00:00:00.000Z")
date_range = DateRange(date_from="-66M", date_to="-6M", explicitDate=False)


@@ -1807,6 +1807,7 @@ class IntegrationKind(StrEnum):
class IntervalType(StrEnum):
SECOND = "second"
MINUTE = "minute"
HOUR = "hour"
DAY = "day"


@@ -195,7 +195,7 @@ def relative_date_parse_with_delta_mapping(
parsed_dt = parsed_dt.astimezone(timezone_info)
return parsed_dt, None, None
- regex = r"\-?(?P<number>[0-9]+)?(?P<kind>[hdwmqyHDWMQY])(?P<position>Start|End)?"
+ regex = r"\-?(?P<number>[0-9]+)?(?P<kind>[hdwmqysHDWMQY])(?P<position>Start|End)?"
match = re.search(regex, input)
parsed_dt = (now or dt.datetime.now()).astimezone(timezone_info)
delta_mapping: dict[str, int] = {}
@@ -271,6 +271,9 @@ def get_delta_mapping_for(
elif kind == "M":
if number:
delta_mapping["minutes"] = int(number)
elif kind == "s":
if number:
delta_mapping["seconds"] = int(number)
elif kind == "q":
if number:
delta_mapping["weeks"] = 13 * int(number)
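
As a rough illustration of what the new lowercase "s" kind enables, here is a self-contained sketch of turning a string like "-90s" into a point in time; the helper name is illustrative, the regex is copied from the diff, and the real relative_date_parse also handles the h/d/w/m/q/y kinds and the Start/End positions:

import datetime as dt
import re

RELATIVE_RE = re.compile(r"\-?(?P<number>[0-9]+)?(?P<kind>[hdwmqysHDWMQY])(?P<position>Start|End)?")

def parse_relative_seconds(value: str, now: dt.datetime) -> dt.datetime:
    # Only the seconds kind added in this commit is handled in this sketch.
    match = RELATIVE_RE.search(value)
    if not match or match.group("kind") != "s":
        raise NotImplementedError("this sketch only covers the 's' kind")
    return now - dt.timedelta(seconds=int(match.group("number") or 0))

parse_relative_seconds("-90s", dt.datetime(2021, 8, 25, 12, 30))
# -> 2021-08-25 12:28:30
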


@@ -235,11 +235,7 @@ class LogsQueryRunner(AnalyticsQueryRunner[LogsQueryResponse]):
)
_step = (qdr.date_to() - qdr.date_from()) / 50
- if _step < dt.timedelta(minutes=1):
- _step = dt.timedelta(minutes=1)
- _step = dt.timedelta(seconds=int(60 * round(_step.total_seconds() / 60)))
- interval_type = IntervalType.MINUTE
+ interval_type = IntervalType.SECOND
def find_closest(target, arr):
if not arr:
@@ -251,7 +247,14 @@ class LogsQueryRunner(AnalyticsQueryRunner[LogsQueryResponse]):
# set the number of intervals to a "round" number of minutes
# it's hard to reason about the rate of logs on e.g. 13 minute intervals
# the min interval is 1 minute and max interval is 1 day
- interval_count = find_closest(_step.total_seconds() // 60, [1, 2, 5, 10, 15, 30, 60, 120, 240, 360, 720, 1440])
+ interval_count = find_closest(
+ _step.total_seconds(),
+ [1, 5] + [x * 60 for x in [1, 2, 5, 10, 15, 30, 60, 120, 240, 360, 720, 1440]],
+ )
+ if _step >= dt.timedelta(minutes=1):
+ interval_type = IntervalType.MINUTE
+ interval_count //= 60
return QueryDateRange(
date_range=self.query.dateRange,
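
The bucket-size selection above can be summarised with a short standalone sketch: aim for roughly 50 buckets, snap the step to a "round" number of seconds, and fall back to minute intervals once the step reaches a minute. The function name, ROUND_STEPS_SECONDS, and the min-by-distance snapping (standing in for find_closest, whose body is not shown in this hunk) are assumptions for illustration:

import datetime as dt

ROUND_STEPS_SECONDS = [1, 5] + [m * 60 for m in [1, 2, 5, 10, 15, 30, 60, 120, 240, 360, 720, 1440]]

def choose_bucket(date_from: dt.datetime, date_to: dt.datetime) -> tuple[str, int]:
    # Target ~50 buckets across the range, then snap to an allowed step size.
    step = (date_to - date_from) / 50
    seconds = min(ROUND_STEPS_SECONDS, key=lambda s: abs(s - step.total_seconds()))
    if seconds >= 60:
        return "minute", seconds // 60
    return "second", seconds

choose_bucket(dt.datetime(2021, 8, 25, 12, 0), dt.datetime(2021, 8, 25, 12, 2))
# -> ('second', 1): a 2-minute window now gets 1-second buckets instead of the old 1-minute floor
choose_bucket(dt.datetime(2021, 8, 25, 12, 0), dt.datetime(2021, 8, 25, 13, 0))
# -> ('minute', 1): a 1-hour window still yields 1-minute buckets
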


@@ -57,7 +57,7 @@ class SparklineQueryRunner(LogsQueryRunner):
) AS am
LEFT JOIN (
SELECT
- toStartOfInterval(time_bucket, {one_interval_period}) AS time,
+ toStartOfInterval({time_field}, {one_interval_period}) AS time,
severity_text,
count() AS event_count
FROM logs
@@ -69,6 +69,9 @@ class SparklineQueryRunner(LogsQueryRunner):
""",
placeholders={
**self.query_date_range.to_placeholders(),
"time_field": ast.Field(chain=["time_bucket"])
if self.query_date_range.interval_name != "second"
else ast.Field(chain=["timestamp"]),
"where": self.where(),
},
)
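
A minimal sketch of the placeholder choice above, assuming time_bucket is a pre-aggregated column that is too coarse for sub-minute buckets, so second-granularity sparklines truncate the raw timestamp column instead; the helper name and the import path are illustrative:

from posthog.hogql import ast  # import path is an assumption for this sketch

def sparkline_time_field(interval_name: str) -> ast.Field:
    # Second-granularity sparklines bucket the raw timestamp column; coarser
    # intervals keep using the pre-aggregated time_bucket column.
    chain = ["timestamp"] if interval_name == "second" else ["time_bucket"]
    return ast.Field(chain=chain)
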