chore: Step 1 to remove posthog-cloud repo (#15898)

This commit is contained in:
Ben White
2023-06-15 10:33:19 +02:00
committed by GitHub
parent fbe6ee5db2
commit 4783dec288
32 changed files with 340 additions and 448 deletions

View File

@@ -309,108 +309,6 @@ jobs:
path: posthog/tasks/test/__emails__
retention-days: 5
cloud:
needs: changes
timeout-minutes: 30
# This workflow may be skipped as a whole (as opposed to the other ones), as it's optional on forks anyway
if: github.repository == 'PostHog/posthog' && needs.changes.outputs.backend == 'true'
name: Django tests Cloud
runs-on: ubuntu-latest
steps:
- name: Fetch posthog-cloud
run: |
curl -u posthog-bot:${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} -L https://github.com/posthog/posthog-cloud/tarball/master | tar --strip-components=1 -xz --
- name: Checkout master
uses: actions/checkout@v3
with:
ref: 'master'
path: 'master/'
- name: Link posthog-cloud at master
run: |
cp -r multi_tenancy master/
cp -r messaging master/
cat multi_tenancy_settings.py > master/posthog/settings/cloud.py
cat requirements.txt >> master/requirements.txt
- name: Stop/Start stack with Docker Compose
run: |
docker compose -f master/docker-compose.dev.yml down
docker compose -f master/docker-compose.dev.yml up -d
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: 3.10.10
token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }}
- uses: syphar/restore-virtualenv@v1
id: cache-backend-tests
- uses: syphar/restore-pip-download-cache@v1
if: steps.cache-backend-tests.outputs.cache-hit != 'true'
- name: Install SAML (python3-saml) dependencies
run: |
sudo apt-get update
sudo apt-get install libxml2-dev libxmlsec1-dev libxmlsec1-openssl
- name: Install python dependencies
if: steps.cache-backend-tests.outputs.cache-hit != 'true'
run: |
python -m pip install -r master/requirements.txt -r master/requirements-dev.txt
- name: Wait for Clickhouse & Kafka
run: master/bin/check_kafka_clickhouse_up
# The 2-step migration process (first master, then current branch) verifies that it'll always
# be possible to migrate to the new version without problems in production
- name: Run migration on master branch
run: |
python master/manage.py migrate
- name: Checkout current branch
uses: actions/checkout@v3
with:
path: 'current/'
- name: Install requirements.txt dependencies with pip at current branch
run: |
cd current
python -m pip install -r requirements.txt -r requirements-dev.txt
- name: Link posthog-cloud at current branch
run: |
cp current/ee/conftest.py multi_tenancy/conftest.py
cp current/ee/conftest.py messaging/conftest.py
cp -r multi_tenancy current/
cp -r messaging current/
cat multi_tenancy_settings.py > current/posthog/settings/cloud.py
cat requirements.txt >> current/requirements.txt
- name: Check migrations
run: |
cd current
python manage.py makemigrations --check --dry-run
python manage.py migrate
- name: Set up needed files
run: |
cd current
mkdir -p frontend/dist
python manage.py collectstatic --noinput
touch frontend/dist/index.html
touch frontend/dist/layout.html
touch frontend/dist/exporter.html
- name: Run cloud tests (posthog-cloud)
run: |
source .env.template
cd current
pytest multi_tenancy messaging -m "not skip_on_multitenancy and not async_migrations" --durations=100 --durations-min=1.0
async-migrations:
name: Async migrations tests
needs: changes

View File

@@ -2,6 +2,7 @@ from typing import List, Optional
from django.contrib.auth import get_user_model
from django.db import models
from django.db.models import Q
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
from django.utils import timezone
@@ -29,9 +30,8 @@ class LicenseError(exceptions.APIException):
class LicenseManager(models.Manager):
def first_valid(self) -> Optional["License"]:
"""Return the highest valid license."""
# KEEP IN SYNC WITH licenseLogic.selectors.relevantLicense FOR THE ACTIVE LICENSE
valid_licenses = list(self.filter(valid_until__gte=timezone.now()))
"""Return the highest valid license or cloud licenses if any"""
valid_licenses = list(self.filter(Q(valid_until__gte=timezone.now()) | Q(plan="cloud")))
if not valid_licenses:
return None
return max(valid_licenses, key=lambda license: License.PLAN_TO_SORTING_VALUE.get(license.plan, 0))

View File

@@ -1,124 +0,0 @@
from typing import Dict, Optional
import requests
from ee.billing.billing_manager import build_billing_token
from ee.models import License
from ee.settings import BILLING_SERVICE_URL
# One-off migration: move PostHog Cloud organizations from billing v1
# (multi_tenancy.OrganizationBilling + a single-item Stripe subscription) to
# billing v2 by PATCHing the external billing service, then deleting the v1
# record. Returns the number of organizations migrated.
# NOTE(review): indentation was lost in this diff rendering; structure below is
# inferred from control-flow keywords.
# NOTE(review): mutable default `ignore_ids: list = []` is a Python pitfall,
# though it appears to be only read here, never mutated.
def migrate_billing(
events_price_map: Dict[str, str],
recordings_price_id: str,
dry_run: bool = False,
limit: int = 10,
organization_id: Optional[int] = None,
ignore_ids: list = [],
) -> int:
# Soft dependency: multi_tenancy only exists on the (now removed) Cloud
# deployment; on self-hosted the import fails and we bail out with 0.
try:
import stripe
from multi_tenancy.models import OrganizationBilling # noqa: F401
from multi_tenancy.stripe import _init_stripe # noqa: F401
except ImportError:
print("Couldn't import multi_tenancy models") # noqa T201
return 0
# A valid license is required to mint the billing-service auth token below.
license = License.objects.first_valid()
if not license: # mypy
return 0
_init_stripe()
migrated_orgs = 0
try:
# Either target one explicit organization, or batch over orgs that have a
# non-empty Stripe customer id. NOTE(review): `limit` only applies to the
# batch branch, not when organization_id is given.
if organization_id:
query = OrganizationBilling.objects.filter(organization_id=organization_id)
else:
query = OrganizationBilling.objects.exclude(stripe_customer_id__isnull=True).exclude(
stripe_customer_id__exact=""
)[:limit]
for billing in query:
if str(billing.organization.id) in ignore_ids:
print("Ignoring: ", billing.organization.name) # noqa T201
continue
try:
# should_delete tracks whether this org's v1 record is fully handled
# and safe to remove at the end of the iteration.
should_delete = False
print("Migrating billing for: ", billing.organization.name) # noqa T201
billing_service_token = build_billing_token(license, billing.organization)
payload = {"stripe_customer_id_v1": billing.stripe_customer_id}
if not billing.stripe_subscription_id:
# No subscription at all: nothing to port, just delete the v1 row.
should_delete = True
elif billing.stripe_subscription_id:
subscription = stripe.Subscription.retrieve(billing.stripe_subscription_id)
if subscription["status"] == "active":
payload["stripe_subscription_id_v1"] = billing.stripe_subscription_id
items = subscription["items"]["data"]
if len(items) > 1:
# there should be only one item on old subscriptions
raise Exception("More than one item on subscription")
new_price_id = None
sub_item = items[0]
old_price_id = sub_item["price"]["id"]
if old_price_id in events_price_map:
new_price_id = events_price_map[old_price_id]
if new_price_id:
if dry_run:
print("Would have switched to new price", new_price_id) # noqa T201
print( # noqa T201
"Would have created new item with free recordings", recordings_price_id
)
else:
print("Switching to new events price") # noqa T201
# we switch the old event price id to the new one
stripe.SubscriptionItem.modify(
sub_item["id"],
price=new_price_id,
)
print("Creating new item with free recordings") # noqa T201
# we create a new item with free session recordings
stripe.SubscriptionItem.create(
subscription=billing.stripe_subscription_id,
price=recordings_price_id,
) # noqa T201
should_delete = True
else:
# we don't delete this org_billing yet as we don't have a matching price
print("No matching price found for", billing.organization.name) # noqa T201
should_delete = False
else:
# Inactive subscription: port nothing beyond the customer id.
print("Subscription not active for", billing.organization.name) # noqa T201
should_delete = True
# Tell the billing service about the v1 customer/subscription ids.
# NOTE(review): this PATCH also runs for orgs whose price had no match
# (should_delete stays False) — presumably intentional; confirm.
if not dry_run:
res = requests.patch(
f"{BILLING_SERVICE_URL}/api/billing",
headers={"Authorization": f"Bearer {billing_service_token}"},
json=payload,
)
if res.status_code != 200:
raise Exception(res.json())
if dry_run:
print("Dry run, not deleting billing v1 for", billing.organization.name) # noqa T201
elif should_delete:
# we have done everything with this org, so we can delete it
print("Deleting billing v1 for", billing.organization.name) # noqa T201
billing.delete()
migrated_orgs += 1
except Exception as e:
# Re-raise with org context; caught by the outer handler below, which
# means one failing org aborts the remainder of the batch.
raise Exception(
{
"org_id": billing.organization.id,
"org_name": billing.organization.name,
"error": e,
}
)
except Exception as e:
print("Error migrating billing", e) # noqa T201
return migrated_orgs

View File

@@ -1,10 +1,14 @@
from typing import Any, List
from django.conf import settings
from django.contrib import admin
from django.urls import include
from django.urls.conf import path
from rest_framework_extensions.routers import NestedRegistryItem
from ee.api import integration, time_to_see_data
from posthog.api.routing import DefaultRouterPlusPlus
from posthog.cloud_utils import is_cloud
from .api import (
authentication,
@@ -88,7 +92,16 @@ def extend_api_router(
)
# The admin interface is disabled on self-hosted instances, as its misuse can be unsafe
admin_urlpatterns = (
[path("admin/", include("loginas.urls")), path("admin/", admin.site.urls)]
if is_cloud() or settings.DEMO or settings.DEBUG
else []
)
urlpatterns: List[Any] = [
path("api/saml/metadata/", authentication.saml_metadata_view),
path("api/sentry_stats/", sentry_stats.sentry_stats),
*admin_urlpatterns,
]

View File

@@ -247,8 +247,6 @@ class OrganizationAdmin(admin.ModelAdmin):
"created_at",
"updated_at",
"plugins_access_level",
"billing_plan",
"organization_billing_link",
"billing_link_v2",
"usage_posthog",
"usage",
@@ -257,8 +255,6 @@ class OrganizationAdmin(admin.ModelAdmin):
readonly_fields = [
"created_at",
"updated_at",
"billing_plan",
"organization_billing_link",
"billing_link_v2",
"usage_posthog",
"usage",
@@ -270,7 +266,6 @@ class OrganizationAdmin(admin.ModelAdmin):
"plugins_access_level",
"members_count",
"first_member",
"organization_billing_link",
"billing_link_v2",
)
@@ -281,14 +276,7 @@ class OrganizationAdmin(admin.ModelAdmin):
user = organization.members.order_by("id").first()
return format_html(f'<a href="/admin/posthog/user/{user.pk}/change/">{user.email}</a>')
def organization_billing_link(self, organization: Organization) -> str:
return format_html(
'<a href="/admin/multi_tenancy/organizationbilling/{}/change/">Billing →</a>', organization.pk
)
def billing_link_v2(self, organization: Organization) -> str:
if not organization.has_billing_v2_setup:
return ""
url = f"{settings.BILLING_SERVICE_URL}/admin/billing/customer/?q={organization.pk}"
return format_html(f'<a href="{url}">Billing V2 →</a>')

View File

@@ -10,6 +10,7 @@ from rest_framework.response import Response
from posthog.async_migrations.status import async_migrations_ok
from posthog.clickhouse.system_status import dead_letter_queue_ratio_ok_cached
from posthog.cloud_utils import is_cloud
from posthog.gitsha import GIT_SHA
from posthog.permissions import SingleTenancyOrAdmin
from posthog.storage import object_storage
@@ -147,7 +148,7 @@ class InstanceStatusViewSet(viewsets.ViewSet):
{
"system_status_ok": (
# :TRICKY: Cloud alerts of services down via pagerduty
settings.MULTI_TENANCY
is_cloud()
or (
is_redis_alive()
and is_postgres_alive()

View File

@@ -38,3 +38,50 @@
'constance:posthog:SLACK_APP_SIGNING_SECRET') /*controller='posthog.views.preflight_check',route='%5E_preflight/%3F%28%3F%3A%5B%3F%23%5D.%2A%29%3F%24'*/
'
---
# name: TestPreflight.test_cloud_preflight_limited_db_queries.2
'
SELECT "posthog_team"."id",
"posthog_team"."uuid",
"posthog_team"."organization_id",
"posthog_team"."api_token",
"posthog_team"."app_urls",
"posthog_team"."name",
"posthog_team"."slack_incoming_webhook",
"posthog_team"."created_at",
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
"posthog_team"."session_recording_opt_in",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."session_recording_version",
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
"posthog_team"."path_cleaning_filters",
"posthog_team"."timezone",
"posthog_team"."data_attributes",
"posthog_team"."person_display_name_properties",
"posthog_team"."live_events_columns",
"posthog_team"."recording_domains",
"posthog_team"."primary_dashboard_id",
"posthog_team"."correlation_config",
"posthog_team"."session_recording_retention_period_days",
"posthog_team"."plugins_opt_in",
"posthog_team"."opt_out_capture",
"posthog_team"."event_names",
"posthog_team"."event_names_with_usage",
"posthog_team"."event_properties",
"posthog_team"."event_properties_with_usage",
"posthog_team"."event_properties_numerical"
FROM "posthog_team"
WHERE "posthog_team"."id" = 2
LIMIT 21 /*controller='posthog.views.preflight_check',route='%5E_preflight/%3F%28%3F%3A%5B%3F%23%5D.%2A%29%3F%24'*/
'
---

View File

@@ -65,7 +65,7 @@ class TestEvents(ClickhouseTestMixin, APIBaseTest):
# Django session, rate limit instance setting, PostHog user,
# PostHog team, PostHog org membership, person and distinct id, 4x PoE check
with self.assertNumQueries(10):
with self.assertNumQueries(9):
response = self.client.get(f"/api/projects/{self.team.id}/events/?event=event_name").json()
self.assertEqual(response["results"][0]["event"], "event_name")
@@ -78,7 +78,7 @@ class TestEvents(ClickhouseTestMixin, APIBaseTest):
)
flush_persons_and_events()
expected_queries = 12 # Django session, PostHog user, PostHog team, PostHog org membership,
expected_queries = 11 # Django session, PostHog user, PostHog team, PostHog org membership,
# look up if rate limit is enabled (cached after first lookup), 2x non-cached instance setting (MATERIALIZED_COLUMNS_ENABLED), person and distinct id
with self.assertNumQueries(expected_queries):

View File

@@ -102,7 +102,7 @@ class TestInstanceStatus(APIBaseTest):
self.user.is_staff = True
self.user.save()
with self.settings(MULTI_TENANCY=True):
with self.is_cloud(True):
response = self.client.get("/api/instance_status/navigation").json()
self.assertEqual(

View File

@@ -172,7 +172,7 @@ class TestPerson(ClickhouseTestMixin, APIBaseTest):
flush_persons_and_events()
# Filter by distinct ID
with self.assertNumQueries(12):
with self.assertNumQueries(11):
response = self.client.get("/api/person/?distinct_id=distinct_id") # must be exact matches
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.json()["results"]), 1)
@@ -667,7 +667,7 @@ class TestPerson(ClickhouseTestMixin, APIBaseTest):
create_person(team_id=self.team.pk, version=0)
returned_ids = []
with self.assertNumQueries(11):
with self.assertNumQueries(10):
response = self.client.get("/api/person/?limit=10").json()
self.assertEqual(len(response["results"]), 9)
returned_ids += [x["distinct_ids"][0] for x in response["results"]]

View File

@@ -4,6 +4,7 @@ from unittest.mock import patch
import pytest
from django.utils import timezone
from rest_framework import status
from posthog.cloud_utils import TEST_clear_cloud_cache
from posthog.models.instance_setting import set_instance_setting
from posthog.models.organization import Organization, OrganizationInvite
@@ -157,7 +158,7 @@ class TestPreflight(APIBaseTest, QueryMatchingTest):
def test_cloud_preflight_limited_db_queries(self):
with self.is_cloud(True):
# :IMPORTANT: This code is hit _every_ web request on cloud so avoid ever increasing db load.
with self.assertNumQueries(3): # session, team and slack instance setting.
with self.assertNumQueries(4): # session, user, team and slack instance setting.
response = self.client.get("/_preflight/")
self.assertEqual(response.status_code, status.HTTP_200_OK)
@@ -263,6 +264,7 @@ class TestPreflight(APIBaseTest, QueryMatchingTest):
@pytest.mark.ee
def test_cloud_preflight_based_on_license(self):
TEST_clear_cloud_cache()
try:
from ee.models.license import License, LicenseManager
except ImportError:

View File

@@ -150,8 +150,8 @@ class TestSessionRecordings(APIBaseTest, ClickhouseTestMixin, QueryMatchingTest)
@snapshot_postgres_queries
def test_listing_recordings_is_not_nplus1_for_persons(self):
# request once without counting queries to cache an ee.license lookup that makes results vary otherwise
with freeze_time("2022-06-03T12:00:00.000Z"):
# request once without counting queries to cache an ee.license lookup that makes results vary otherwise
self.client.get(f"/api/projects/{self.team.id}/session_recordings")
base_time = (now() - relativedelta(days=1)).replace(microsecond=0)

View File

@@ -11,6 +11,7 @@ from posthog.async_migrations.definition import (
from posthog.async_migrations.disk_util import analyze_enough_disk_space_free_for_table
from posthog.async_migrations.utils import run_optimize_table
from posthog.client import sync_execute
from posthog.cloud_utils import is_cloud
from posthog.constants import AnalyticsDBMS
from posthog.models.instance_setting import set_instance_setting
from posthog.settings import CLICKHOUSE_CLUSTER, CLICKHOUSE_DATABASE
@@ -146,7 +147,7 @@ class Migration(AsyncMigrationDefinition):
return _operations
def is_required(self):
if settings.MULTI_TENANCY:
if is_cloud():
return False
table_engine = sync_execute(

View File

@@ -89,9 +89,6 @@ def setup_periodic_tasks(sender: Celery, **kwargs):
# PostHog Cloud cron jobs
if is_cloud():
# TODO EC this should be triggered only for instances that haven't been migrated to the new billing
# Calculate billing usage for the day every day at midnight UTC
sender.add_periodic_task(crontab(hour=0, minute=0), calculate_billing_daily_usage.s())
# Verify that persons data is in sync every day at 4 AM UTC
sender.add_periodic_task(crontab(hour=4, minute=0), verify_persons_data_in_sync.s())
@@ -741,16 +738,6 @@ def count_teams_with_no_property_query_count():
logger.error("calculate_event_property_usage.count_teams_failed", exc=exc, exc_info=True)
@app.task(ignore_result=True)
def calculate_billing_daily_usage():
try:
from multi_tenancy.tasks import compute_daily_usage_for_organizations # noqa: F401
except ImportError:
pass
else:
compute_daily_usage_for_organizations()
@app.task(ignore_result=True)
def calculate_decide_usage() -> None:
from posthog.models.feature_flag.flag_analytics import capture_team_decide_usage

View File

@@ -16,9 +16,6 @@ def is_cloud():
if isinstance(is_cloud_cached, bool):
return is_cloud_cached
# Until billing-v2 is fully migrated, multi-tenancy take priority
is_cloud_cached = str(settings.MULTI_TENANCY).lower() in ("true", "1")
if not is_cloud_cached:
try:
# NOTE: Important not to import this from ee.models as that will cause a circular import for celery
@@ -38,6 +35,6 @@ def is_cloud():
# NOTE: This is purely for testing purposes
def TEST_clear_cloud_cache():
def TEST_clear_cloud_cache(value: Optional[bool] = None):
global is_cloud_cached
is_cloud_cached = None
is_cloud_cached = value

View File

@@ -1,44 +0,0 @@
import json
from django.core.management.base import BaseCommand
from ee.tasks.migrate_billing import migrate_billing
# Django management command wrapper around ee.tasks.migrate_billing.
# Usage: ./manage.py <command> --events_price_map='{"old":"new"}' ...
# NOTE(review): indentation was lost in this diff rendering; structure below is
# inferred from control-flow keywords.
class Command(BaseCommand):
help = "Migrate cloud billing v1 orgs to v2"
def add_arguments(self, parser):
# NOTE(review): argparse `type=bool` is a known trap — any non-empty string
# (including "False") parses truthy; --dry-run=False would still dry-run.
parser.add_argument("--dry-run", type=bool, help="Print information instead of sending it")
# JSON object mapping old Stripe price IDs to new ones (parsed in handle()).
parser.add_argument("--events_price_map", type=str, help="Map of old to new price IDs")
parser.add_argument("--recordings_price_id", type=str, help="Price ID for free recordings")
parser.add_argument("--limit", type=int, help="Limit the number of orgs to process")
parser.add_argument("--organization-id", type=str, help="Only migrate this organization ID")
parser.add_argument("--ignore-ids", type=str, help="Comma-separated list of org IDs to ignore")
def handle(self, *args, **options):
# argparse normalizes dashed option names to underscores in `options`.
dry_run = options["dry_run"]
# Raises json.JSONDecodeError if the map is not valid JSON — fails loudly,
# which is acceptable for an operator-run command.
events_price_map = json.loads(options["events_price_map"])
recordings_price_id = options["recordings_price_id"]
limit = options["limit"]
organization_id = options["organization_id"]
ignore_ids = options["ignore_ids"]
# Normalize the comma-separated string (or None) into a list of id strings.
if ignore_ids:
ignore_ids = ignore_ids.split(",")
else:
ignore_ids = []
migrated_orgs = migrate_billing(
events_price_map,
recordings_price_id,
dry_run=dry_run,
limit=limit,
organization_id=organization_id,
ignore_ids=ignore_ids,
)
if dry_run:
print("Dry run so not migrated.") # noqa T201
else:
print(f"{migrated_orgs} orgs migrated!") # noqa T201
print("Done!") # noqa T201

View File

@@ -5,6 +5,7 @@ from typing import Any, Callable, List, Optional, cast
import structlog
from corsheaders.middleware import CorsMiddleware
from django.conf import settings
from django.contrib.sessions.middleware import SessionMiddleware
from django.core.exceptions import MiddlewareNotUsed
from django.db import connection
from django.db.models import QuerySet
@@ -12,22 +13,23 @@ from django.http import HttpRequest, HttpResponse
from django.middleware.csrf import CsrfViewMiddleware
from django.urls import resolve
from django.utils.cache import add_never_cache_headers
from django_prometheus.middleware import PrometheusAfterMiddleware, PrometheusBeforeMiddleware, Metrics
from django_prometheus.middleware import Metrics, PrometheusAfterMiddleware, PrometheusBeforeMiddleware
from rest_framework import status
from statshog.defaults.django import statsd
from posthog.api.capture import get_event
from posthog.api.decide import get_decide
from posthog.clickhouse.client.execute import clickhouse_query_counter
from posthog.clickhouse.query_tagging import QueryCounter, reset_query_tags, tag_queries
from posthog.cloud_utils import is_cloud
from posthog.exceptions import generate_exception_response
from posthog.metrics import LABEL_TEAM_ID
from posthog.models import Action, Cohort, Dashboard, FeatureFlag, Insight, Team, User
from posthog.rate_limit import DecideRateThrottle
from posthog.settings import SITE_URL
from posthog.settings.statsd import STATSD_HOST
from posthog.user_permissions import UserPermissions
from posthog.utils import cors_response
from rest_framework import status
from .auth import PersonalAPIKeyAuthentication
@@ -43,6 +45,17 @@ ALWAYS_ALLOWED_ENDPOINTS = [
"_health",
]
default_cookie_options = {
"max_age": 365 * 24 * 60 * 60, # one year
"expires": None,
"path": "/",
"domain": "posthog.com",
"secure": True,
"samesite": "Strict",
}
cookie_api_paths_to_ignore = {"e", "s", "capture", "batch", "decide", "api", "track"}
class AllowIPMiddleware:
trusted_proxies: List[str] = []
@@ -478,3 +491,61 @@ class PrometheusAfterMiddlewareWithTeamIds(PrometheusAfterMiddleware):
new_labels = {LABEL_TEAM_ID: team_id}
new_labels.update(labels)
return super().label_metric(metric, request, response=response, **new_labels)
# NOTE(review): indentation was lost in this diff rendering; structure below is
# inferred from control-flow keywords.
class PostHogTokenCookieMiddleware(SessionMiddleware):
"""
Adds secure cookies on the posthog.com domain (current project token, project
name, and instance URL) to enable auto-filling the current project token on
the docs. Cloud-only: a no-op when is_cloud() is false.
"""
def process_response(self, request, response):
response = super().process_response(request, response)
if not is_cloud():
return response
# skip adding the cookie on API requests
# request.path starts with "/", so split("/")[1] is the first path segment.
split_request_path = request.path.split("/")
if len(split_request_path) and split_request_path[1] in cookie_api_paths_to_ignore:
return response
if request.path.startswith("/logout"):
# clears the cookies that were previously set
response.delete_cookie("ph_current_project_token", domain=default_cookie_options["domain"])
response.delete_cookie("ph_current_project_name", domain=default_cookie_options["domain"])
response.delete_cookie("ph_current_instance", domain=default_cookie_options["domain"])
# NOTE(review): on /logout the cookies may be re-set immediately below if the
# user is still authenticated on this response — confirm intended.
if request.user and request.user.is_authenticated and request.user.team:
# NOTE(review): max_age is hard-coded to one year in each call instead of
# reusing default_cookie_options["max_age"] — keep in sync.
response.set_cookie(
key="ph_current_project_token",
value=request.user.team.api_token,
max_age=365 * 24 * 60 * 60,
expires=default_cookie_options["expires"],
path=default_cookie_options["path"],
domain=default_cookie_options["domain"],
secure=default_cookie_options["secure"],
samesite=default_cookie_options["samesite"],
)
response.set_cookie(
key="ph_current_project_name", # clarify which project is active (orgs can have multiple projects)
# Re-encoded because HTTP header values are latin-1; non-latin-1 project
# names would raise here — presumably acceptable; TODO confirm.
value=request.user.team.name.encode("utf-8").decode("latin-1"),
max_age=365 * 24 * 60 * 60,
expires=default_cookie_options["expires"],
path=default_cookie_options["path"],
domain=default_cookie_options["domain"],
secure=default_cookie_options["secure"],
samesite=default_cookie_options["samesite"],
)
response.set_cookie(
key="ph_current_instance", # which PostHog instance (SITE_URL) these cookies refer to
value=SITE_URL,
max_age=365 * 24 * 60 * 60,
expires=default_cookie_options["expires"],
path=default_cookie_options["path"],
domain=default_cookie_options["domain"],
secure=default_cookie_options["secure"],
samesite=default_cookie_options["samesite"],
)
return response

View File

@@ -1,6 +1,5 @@
# Generated by Django 3.0.6 on 2021-02-25 16:03
from django.conf import settings
from django.contrib.postgres import fields
from django.db import migrations, models
@@ -17,7 +16,7 @@ class Migration(migrations.Migration):
name="plugins_access_level",
field=models.PositiveSmallIntegerField(
choices=[(0, "none"), (3, "config"), (6, "install"), (9, "root")],
default=0 if settings.MULTI_TENANCY else 9,
default=9,
),
),
migrations.AddField(

View File

@@ -1,6 +1,5 @@
# Generated by Django 3.0.11 on 2021-03-19 13:07
from django.conf import settings
from django.db import migrations, models
@@ -16,7 +15,7 @@ class Migration(migrations.Migration):
name="plugins_access_level",
field=models.PositiveSmallIntegerField(
choices=[(0, "none"), (3, "config"), (6, "install"), (9, "root")],
default=3 if settings.MULTI_TENANCY else 9,
default=9,
),
),
]

View File

@@ -1,7 +1,8 @@
# Generated by Django 3.2.16 on 2023-04-27 09:04
from django.db import migrations, models
import django.db.models.deletion
from django.db import migrations, models
import posthog.models.utils

View File

@@ -1,7 +1,7 @@
# Generated by Django 3.2.16 on 2023-05-11 11:04
from django.db import migrations, models
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):

View File

@@ -158,11 +158,6 @@ class Organization(UUIDModel):
# Demo gets all features
if settings.DEMO or "generate_demo_data" in sys.argv[1:2]:
return (License.ENTERPRISE_PLAN, "demo")
# If on Cloud, grab the organization's price
if hasattr(self, "billing"):
if self.billing is None: # type: ignore
return (None, None)
return (self.billing.get_plan_key(), "cloud") # type: ignore
# Otherwise, try to find a valid license on this instance
if License is not None:
license = License.objects.first_valid()
@@ -170,42 +165,35 @@ class Organization(UUIDModel):
return (license.plan, "ee")
return (None, None)
@property
def billing_plan(self) -> Optional[str]:
return self._billing_plan_details[0]
def update_available_features(self) -> List[Union[AvailableFeature, str]]:
"""Updates field `available_features`. Does not `save()`."""
# TODO BW: Get available features from billing service
if self.has_billing_v2_setup:
# Usage indicates billing V2 - we don't update billing as that is done
# whenever the billing service is called
if is_cloud() or self.usage:
# Since billing V2 we just use the available features which are updated when the billing service is called
return self.available_features
plan, realm = self._billing_plan_details
if not plan:
try:
from ee.models.license import License
except ImportError:
self.available_features = []
elif realm in ("ee", "demo"):
try:
from ee.models.license import License
except ImportError:
License = None # type: ignore
self.available_features = License.PLANS.get(plan, [])
return []
self.available_features = []
# Self hosted legacy license so we just sync the license features
# Demo gets all features
if settings.DEMO or "generate_demo_data" in sys.argv[1:2]:
self.available_features = License.PLANS.get(License.ENTERPRISE_PLAN, [])
else:
self.available_features = self.billing.available_features # type: ignore
# Otherwise, try to find a valid license on this instance
license = License.objects.first_valid()
if license:
self.available_features = License.PLANS.get(license.plan, [])
return self.available_features
def is_feature_available(self, feature: Union[AvailableFeature, str]) -> bool:
return feature in self.available_features
@property
def has_billing_v2_setup(self):
if hasattr(self, "billing") and self.billing.stripe_subscription_id: # type: ignore
return False
return self.usage is not None
@property
def active_invites(self) -> QuerySet:
return self.invites.filter(created_at__gte=timezone.now() - timezone.timedelta(days=INVITE_DAYS_VALIDITY))

View File

@@ -68,10 +68,8 @@ class TestOrganization(BaseTest):
new_org.available_features = ["test1", "test2"]
new_org.update_available_features()
assert new_org.available_features == []
assert not new_org.has_billing_v2_setup
new_org.available_features = ["test1", "test2"]
new_org.usage = {"events": {"usage": 1000, "limit": None}}
new_org.update_available_features()
assert new_org.available_features == ["test1", "test2"]
assert new_org.has_billing_v2_setup

View File

@@ -28,7 +28,6 @@ class TestUser(BaseTest):
"project_count": 1,
"team_member_count_all": 1,
"completed_onboarding_once": False,
"billing_plan": None,
"organization_id": str(organization.id),
"project_id": str(team.uuid),
"project_setup_complete": False,
@@ -64,7 +63,6 @@ class TestUser(BaseTest):
"project_count": 2,
"team_member_count_all": 2,
"completed_onboarding_once": True,
"billing_plan": None,
"organization_id": str(self.organization.id),
"project_id": str(self.team.uuid),
"project_setup_complete": True,

View File

@@ -271,7 +271,6 @@ class User(AbstractUser, UUIDClassicModel):
completed_snippet_onboarding=True, ingested_event=True
).exists(), # has completed the onboarding at least for one project
# properties dependent on current project / org below
"billing_plan": self.organization.billing_plan if self.organization else None,
"organization_id": str(self.organization.id) if self.organization else None,
"project_id": str(self.team.uuid) if self.team else None,
"project_setup_complete": project_setup_complete,

View File

@@ -90,10 +90,6 @@ HOOK_EVENTS: Dict[str, str] = {}
# Support creating multiple organizations in a single instance. Requires a premium license.
MULTI_ORG_ENABLED = get_from_env("MULTI_ORG_ENABLED", False, type_cast=str_to_bool)
# DEPRECATED - replaced by cloud license
# Overriden by posthog-cloud
MULTI_TENANCY = False
BILLING_V2_ENABLED = get_from_env("BILLING_V2_ENABLED", False, type_cast=str_to_bool)
AUTO_LOGIN = get_from_env("AUTO_LOGIN", False, type_cast=str_to_bool)
@@ -106,5 +102,6 @@ PROM_PUSHGATEWAY_ADDRESS = os.getenv("PROM_PUSHGATEWAY_ADDRESS", None)
if "ee.apps.EnterpriseConfig" in INSTALLED_APPS:
from ee.settings import * # noqa: F401, F403
# TODO: We can remove this line and essentially disable all posthog-cloud specific code before we remove the image all-together
# Lastly, cloud settings override and modify all
from posthog.settings.cloud import * # noqa: F401, E402

View File

@@ -1,16 +1 @@
# Overridden in posthog-cloud
import sys
import structlog
from posthog.settings.utils import get_from_env, str_to_bool
logger = structlog.get_logger(__name__)
# TODO BW: Before we can remove posthog-cloud we need to remove this file
# Early exit to avoid issues with cloud not being properly included
if get_from_env("MULTI_TENANCY", False, type_cast=str_to_bool):
logger.critical(("Environment variable MULTI_TENANCY is set, but cloud settings have not been included",))
sys.exit("[ERROR] Stopping Django server…\n")
# NOTE: This file is empty and is overwritten by posthog-cloud. Once we move away from it this file can be deleted.

View File

@@ -92,8 +92,10 @@ MIDDLEWARE = [
"posthog.middleware.AutoProjectMiddleware",
"posthog.middleware.CHQueries",
"posthog.middleware.PrometheusAfterMiddlewareWithTeamIds",
"posthog.middleware.PostHogTokenCookieMiddleware",
]
if STATSD_HOST is not None:
MIDDLEWARE.insert(0, "django_statsd.middleware.StatsdMiddleware")
MIDDLEWARE.append("django_statsd.middleware.StatsdMiddlewareTimer")

View File

@@ -22,7 +22,7 @@ from posthog import rate_limit
from posthog.clickhouse.client import sync_execute
from posthog.clickhouse.client.connection import ch_pool
from posthog.clickhouse.plugin_log_entries import TRUNCATE_PLUGIN_LOG_ENTRIES_TABLE_SQL
from posthog.cloud_utils import TEST_clear_cloud_cache
from posthog.cloud_utils import TEST_clear_cloud_cache, is_cloud
from posthog.models import Dashboard, DashboardTile, Insight, Organization, Team, User
from posthog.models.cohort.sql import TRUNCATE_COHORTPEOPLE_TABLE_SQL
from posthog.models.event.sql import DISTRIBUTED_EVENTS_TABLE_SQL, DROP_EVENTS_TABLE_SQL, EVENTS_TABLE_SQL
@@ -202,9 +202,14 @@ class BaseTest(TestMixin, ErrorResponsesMixin, TestCase):
Read more: https://docs.djangoproject.com/en/3.1/topics/testing/tools/#testcase
"""
@contextmanager
def is_cloud(self, value: bool):
TEST_clear_cloud_cache()
return self.settings(MULTI_TENANCY=value)
previous_value = is_cloud()
try:
TEST_clear_cloud_cache(value)
yield value
finally:
TEST_clear_cloud_cache(previous_value)
class NonAtomicBaseTest(TestMixin, ErrorResponsesMixin, TransactionTestCase):
@@ -224,11 +229,15 @@ class APIBaseTest(TestMixin, ErrorResponsesMixin, DRFTestCase):
Functional API tests using Django REST Framework test suite.
"""
initial_cloud_mode: Optional[bool] = False
def setUp(self):
super().setUp()
# Clear the cached "is_cloud" setting so that it's recalculated for each test
TEST_clear_cloud_cache()
TEST_clear_cloud_cache(self.initial_cloud_mode)
# Sets the cloud mode to stabilise tests, especially num query counts
# Clear the is_rate_limit lru_caches so that they do not flap in test snapshots
rate_limit.is_rate_limit_enabled.cache_clear()
rate_limit.get_team_allow_list.cache_clear()
@@ -246,9 +255,15 @@ class APIBaseTest(TestMixin, ErrorResponsesMixin, DRFTestCase):
with assert_faster_than(duration_ms):
yield
@contextmanager
def is_cloud(self, value: bool):
TEST_clear_cloud_cache()
return self.settings(MULTI_TENANCY=value)
# Typically the is_cloud setting is controlled by License but we need to be able to override it for tests
previous_value = is_cloud()
try:
TEST_clear_cloud_cache(value)
yield value
finally:
TEST_clear_cloud_cache(previous_value)
def stripResponse(response, remove=("action", "label", "persons_urls", "filter")):

View File

@@ -14,53 +14,22 @@ class TestCloudUtils(BaseTest):
@pytest.mark.ee
def test_is_cloud_returns_correctly(self):
TEST_clear_cloud_cache()
with self.settings(MULTI_TENANCY=True):
assert is_cloud() is True
TEST_clear_cloud_cache()
with self.settings(MULTI_TENANCY=False):
assert not is_cloud()
TEST_clear_cloud_cache()
with self.settings(MULTI_TENANCY=False):
assert not is_cloud()
TEST_clear_cloud_cache()
with self.settings(MULTI_TENANCY="True"):
assert is_cloud() is True
TEST_clear_cloud_cache()
with self.settings(MULTI_TENANCY="False"):
assert is_cloud() is False
assert is_cloud() is False
@pytest.mark.ee
def test_is_cloud_checks_license(self):
assert is_cloud() is False
License.objects.create(key="key", plan="cloud", valid_until=datetime.now() + timedelta(days=30))
TEST_clear_cloud_cache()
with self.settings(MULTI_TENANCY=None):
assert is_cloud()
@pytest.mark.ee
def test_is_cloud_prefers_env_var(self):
License.objects.create(key="key", plan="enterprise", valid_until=datetime.now() + timedelta(days=30))
TEST_clear_cloud_cache()
with self.settings(MULTI_TENANCY=True):
assert is_cloud()
assert is_cloud()
@pytest.mark.ee
def test_is_cloud_caches_result(self):
TEST_clear_cloud_cache()
assert not is_cloud()
assert not is_cloud()
with self.settings(MULTI_TENANCY=True):
assert is_cloud()
License.objects.create(key="key", plan="cloud", valid_until=datetime.now() + timedelta(days=30))
with self.settings(MULTI_TENANCY=False):
assert is_cloud()
License.objects.create(key="key", plan="enterprise", valid_until=datetime.now() + timedelta(days=30))
with self.settings(MULTI_TENANCY=None):
assert is_cloud()
assert not is_cloud()

View File

@@ -1,9 +1,12 @@
from django.conf import settings
import json
from urllib.parse import quote
from rest_framework import status
from django.test.client import Client
from posthog.models import Action, Cohort, Dashboard, FeatureFlag, Insight
from posthog.models.organization import Organization
from posthog.models.team import Team
from posthog.settings import SITE_URL
from posthog.test.base import APIBaseTest, override_settings
@@ -92,9 +95,7 @@ class TestAutoProjectMiddleware(APIBaseTest):
@classmethod
def setUpTestData(cls):
super().setUpTestData()
cls.base_app_num_queries = 42
if settings.MULTI_TENANCY:
cls.base_app_num_queries += 2
cls.base_app_num_queries = 41
# Create another team that the user does have access to
cls.second_team = Team.objects.create(organization=cls.organization, name="Second Life")
@@ -245,3 +246,126 @@ class TestAutoProjectMiddleware(APIBaseTest):
self.assertEqual(response_app.status_code, 200)
self.assertEqual(response_users_api.status_code, 200)
self.assertEqual(response_users_api_data.get("team", {}).get("id"), self.team.id)
class TestPostHogTokenCookieMiddleware(APIBaseTest):
    """Tests for PostHogTokenCookieMiddleware.

    The middleware sets the ``ph_current_project_token``,
    ``ph_current_project_name`` and ``ph_current_instance`` cookies on app
    page loads, skips them on capture/API endpoints, and expires them on
    logout. NOTE(review): the expected cookie attributes below (domain,
    secure, max-age, …) mirror values hard-coded in the middleware — confirm
    against posthog/middleware.py if these assertions start failing.
    """

    # Cookies are only set in cloud mode, so force it on for this test class.
    initial_cloud_mode = True
    CONFIG_AUTO_LOGIN = False

    # Attributes the middleware applies to every ph_current_* cookie.
    EXPECTED_COOKIE_ATTRIBUTES = {
        "path": "/",
        "samesite": "Strict",
        "httponly": "",
        "domain": "posthog.com",
        "comment": "",
        "secure": True,
        "max-age": 31536000,
    }

    def _assert_ph_cookie(self, response, name, expected_value):
        """Assert `name` is present in `response.cookies` with `expected_value`
        and the standard middleware cookie attributes."""
        cookie = response.cookies[name]
        self.assertEqual(cookie.key, name)
        self.assertEqual(cookie.value, expected_value)
        for attribute, expected in self.EXPECTED_COOKIE_ATTRIBUTES.items():
            self.assertEqual(cookie[attribute], expected)

    def test_logged_out_client(self):
        # An anonymous page load must not set any cookies at all.
        self.client.logout()
        response = self.client.get("/")
        self.assertEqual(0, len(response.cookies))

    def test_logged_in_client(self):
        # A logged-in app page load sets all three ph_current_* cookies.
        self.client.force_login(self.user)

        response = self.client.get("/")
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        self._assert_ph_cookie(response, "ph_current_project_token", self.team.api_token)
        self._assert_ph_cookie(response, "ph_current_project_name", self.team.name)
        self._assert_ph_cookie(response, "ph_current_instance", SITE_URL)

    def test_ph_project_cookies_are_not_set_on_capture_or_api_endpoints(self):
        # Anonymous capture endpoint (/e/) must not set any cookies.
        self.client.logout()
        data = {
            "event": "user did custom action",
            "properties": {"distinct_id": 2, "token": self.team.api_token},
        }
        response = self.client.get(
            "/e/?data=%s" % quote(json.dumps(data)),
            HTTP_ORIGIN="https://localhost",
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(0, len(response.cookies))  # no cookies are set

        # /track/ hit with a fresh (cookie-less, unauthenticated) client must
        # not set any cookies either.
        django_client = Client()
        response = django_client.post(
            "/track/",
            {
                "data": json.dumps(
                    [
                        {
                            "event": "beep",
                            "properties": {
                                "distinct_id": "eeee",
                                "token": self.team.api_token,
                            },
                        }
                    ]
                ),
                "api_key": self.team.api_token,
            },
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(0, len(response.cookies))  # no cookies are set

        # Authenticated /api/ requests (GET and PATCH) are also excluded.
        self.client.force_login(self.user)

        response = self.client.get("/api/users/@me/")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(0, len(response.cookies))  # no cookies are set

        response = self.client.patch("/api/users/@me/", {"first_name": "Alice"}, format="json")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(0, len(response.cookies))  # no cookies are set

    def test_logout(self):
        self.client.force_login(self.user)

        # Sanity check: cookies are present while logged in.
        response = self.client.get("/")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.cookies["ph_current_project_token"].key, "ph_current_project_token")
        self.assertEqual(response.cookies["ph_current_project_token"].value, self.team.api_token)
        self.assertEqual(response.cookies["ph_current_project_token"]["max-age"], 31536000)
        self.assertEqual(response.cookies["ph_current_project_name"].key, "ph_current_project_name")
        self.assertEqual(response.cookies["ph_current_project_name"].value, self.team.name)
        self.assertEqual(response.cookies["ph_current_project_name"]["max-age"], 31536000)

        response = self.client.get("/logout")
        # Check that the local cookies will be removed by having 'expires' in the past
        epoch = "Thu, 01 Jan 1970 00:00:00 GMT"
        for cookie_name in ("ph_current_project_token", "ph_current_project_name", "ph_current_instance"):
            self.assertEqual(response.cookies[cookie_name]["expires"], epoch)

        # Request a page after logging out: the cookies must not reappear.
        response = self.client.get("/")
        self.assertNotIn("ph_current_project_token", response.cookies)
        self.assertNotIn("ph_current_project_name", response.cookies)
        self.assertNotIn("ph_current_instance", response.cookies)

View File

@@ -2,14 +2,15 @@ from typing import Any, Callable, List, Optional, cast
from urllib.parse import urlparse
from django.conf import settings
from django.contrib import admin
from django.http import HttpRequest, HttpResponse, HttpResponseServerError
from django.template import loader
from django.urls import URLPattern, include, path, re_path
from django.views.decorators.csrf import csrf_exempt, ensure_csrf_cookie, requires_csrf_token
from django_prometheus.exports import ExportToDjangoView
from drf_spectacular.views import SpectacularAPIView, SpectacularRedocView, SpectacularSwaggerView
from sentry_sdk import last_event_id
from two_factor.urls import urlpatterns as tf_urls
from django.template import loader
from posthog.api import (
api_not_found,
authentication,
@@ -27,12 +28,10 @@ from posthog.api import (
uploaded_media,
user,
)
from sentry_sdk import last_event_id
from posthog.api.decide import hostname_in_allowed_url_list
from posthog.api.prompt import prompt_webhook
from posthog.api.early_access_feature import early_access_features
from posthog.api.prompt import prompt_webhook
from posthog.api.survey import surveys
from posthog.cloud_utils import is_cloud
from posthog.demo.legacy import demo_route
from posthog.models import User
@@ -56,22 +55,6 @@ else:
)
try:
# See https://github.com/PostHog/posthog-cloud/blob/master/multi_tenancy/router.py
from multi_tenancy.router import extend_api_router as extend_api_router_cloud # noqa
except ImportError:
pass
else:
extend_api_router_cloud(router, organizations_router=organizations_router, projects_router=projects_router)
# The admin interface is disabled on self-hosted instances, as its misuse can be unsafe
admin_urlpatterns = (
[path("admin/", include("loginas.urls")), path("admin/", admin.site.urls)]
if is_cloud() or settings.DEMO or settings.DEBUG
else []
)
@requires_csrf_token
def handler500(request):
"""
@@ -140,8 +123,6 @@ urlpatterns = [
opt_slash_path("_preflight", preflight_check),
# ee
*ee_urlpatterns,
# admin
*admin_urlpatterns,
# api
path("api/unsubscribe", unsubscribe.unsubscribe),
path("api/", include(router.urls)),