Prechádzať zdrojové kódy

delete lighthouse and package size benchmarks (#5230)

Khaleel Al-Adhami 1 týždeň pred
rodič
commit
bc16bbf211

+ 0 - 122
.github/workflows/benchmarks.yml

@@ -1,122 +0,0 @@
-name: benchmarking
-
-on:
-  pull_request:
-    types:
-      - closed
-    paths-ignore:
-      - "**/*.md"
-
-permissions:
-  contents: read
-
-defaults:
-  run:
-    shell: bash
-
-env:
-  PYTHONIOENCODING: "utf8"
-  TELEMETRY_ENABLED: false
-  NODE_OPTIONS: "--max_old_space_size=8192"
-  PR_TITLE: ${{ github.event.pull_request.title }}
-
-jobs:
-  reflex-web:
-    #    if: github.event.pull_request.merged == true
-    strategy:
-      fail-fast: false
-
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - uses: ./.github/actions/setup_build_env
-        with:
-          python-version: 3.13
-          run-uv-sync: true
-
-      - name: Clone Reflex Website Repo
-        uses: actions/checkout@v4
-        with:
-          repository: reflex-dev/reflex-web
-          ref: main
-          path: reflex-web
-
-      - name: Install Requirements for reflex-web
-        working-directory: ./reflex-web
-        run: uv pip install -r requirements.txt
-      - name: Init Website for reflex-web
-        working-directory: ./reflex-web
-        run: uv run reflex init
-      - name: Install LightHouse Pre-reqs / Run LightHouse
-        run: |
-          # Check that npm is home
-          npm -v
-          uv run bash benchmarks/lighthouse.sh ./reflex-web prod
-        env:
-          LHCI_GITHUB_APP_TOKEN: ${{ secrets.LHCI_GITHUB_APP_TOKEN }}
-      - name: Run Benchmarks
-        # Only run if the database creds are available in this context.
-        run: uv run python benchmarks/benchmark_lighthouse.py "$GITHUB_SHA" ./integration/benchmarks/.lighthouseci
-        env:
-          GITHUB_SHA: ${{ github.sha }}
-
-  reflex-dist-size: # This job is used to calculate the size of the Reflex distribution (wheel file)
-    if: github.event.pull_request.merged == true
-    timeout-minutes: 30
-    strategy:
-      # Prioritize getting more information out of the workflow (even if something fails)
-      fail-fast: false
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - uses: ./.github/actions/setup_build_env
-        with:
-          python-version: 3.13
-          run-uv-sync: true
-
-      - name: Build reflex
-        run: |
-          uv build
-      - name: Upload benchmark results
-        # Only run if the database creds are available in this context.
-        run:
-          uv run python benchmarks/benchmark_package_size.py --os ubuntu-latest
-          --python-version 3.13 --commit-sha "${{ github.sha }}" --pr-id "${{ github.event.pull_request.id }}"
-          --branch-name "${{ github.head_ref || github.ref_name }}"
-          --path ./dist
-
-  reflex-venv-size: # This job calculates the total size of Reflex and its dependencies
-    if: github.event.pull_request.merged == true
-    timeout-minutes: 30
-    strategy:
-      # Prioritize getting more information out of the workflow (even if something fails)
-      fail-fast: false
-      matrix:
-        # Show OS combos first in GUI
-        os: [ubuntu-latest, windows-latest, macos-latest]
-
-    runs-on: ${{ matrix.os }}
-    steps:
-      - uses: actions/checkout@v4
-      - name: Set up python
-        id: setup-python
-        uses: actions/setup-python@v5
-        with:
-          python-version: 3.13
-      - name: Install UV
-        uses: astral-sh/setup-uv@v5
-        with:
-          version: 0.6.9
-
-      - name: Run uv sync
-        shell: bash
-        run: |
-          uv sync --no-install-project --no-dev
-
-      - name: calculate and upload size
-        run:
-          uv run python benchmarks/benchmark_package_size.py --os "${{ matrix.os }}"
-          --python-version "3.13" --commit-sha "${{ github.sha }}"
-          --pr-id "${{ github.event.pull_request.id }}"
-          --branch-name "${{ github.head_ref || github.ref_name }}"
-          --path ./.venv

+ 0 - 3
benchmarks/__init__.py

@@ -1,3 +0,0 @@
-"""Reflex benchmarks."""
-
-WINDOWS_SKIP_REASON = "Takes too much time as a result of npm"

+ 0 - 147
benchmarks/benchmark_compile_times.py

@@ -1,147 +0,0 @@
-"""Extracts the compile times from the JSON files in the specified directory and inserts them into the database."""
-
-from __future__ import annotations
-
-import argparse
-import json
-import os
-from pathlib import Path
-
-from utils import send_data_to_posthog
-
-
-def extract_stats_from_json(json_file: str) -> list[dict]:
-    """Extracts the stats from the JSON data and returns them as a list of dictionaries.
-
-    Args:
-        json_file: The JSON file to extract the stats data from.
-
-    Returns:
-        list[dict]: The stats for each test.
-    """
-    with Path(json_file).open() as file:
-        json_data = json.load(file)
-
-    # Load the JSON data if it is a string, otherwise assume it's already a dictionary
-    data = json.loads(json_data) if isinstance(json_data, str) else json_data
-
-    # Initialize an empty list to store the stats for each test
-    test_stats = []
-
-    # Iterate over each test in the 'benchmarks' list
-    for test in data.get("benchmarks", []):
-        group = test.get("group", None)
-        stats = test.get("stats", {})
-        full_name = test.get("fullname")
-        file_name = (
-            full_name.split("/")[-1].split("::")[0].strip(".py") if full_name else None
-        )
-        test_name = test.get("name", "Unknown Test")
-
-        test_stats.append(
-            {
-                "test_name": test_name,
-                "group": group,
-                "stats": stats,
-                "full_name": full_name,
-                "file_name": file_name,
-            }
-        )
-    return test_stats
-
-
-def insert_benchmarking_data(
-    os_type_version: str,
-    python_version: str,
-    performance_data: list[dict],
-    commit_sha: str,
-    pr_title: str,
-    branch_name: str,
-    event_type: str,
-    pr_id: str,
-):
-    """Insert the benchmarking data into the database.
-
-    Args:
-        os_type_version: The OS type and version to insert.
-        python_version: The Python version to insert.
-        performance_data: The performance data of reflex web to insert.
-        commit_sha: The commit SHA to insert.
-        pr_title: The PR title to insert.
-        branch_name: The name of the branch.
-        event_type: Type of github event(push, pull request, etc).
-        pr_id: Id of the PR.
-    """
-    # Prepare the event data
-    properties = {
-        "os": os_type_version,
-        "python_version": python_version,
-        "distinct_id": commit_sha,
-        "pr_title": pr_title,
-        "branch_name": branch_name,
-        "event_type": event_type,
-        "performance": performance_data,
-        "pr_id": pr_id,
-    }
-
-    send_data_to_posthog("simple_app_benchmark", properties)
-
-
-def main():
-    """Runs the benchmarks and inserts the results."""
-    # Get the commit SHA and JSON directory from the command line arguments
-    parser = argparse.ArgumentParser(description="Run benchmarks and process results.")
-    parser.add_argument(
-        "--os", help="The OS type and version to insert into the database."
-    )
-    parser.add_argument(
-        "--python-version", help="The Python version to insert into the database."
-    )
-    parser.add_argument(
-        "--commit-sha", help="The commit SHA to insert into the database."
-    )
-    parser.add_argument(
-        "--benchmark-json",
-        help="The JSON file containing the benchmark results.",
-    )
-    parser.add_argument(
-        "--pr-title",
-        help="The PR title to insert into the database.",
-    )
-    parser.add_argument(
-        "--branch-name",
-        help="The current branch",
-        required=True,
-    )
-    parser.add_argument(
-        "--event-type",
-        help="The github event type",
-        required=True,
-    )
-    parser.add_argument(
-        "--pr-id",
-        help="ID of the PR.",
-        required=True,
-    )
-    args = parser.parse_args()
-
-    # Get the PR title from env or the args. For the PR merge or push event, there is no PR title, leaving it empty.
-    pr_title = args.pr_title or os.getenv("PR_TITLE", "")
-
-    # Get the results of pytest benchmarks
-    cleaned_benchmark_results = extract_stats_from_json(args.benchmark_json)
-    # Insert the data into the database
-    insert_benchmarking_data(
-        os_type_version=args.os,
-        python_version=args.python_version,
-        performance_data=cleaned_benchmark_results,
-        commit_sha=args.commit_sha,
-        pr_title=pr_title,
-        branch_name=args.branch_name,
-        event_type=args.event_type,
-        pr_id=args.pr_id,
-    )
-
-
-if __name__ == "__main__":
-    main()

+ 0 - 128
benchmarks/benchmark_imports.py

@@ -1,128 +0,0 @@
-"""Extract and upload benchmarking data to PostHog."""
-
-from __future__ import annotations
-
-import argparse
-import json
-import os
-from pathlib import Path
-
-from utils import send_data_to_posthog
-
-
-def extract_stats_from_json(json_file: str) -> dict:
-    """Extracts the stats from the JSON data and returns them as dictionaries.
-
-    Args:
-        json_file: The JSON file to extract the stats data from.
-
-    Returns:
-        dict: The stats for each test.
-    """
-    with Path(json_file).open() as file:
-        json_data = json.load(file)
-
-    # Load the JSON data if it is a string, otherwise assume it's already a dictionary
-    data = json.loads(json_data) if isinstance(json_data, str) else json_data
-
-    result = data.get("results", [{}])[0]
-    return {
-        k: v
-        for k, v in result.items()
-        if k in ("mean", "stddev", "median", "min", "max")
-    }
-
-
-def insert_benchmarking_data(
-    os_type_version: str,
-    python_version: str,
-    performance_data: dict,
-    commit_sha: str,
-    pr_title: str,
-    branch_name: str,
-    pr_id: str,
-    app_name: str,
-):
-    """Insert the benchmarking data into the database.
-
-    Args:
-        os_type_version: The OS type and version to insert.
-        python_version: The Python version to insert.
-        performance_data: The imports performance data to insert.
-        commit_sha: The commit SHA to insert.
-        pr_title: The PR title to insert.
-        branch_name: The name of the branch.
-        pr_id: Id of the PR.
-        app_name: The name of the app being measured.
-    """
-    properties = {
-        "os": os_type_version,
-        "python_version": python_version,
-        "distinct_id": commit_sha,
-        "pr_title": pr_title,
-        "branch_name": branch_name,
-        "pr_id": pr_id,
-        "performance": performance_data,
-        "app_name": app_name,
-    }
-
-    send_data_to_posthog("import_benchmark", properties)
-
-
-def main():
-    """Runs the benchmarks and inserts the results."""
-    # Get the commit SHA and JSON directory from the command line arguments
-    parser = argparse.ArgumentParser(description="Run benchmarks and process results.")
-    parser.add_argument(
-        "--os", help="The OS type and version to insert into the database."
-    )
-    parser.add_argument(
-        "--python-version", help="The Python version to insert into the database."
-    )
-    parser.add_argument(
-        "--commit-sha", help="The commit SHA to insert into the database."
-    )
-    parser.add_argument(
-        "--benchmark-json",
-        help="The JSON file containing the benchmark results.",
-    )
-    parser.add_argument(
-        "--pr-title",
-        help="The PR title to insert into the database.",
-    )
-    parser.add_argument(
-        "--branch-name",
-        help="The current branch",
-        required=True,
-    )
-    parser.add_argument(
-        "--app-name",
-        help="The name of the app measured.",
-        required=True,
-    )
-    parser.add_argument(
-        "--pr-id",
-        help="ID of the PR.",
-        required=True,
-    )
-    args = parser.parse_args()
-
-    # Get the PR title from env or the args. For the PR merge or push event, there is no PR title, leaving it empty.
-    pr_title = args.pr_title or os.getenv("PR_TITLE", "")
-
-    cleaned_benchmark_results = extract_stats_from_json(args.benchmark_json)
-    # Insert the data into the database
-    insert_benchmarking_data(
-        os_type_version=args.os,
-        python_version=args.python_version,
-        performance_data=cleaned_benchmark_results,
-        commit_sha=args.commit_sha,
-        pr_title=pr_title,
-        branch_name=args.branch_name,
-        app_name=args.app_name,
-        pr_id=args.pr_id,
-    )
-
-
-if __name__ == "__main__":
-    main()

+ 0 - 75
benchmarks/benchmark_lighthouse.py

@@ -1,75 +0,0 @@
-"""Extracts the Lighthouse scores from the JSON files in the specified directory and inserts them into the database."""
-
-from __future__ import annotations
-
-import json
-import sys
-from pathlib import Path
-
-from utils import send_data_to_posthog
-
-
-def insert_benchmarking_data(
-    lighthouse_data: dict,
-    commit_sha: str,
-):
-    """Insert the benchmarking data into the database.
-
-    Args:
-        lighthouse_data: The Lighthouse data to insert.
-        commit_sha: The commit SHA to insert.
-    """
-    properties = {
-        "distinct_id": commit_sha,
-        "lighthouse_data": lighthouse_data,
-    }
-
-    # Send the data to PostHog
-    send_data_to_posthog("lighthouse_benchmark", properties)
-
-
-def get_lighthouse_scores(directory_path: str | Path) -> dict:
-    """Extracts the Lighthouse scores from the JSON files in the specified directory.
-
-    Args:
-        directory_path (str): The path to the directory containing the JSON files.
-
-    Returns:
-        dict: The Lighthouse scores.
-    """
-    scores = {}
-    directory_path = Path(directory_path)
-    try:
-        for filename in directory_path.iterdir():
-            if filename.suffix == ".json" and filename.stem != "manifest":
-                data = json.loads(filename.read_text())
-                # Extract scores and add them to the dictionary with the filename as key
-                scores[data["finalUrl"].replace("http://localhost:3000/", "/")] = {
-                    "performance_score": data["categories"]["performance"]["score"],
-                    "accessibility_score": data["categories"]["accessibility"]["score"],
-                    "best_practices_score": data["categories"]["best-practices"][
-                        "score"
-                    ],
-                    "seo_score": data["categories"]["seo"]["score"],
-                }
-    except Exception as e:
-        return {"error": e}
-
-    return scores
-
-
-def main():
-    """Runs the benchmarks and inserts the results into the database."""
-    # Get the commit SHA and JSON directory from the command line arguments
-    commit_sha = sys.argv[1]
-    json_dir = sys.argv[2]
-
-    # Get the Lighthouse scores
-    lighthouse_scores = get_lighthouse_scores(json_dir)
-
-    # Insert the data into the database
-    insert_benchmarking_data(lighthouse_scores, commit_sha)
-
-
-if __name__ == "__main__":
-    main()

+ 0 - 135
benchmarks/benchmark_package_size.py

@@ -1,135 +0,0 @@
-"""Checks the size of a specific directory and uploads result to Posthog."""
-
-import argparse
-import os
-from pathlib import Path
-
-from utils import get_directory_size, get_python_version, send_data_to_posthog
-
-
-def get_package_size(venv_path: Path, os_name):
-    """Get the size of a specified package.
-
-    Args:
-        venv_path: The path to the venv.
-        os_name: Name of os.
-
-    Returns:
-        The total size of the package in bytes.
-
-    Raises:
-        ValueError: when venv does not exist or python version is None.
-    """
-    python_version = get_python_version(venv_path, os_name)
-    print("Python version:", python_version)
-    if python_version is None:
-        raise ValueError("Error: Failed to determine Python version.")
-
-    is_windows = "windows" in os_name
-
-    package_dir: Path = (
-        venv_path / "lib" / f"python{python_version}" / "site-packages"
-        if not is_windows
-        else venv_path / "Lib" / "site-packages"
-    )
-    if not package_dir.exists():
-        raise ValueError(
-            "Error: Virtual environment does not exist or is not activated."
-        )
-
-    total_size = get_directory_size(package_dir)
-    return total_size
-
-
-def insert_benchmarking_data(
-    os_type_version: str,
-    python_version: str,
-    commit_sha: str,
-    pr_title: str,
-    branch_name: str,
-    pr_id: str,
-    path: str,
-):
-    """Insert the benchmarking data into PostHog.
-
-    Args:
-        os_type_version: The OS type and version to insert.
-        python_version: The Python version to insert.
-        commit_sha: The commit SHA to insert.
-        pr_title: The PR title to insert.
-        branch_name: The name of the branch.
-        pr_id: The id of the PR.
-        path: The path to the dir or file to check size.
-    """
-    if "./dist" in path:
-        size = get_directory_size(Path(path))
-    else:
-        size = get_package_size(Path(path), os_type_version)
-
-    # Prepare the event data
-    properties = {
-        "path": path,
-        "os": os_type_version,
-        "python_version": python_version,
-        "distinct_id": commit_sha,
-        "pr_title": pr_title,
-        "branch_name": branch_name,
-        "pr_id": pr_id,
-        "size_mb": round(
-            size / (1024 * 1024), 3
-        ),  # save size in MB and round to 3 places
-    }
-
-    send_data_to_posthog("package_size", properties)
-
-
-def main():
-    """Runs the benchmarks and inserts the results."""
-    parser = argparse.ArgumentParser(description="Run benchmarks and process results.")
-    parser.add_argument(
-        "--os", help="The OS type and version to insert into the database."
-    )
-    parser.add_argument(
-        "--python-version", help="The Python version to insert into the database."
-    )
-    parser.add_argument(
-        "--commit-sha", help="The commit SHA to insert into the database."
-    )
-    parser.add_argument(
-        "--pr-title",
-        help="The PR title to insert into the database.",
-    )
-    parser.add_argument(
-        "--branch-name",
-        help="The current branch",
-        required=True,
-    )
-    parser.add_argument(
-        "--pr-id",
-        help="The pr id",
-        required=True,
-    )
-    parser.add_argument(
-        "--path",
-        help="The path to the venv.",
-        required=True,
-    )
-    args = parser.parse_args()
-
-    # Get the PR title from env or the args. For the PR merge or push event, there is no PR title, leaving it empty.
-    pr_title = args.pr_title or os.getenv("PR_TITLE", "")
-
-    # Insert the data into the database
-    insert_benchmarking_data(
-        os_type_version=args.os,
-        python_version=args.python_version,
-        commit_sha=args.commit_sha,
-        pr_title=pr_title,
-        branch_name=args.branch_name,
-        pr_id=args.pr_id,
-        path=args.path,
-    )
-
-
-if __name__ == "__main__":
-    main()

+ 0 - 106
benchmarks/benchmark_web_size.py

@@ -1,106 +0,0 @@
-"""Checks the size of a specific directory and uploads result to Posthog."""
-
-import argparse
-import os
-from pathlib import Path
-
-from utils import get_directory_size, send_data_to_posthog
-
-
-def insert_benchmarking_data(
-    os_type_version: str,
-    python_version: str,
-    app_name: str,
-    commit_sha: str,
-    pr_title: str,
-    branch_name: str,
-    pr_id: str,
-    path: str,
-):
-    """Insert the benchmarking data into PostHog.
-
-    Args:
-        app_name: The name of the app being measured.
-        os_type_version: The OS type and version to insert.
-        python_version: The Python version to insert.
-        commit_sha: The commit SHA to insert.
-        pr_title: The PR title to insert.
-        branch_name: The name of the branch.
-        pr_id: The id of the PR.
-        path: The path to the dir or file to check size.
-    """
-    size = get_directory_size(Path(path))
-
-    # Prepare the event data
-    properties = {
-        "app_name": app_name,
-        "os": os_type_version,
-        "python_version": python_version,
-        "distinct_id": commit_sha,
-        "pr_title": pr_title,
-        "branch_name": branch_name,
-        "pr_id": pr_id,
-        "size_mb": round(
-            size / (1024 * 1024), 3
-        ),  # save size in MB and round to 3 places
-    }
-
-    send_data_to_posthog("web-size", properties)
-
-
-def main():
-    """Runs the benchmarks and inserts the results."""
-    parser = argparse.ArgumentParser(description="Run benchmarks and process results.")
-    parser.add_argument(
-        "--os", help="The OS type and version to insert into the database."
-    )
-    parser.add_argument(
-        "--python-version", help="The Python version to insert into the database."
-    )
-    parser.add_argument(
-        "--commit-sha", help="The commit SHA to insert into the database."
-    )
-    parser.add_argument(
-        "--pr-title",
-        help="The PR title to insert into the database.",
-    )
-    parser.add_argument(
-        "--branch-name",
-        help="The current branch",
-        required=True,
-    )
-    parser.add_argument(
-        "--app-name",
-        help="The name of the app measured.",
-        required=True,
-    )
-    parser.add_argument(
-        "--pr-id",
-        help="The pr id",
-        required=True,
-    )
-    parser.add_argument(
-        "--path",
-        help="The current path to app to check.",
-        required=True,
-    )
-    args = parser.parse_args()
-
-    # Get the PR title from env or the args. For the PR merge or push event, there is no PR title, leaving it empty.
-    pr_title = args.pr_title or os.getenv("PR_TITLE", "")
-
-    # Insert the data into the database
-    insert_benchmarking_data(
-        app_name=args.app_name,
-        os_type_version=args.os,
-        python_version=args.python_version,
-        commit_sha=args.commit_sha,
-        pr_title=pr_title,
-        branch_name=args.branch_name,
-        pr_id=args.pr_id,
-        path=args.path,
-    )
-
-
-if __name__ == "__main__":
-    main()

+ 0 - 20
benchmarks/conftest.py

@@ -1,20 +0,0 @@
-"""Shared conftest for all benchmark tests."""
-
-import pytest
-
-from reflex.testing import AppHarness, AppHarnessProd
-
-
-@pytest.fixture(
-    scope="session", params=[AppHarness, AppHarnessProd], ids=["dev", "prod"]
-)
-def app_harness_env(request):
-    """Parametrize the AppHarness class to use for the test, either dev or prod.
-
-    Args:
-        request: The pytest fixture request object.
-
-    Returns:
-        The AppHarness class to use for the test.
-    """
-    return request.param

+ 0 - 77
benchmarks/lighthouse.sh

@@ -1,77 +0,0 @@
-#!/bin/bash
-
-# Change directory to the first argument passed to the script
-project_dir=$1
-shift
-pushd "$project_dir" || exit 1
-echo "Changed directory to $project_dir"
-
-
-# So we get stdout / stderr from Python ASAP. Without this, delays can be very long (e.g. on Windows, Github Actions)
-export PYTHONUNBUFFERED=1
-
-env_mode=$1
-shift
-check_ports=${1:-3000 8000}
-shift
-
-# Start the server in the background
-export TELEMETRY_ENABLED=false
-reflex run --env "$env_mode" "$@" & pid=$!
-
-# Within the context of this bash, $pid_in_bash is what we need to pass to "kill" on exit
-# This is true on all platforms.
-pid_in_bash=$pid
-trap "kill -INT $pid_in_bash ||:" EXIT
-
-echo "Started server with PID $pid"
-
-# Assume we run from the root of the repo
-popd
-
-# In Windows, our Python script below needs to work with the WINPID
-if [ -f /proc/$pid/winpid ]; then
-  pid=$(cat /proc/$pid/winpid)
-  echo "Windows detected, passing winpid $pid to port waiter"
-fi
-
-python scripts/wait_for_listening_port.py $check_ports --timeout=600 --server-pid "$pid"
-
-
-# Check if something is running on port 3000
-if curl --output /dev/null --silent --head --fail "http://localhost:3000"; then
-  echo "URL exists: http://localhost:3000"
-else
-  echo "URL does not exist: http://localhost:3000"
-fi
-
-mkdir -p ./tests/benchmarks/.lighthouseci
-
-# Create a lighthouserc.js file
-cat << EOF > lighthouserc.js
-module.exports = {
-  ci: {
-    collect: {
-     isSinglePageApplication: true,
-     numberOfRuns: 1,
-     url: ['http://localhost:3000', "http://localhost:3000/docs/getting-started/introduction/", "http://localhost:3000/blog/2023-08-02-seed-annoucement/"]
-    },
-    upload: {
-      target: 'filesystem',
-      "outputDir": "./integration/benchmarks/.lighthouseci"
-    },
-  },
-};
-EOF
-
-# Install and Run LHCI
-npm install -g @lhci/cli
-lhci autorun
-
-# Check to see if the LHCI report is generated
-if [ -d "./integration/benchmarks/.lighthouseci" ] && [ "$(ls -A ./integration/benchmarks/.lighthouseci)" ]; then
-  echo "LHCI report generated"
-else
-  echo "LHCI report not generated"
-  exit 1 # Exits the script with a status of 1, which will cause the GitHub Action to stop
-fi

+ 0 - 74
benchmarks/utils.py

@@ -1,74 +0,0 @@
-"""Utility functions for the benchmarks."""
-
-import os
-import subprocess
-from pathlib import Path
-
-import httpx
-from httpx import HTTPError
-
-
-def get_python_version(venv_path: Path, os_name):
-    """Get the python version of python in a virtual env.
-
-    Args:
-        venv_path: Path to virtual environment.
-        os_name: Name of os.
-
-    Returns:
-        The python version.
-    """
-    python_executable = (
-        venv_path / "bin" / "python"
-        if "windows" not in os_name
-        else venv_path / "Scripts" / "python.exe"
-    )
-    try:
-        output = subprocess.check_output(
-            [str(python_executable), "--version"], stderr=subprocess.STDOUT
-        )
-        python_version = output.decode("utf-8").strip().split()[1]
-        return ".".join(python_version.split(".")[:-1])
-    except subprocess.CalledProcessError:
-        return None
-
-
-def get_directory_size(directory: Path):
-    """Get the size of a directory in bytes.
-
-    Args:
-        directory: The directory to check.
-
-    Returns:
-        The size of the dir in bytes.
-    """
-    total_size = 0
-    for dirpath, _, filenames in os.walk(directory):
-        for f in filenames:
-            fp = Path(dirpath) / f
-            total_size += fp.stat().st_size
-    return total_size
-
-
-def send_data_to_posthog(event, properties):
-    """Send data to PostHog.
-
-    Args:
-        event: The event to send.
-        properties: The properties to send.
-
-    Raises:
-        HTTPError: When there is an error sending data to PostHog.
-    """
-    event_data = {
-        "api_key": "phc_JoMo0fOyi0GQAooY3UyO9k0hebGkMyFJrrCw1Gt5SGb",
-        "event": event,
-        "properties": properties,
-    }
-
-    with httpx.Client() as client:
-        response = client.post("https://app.posthog.com/capture/", json=event_data)
-        if response.status_code != 200:
-            raise HTTPError(
-                f"Error sending data to PostHog: {response.status_code} - {response.text}"
-            )