# Source code for lsurf.utilities.detector_analysis

# The Clear BSD License
#
# Copyright (c) 2026 Tobias Heibges
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted (subject to the limitations in the disclaimer
# below) provided that the following conditions are met:
#
#      * Redistributions of source code must retain the above copyright notice,
#      this list of conditions and the following disclaimer.
#
#      * Redistributions in binary form must reproduce the above copyright
#      notice, this list of conditions and the following disclaimer in the
#      documentation and/or other materials provided with the distribution.
#
#      * Neither the name of the copyright holder nor the names of its
#      contributors may be used to endorse or promote products derived from this
#      software without specific prior written permission.
#
# NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY
# THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

"""
Detector Analysis Utilities

Functions for analyzing detector data including peak irradiance (W/m²)
and Pareto front computation for irradiance vs time spread trade-offs.
"""

from typing import Any

import numpy as np
from numpy.typing import NDArray

from ..analysis.healpix_utils import (
    HAS_HEALPIX,
    rays_to_healpix,
)


def weighted_percentile(
    values: NDArray,
    weights: NDArray,
    percentile: float,
) -> float:
    """
    Compute weighted percentile.

    Parameters
    ----------
    values : ndarray
        Data values
    weights : ndarray
        Weights for each value (e.g., ray intensities)
    percentile : float
        Percentile to compute (0-100)

    Returns
    -------
    float
        The weighted percentile value. Returns 0.0 for empty input or when
        the weights sum to zero (no meaningful percentile exists).
    """
    if len(values) == 0:
        return 0.0

    # Sort by values
    sort_idx = np.argsort(values)
    sorted_values = values[sort_idx]
    sorted_weights = weights[sort_idx]

    # Compute cumulative weights (normalized to 0-1)
    cumsum = np.cumsum(sorted_weights)

    # Guard against all-zero (or non-positive) total weight: normalizing
    # would divide by zero and propagate NaN into the result. Mirror the
    # empty-input convention and return 0.0.
    if cumsum[-1] <= 0:
        return 0.0

    cumsum_normalized = cumsum / cumsum[-1]

    # Find where cumulative weight crosses the percentile threshold
    threshold = percentile / 100.0

    # Use linear interpolation
    idx = np.searchsorted(cumsum_normalized, threshold)

    if idx == 0:
        return float(sorted_values[0])
    if idx >= len(sorted_values):
        return float(sorted_values[-1])

    # Linear interpolation between adjacent points
    w_low = cumsum_normalized[idx - 1]
    w_high = cumsum_normalized[idx]
    v_low = sorted_values[idx - 1]
    v_high = sorted_values[idx]

    if w_high == w_low:
        return float(v_low)

    # Interpolate
    frac = (threshold - w_low) / (w_high - w_low)
    return float(v_low + frac * (v_high - v_low))


def find_peak_irradiance_local(
    recorded_rays,
    detector_radius: float,
    detector_center: NDArray | None = None,
    n_bins: int = 50,
    bin_size_meters: float | None = None,
) -> dict[str, Any]:
    """
    Find peak irradiance using local tangent plane coordinates.

    Uses a local East-North coordinate system centered on the peak region,
    providing constant resolution in physical units (meters). This avoids
    the polar singularities and anisotropic bin sizes of lat/lon binning.

    Parameters
    ----------
    recorded_rays : RecordedRays
        Recorded rays on detection sphere
    detector_radius : float
        Radius of the detector sphere (m)
    detector_center : array-like, optional
        Center of the detector sphere (default origin)
    n_bins : int
        Number of bins in each dimension (default 50)
    bin_size_meters : float, optional
        Physical size of each bin in meters. If None, auto-computed to
        cover the data extent with n_bins.

    Returns
    -------
    dict
        Dictionary containing:
        - peak_east, peak_north: Peak location in local coordinates (m)
        - peak_lon, peak_lat: Peak location in radians
        - peak_lon_deg, peak_lat_deg: Peak location in degrees
        - peak_position: Peak location in Cartesian (x, y, z)
        - peak_irradiance: Irradiance at peak (W/m²)
        - total_power: Total detected power (W)
        - histogram: 2D irradiance histogram
        - east_edges, north_edges: Bin edges in local coords (m)
        - bin_size: Physical bin size (m)
    """
    positions = recorded_rays.positions
    intensities = recorded_rays.intensities

    # Convert to spherical coordinates relative to detector center
    if detector_center is not None:
        positions = positions - np.array(detector_center)

    x, y, z = positions[:, 0], positions[:, 1], positions[:, 2]
    r = np.sqrt(x**2 + y**2 + z**2)
    lat = np.arcsin(z / r)
    lon = np.arctan2(y, x)

    # Step 1: Find approximate peak using intensity-weighted centroid
    total_intensity = np.sum(intensities)
    if total_intensity > 0:
        peak_lat_approx = np.sum(lat * intensities) / total_intensity
        peak_lon_approx = np.sum(lon * intensities) / total_intensity
    else:
        # No power detected: fall back to the unweighted geometric mean
        peak_lat_approx = np.mean(lat)
        peak_lon_approx = np.mean(lon)

    # Step 2: Compute local tangent plane basis vectors at approximate peak
    # Radial direction (outward from sphere center)
    radial = np.array(
        [
            np.cos(peak_lat_approx) * np.cos(peak_lon_approx),
            np.cos(peak_lat_approx) * np.sin(peak_lon_approx),
            np.sin(peak_lat_approx),
        ]
    )

    # East direction (tangent to latitude circles, pointing east)
    east = np.array([-np.sin(peak_lon_approx), np.cos(peak_lon_approx), 0.0])
    east = east / np.linalg.norm(east)

    # North direction (tangent to longitude circles, pointing north)
    north = np.cross(radial, east)
    north = north / np.linalg.norm(north)

    # Step 3: Project all ray positions onto local tangent plane
    # Position on sphere relative to peak point
    peak_point = detector_radius * radial
    rel_positions = positions - peak_point

    # Project onto East-North plane (in meters)
    local_east = np.dot(rel_positions, east)
    local_north = np.dot(rel_positions, north)

    # Step 4: Determine bin size
    east_extent = local_east.max() - local_east.min()
    north_extent = local_north.max() - local_north.min()

    if bin_size_meters is None:
        # Auto-compute to cover data with n_bins
        bin_size = max(east_extent, north_extent) / n_bins * 1.1  # 10% padding
        bin_size = max(bin_size, 1.0)  # Minimum 1 meter bins
    else:
        bin_size = bin_size_meters

    # Step 5: Create uniform grid in local coordinates
    # One bin_size of padding on each side so edge rays fall inside the grid
    east_min = local_east.min() - bin_size
    east_max = local_east.max() + bin_size
    north_min = local_north.min() - bin_size
    north_max = local_north.max() + bin_size

    n_east_bins = max(1, int(np.ceil((east_max - east_min) / bin_size)))
    n_north_bins = max(1, int(np.ceil((north_max - north_min) / bin_size)))

    east_edges = np.linspace(
        east_min, east_min + n_east_bins * bin_size, n_east_bins + 1
    )
    north_edges = np.linspace(
        north_min, north_min + n_north_bins * bin_size, n_north_bins + 1
    )

    # Step 6: Bin rays and compute irradiance
    hist, _, _ = np.histogram2d(
        local_east, local_north, bins=[east_edges, north_edges], weights=intensities
    )

    # Physical area per bin (constant for all bins!)
    bin_area = bin_size**2  # m²

    # Irradiance = power / area (W/m²)
    irradiance = hist / bin_area

    # Step 7: Find peak
    peak_idx = np.unravel_index(np.argmax(irradiance), irradiance.shape)
    peak_east_bin, peak_north_bin = peak_idx

    east_centers = (east_edges[:-1] + east_edges[1:]) / 2
    north_centers = (north_edges[:-1] + north_edges[1:]) / 2

    peak_east = east_centers[peak_east_bin]
    peak_north = north_centers[peak_north_bin]

    # Refine peak using intensity-weighted centroid in 3x3 bin neighborhood
    east_min_idx = max(0, peak_east_bin - 1)
    east_max_idx = min(n_east_bins, peak_east_bin + 2)
    north_min_idx = max(0, peak_north_bin - 1)
    north_max_idx = min(n_north_bins, peak_north_bin + 2)

    neighborhood_mask = (
        (local_east >= east_edges[east_min_idx])
        & (local_east < east_edges[east_max_idx])
        & (local_north >= north_edges[north_min_idx])
        & (local_north < north_edges[north_max_idx])
    )

    if np.sum(neighborhood_mask) > 0:
        weights = intensities[neighborhood_mask]
        total_weight = np.sum(weights)
        if total_weight > 0:
            peak_east = np.sum(local_east[neighborhood_mask] * weights) / total_weight
            peak_north = np.sum(local_north[neighborhood_mask] * weights) / total_weight

    # Step 8: Convert peak back to global coordinates
    peak_local_3d = peak_point + peak_east * east + peak_north * north
    peak_r = np.linalg.norm(peak_local_3d)

    # Normalize to sphere surface (the tangent-plane point lies slightly
    # outside the sphere, so project it back radially)
    peak_position = peak_local_3d * (detector_radius / peak_r)

    # Get lat/lon of peak
    peak_lat = np.arcsin(peak_position[2] / detector_radius)
    peak_lon = np.arctan2(peak_position[1], peak_position[0])

    return {
        "peak_east": peak_east,
        "peak_north": peak_north,
        "peak_lon": peak_lon,
        "peak_lat": peak_lat,
        "peak_lon_deg": np.degrees(peak_lon),
        "peak_lat_deg": np.degrees(peak_lat),
        "peak_position": peak_position,
        "peak_irradiance": irradiance[peak_idx],
        "total_power": np.sum(intensities),
        "histogram": irradiance,
        "east_edges": east_edges,
        "north_edges": north_edges,
        "east_centers": east_centers,
        "north_centers": north_centers,
        "bin_size": bin_size,
        "local_basis": {"east": east, "north": north, "radial": radial},
        "tangent_point": peak_point,
    }
def find_peak_energy_density(
    recorded_rays,
    detector_radius: float,
    detector_center: NDArray | None = None,
    n_bins: int = 50,
) -> dict[str, Any]:
    """
    Scan the detector sphere to find the location with highest energy density.

    Parameters
    ----------
    recorded_rays : RecordedRays
        Recorded rays on detection sphere
    detector_radius : float
        Radius of the detector sphere (m)
    detector_center : array-like, optional
        Center of the detector sphere (default origin)
    n_bins : int
        Number of bins in each angular dimension

    Returns
    -------
    dict
        Dictionary containing:
        - peak_lon, peak_lat: Peak location in radians
        - peak_lon_deg, peak_lat_deg: Peak location in degrees
        - peak_position: Peak location in Cartesian (x, y, z)
        - peak_energy_density: Energy density at peak (W/sr)
        - peak_irradiance: Irradiance at peak (W/m²)
        - total_power: Total detected power
        - histogram: 2D energy-density histogram (W/sr)
        - histogram_irradiance: 2D irradiance histogram (W/m²)
        - lon_edges, lat_edges: Bin edges
        - lon_centers, lat_centers: Bin centers

    Notes
    -----
    This function uses lat/lon binning which has anisotropic resolution
    (bins are smaller near poles). For more uniform resolution, use
    find_peak_irradiance_local() which uses local tangent plane coordinates.
    """
    positions = recorded_rays.positions
    intensities = recorded_rays.intensities

    # Convert to spherical coordinates relative to detector center
    if detector_center is not None:
        positions = positions - np.array(detector_center)

    x, y, z = positions[:, 0], positions[:, 1], positions[:, 2]
    r = np.sqrt(x**2 + y**2 + z**2)
    lat = np.arcsin(z / r)  # -pi/2 to pi/2
    lon = np.arctan2(y, x)  # -pi to pi

    # Determine bin ranges based on data extent (with small padding)
    lat_min, lat_max = lat.min(), lat.max()
    lon_min, lon_max = lon.min(), lon.max()
    lat_padding = (lat_max - lat_min) * 0.1 + 0.01
    lon_padding = (lon_max - lon_min) * 0.1 + 0.01
    lat_range = (lat_min - lat_padding, lat_max + lat_padding)
    lon_range = (lon_min - lon_padding, lon_max + lon_padding)

    # Create 2D histogram weighted by intensity; hist has shape (n_lon, n_lat)
    hist, lon_edges, lat_edges = np.histogram2d(
        lon, lat, bins=n_bins, range=[lon_range, lat_range], weights=intensities
    )

    # Compute solid angle per bin for energy density (W/sr)
    # Solid angle element: dΩ = cos(lat) * dlat * dlon
    dlat = lat_edges[1] - lat_edges[0]
    dlon = lon_edges[1] - lon_edges[0]
    lat_centers = (lat_edges[:-1] + lat_edges[1:]) / 2
    solid_angles = np.abs(np.cos(lat_centers)) * dlat * dlon

    # Energy density = power / solid_angle (W/sr); broadcast over lat axis
    energy_density = hist / solid_angles[np.newaxis, :]

    # Also compute physical area per bin for irradiance (W/m²)
    # Area element on sphere: dA = R² * cos(lat) * dlat * dlon = R² * dΩ
    physical_areas = (detector_radius**2) * solid_angles

    # Irradiance = power / area (W/m²)
    irradiance = hist / physical_areas[np.newaxis, :]

    # Find peak bin (use energy density for consistency with original behavior)
    peak_idx = np.unravel_index(np.argmax(energy_density), energy_density.shape)
    peak_lon_bin, peak_lat_bin = peak_idx

    # Get bin center coordinates
    lon_centers = (lon_edges[:-1] + lon_edges[1:]) / 2
    peak_lon = lon_centers[peak_lon_bin]
    peak_lat = lat_centers[peak_lat_bin]

    # Refine peak location using intensity-weighted centroid in neighborhood
    # Use 3x3 neighborhood around peak
    lon_min_idx = max(0, peak_lon_bin - 1)
    lon_max_idx = min(n_bins, peak_lon_bin + 2)
    lat_min_idx = max(0, peak_lat_bin - 1)
    lat_max_idx = min(n_bins, peak_lat_bin + 2)

    # Find rays within the neighborhood bins
    neighborhood_mask = (
        (lon >= lon_edges[lon_min_idx])
        & (lon < lon_edges[lon_max_idx])
        & (lat >= lat_edges[lat_min_idx])
        & (lat < lat_edges[lat_max_idx])
    )

    if np.sum(neighborhood_mask) > 0:
        weights = intensities[neighborhood_mask]
        total_weight = np.sum(weights)
        if total_weight > 0:
            peak_lon = np.sum(lon[neighborhood_mask] * weights) / total_weight
            peak_lat = np.sum(lat[neighborhood_mask] * weights) / total_weight

    # Convert peak to Cartesian coordinates on sphere
    peak_x = detector_radius * np.cos(peak_lat) * np.cos(peak_lon)
    peak_y = detector_radius * np.cos(peak_lat) * np.sin(peak_lon)
    peak_z = detector_radius * np.sin(peak_lat)

    return {
        "peak_lon": peak_lon,
        "peak_lat": peak_lat,
        "peak_lon_deg": np.degrees(peak_lon),
        "peak_lat_deg": np.degrees(peak_lat),
        "peak_position": np.array([peak_x, peak_y, peak_z]),
        "peak_energy_density": energy_density[peak_idx],  # W/sr
        "peak_irradiance": irradiance[peak_idx],  # W/m²
        "total_power": np.sum(intensities),
        "histogram": energy_density,  # W/sr (original behavior)
        "histogram_irradiance": irradiance,  # W/m²
        "lon_edges": lon_edges,
        "lat_edges": lat_edges,
        "lon_centers": lon_centers,
        "lat_centers": lat_centers,
    }
def compute_pareto_front(
    recorded_rays,
    detector_radius: float,
    detector_center: NDArray | None = None,
    n_bins: int = 30,
    min_rays_per_bin: int = 5,
) -> dict[str, Any]:
    """
    Compute Pareto front for irradiance vs time spread trade-off.

    Parameters
    ----------
    recorded_rays : RecordedRays
        Recorded rays on detection sphere
    detector_radius : float
        Radius of the detector sphere (m)
    detector_center : array-like, optional
        Center of the detector sphere (default origin)
    n_bins : int
        Number of bins in each angular dimension
    min_rays_per_bin : int
        Minimum rays required per bin for reliable statistics

    Returns
    -------
    dict
        Dictionary containing:
        - bin_data: List of dicts with (lon, lat, energy_density,
          time_spread_ns, n_rays, total_intensity)
        - pareto_indices: Indices of Pareto-optimal bins
        - pareto_front: List of Pareto-optimal bin data
        - all_energy_densities: Array of all bin energy densities (W/sr)
        - all_time_spreads: Array of all bin time spreads (ns)
    """
    positions = recorded_rays.positions
    intensities = recorded_rays.intensities
    times = recorded_rays.times

    # Convert to spherical coordinates relative to detector center
    if detector_center is not None:
        positions = positions - np.array(detector_center)

    x, y, z = positions[:, 0], positions[:, 1], positions[:, 2]
    r = np.sqrt(x**2 + y**2 + z**2)
    lat = np.arcsin(z / r)
    lon = np.arctan2(y, x)

    # Determine bin ranges based on data extent
    lat_min, lat_max = lat.min(), lat.max()
    lon_min, lon_max = lon.min(), lon.max()
    lat_padding = (lat_max - lat_min) * 0.05 + 0.001
    lon_padding = (lon_max - lon_min) * 0.05 + 0.001
    lat_edges = np.linspace(lat_min - lat_padding, lat_max + lat_padding, n_bins + 1)
    lon_edges = np.linspace(lon_min - lon_padding, lon_max + lon_padding, n_bins + 1)
    dlat = lat_edges[1] - lat_edges[0]
    dlon = lon_edges[1] - lon_edges[0]

    # Digitize rays into bins
    lon_bin_idx = np.digitize(lon, lon_edges) - 1
    lat_bin_idx = np.digitize(lat, lat_edges) - 1

    # Clamp to valid range
    lon_bin_idx = np.clip(lon_bin_idx, 0, n_bins - 1)
    lat_bin_idx = np.clip(lat_bin_idx, 0, n_bins - 1)

    # Compute metrics for each bin
    bin_data: list[dict[str, Any]] = []
    for i in range(n_bins):
        for j in range(n_bins):
            mask = (lon_bin_idx == i) & (lat_bin_idx == j)
            n_rays = np.sum(mask)
            if n_rays >= min_rays_per_bin:
                bin_intensities = intensities[mask]
                bin_times = times[mask]

                # Energy density (W/sr)
                lat_center = (lat_edges[j] + lat_edges[j + 1]) / 2
                solid_angle = np.abs(np.cos(lat_center)) * dlat * dlon
                energy_density = np.sum(bin_intensities) / solid_angle

                # Time spread: intensity-weighted 90th - 10th percentile
                # This ensures low-intensity multi-bounce rays contribute
                # proportionally less to the time spread metric
                time_90 = weighted_percentile(bin_times, bin_intensities, 90)
                time_10 = weighted_percentile(bin_times, bin_intensities, 10)
                time_spread_ns = (time_90 - time_10) * 1e9

                lon_center = (lon_edges[i] + lon_edges[i + 1]) / 2
                bin_data.append(
                    {
                        "lon": lon_center,
                        "lat": lat_center,
                        "lon_deg": np.degrees(lon_center),
                        "lat_deg": np.degrees(lat_center),
                        "energy_density": energy_density,
                        "time_spread_ns": time_spread_ns,
                        "n_rays": int(n_rays),
                        "total_intensity": np.sum(bin_intensities),
                    }
                )

    if len(bin_data) == 0:
        return {
            "bin_data": [],
            "pareto_indices": np.array([], dtype=int),
            "pareto_front": [],
            "all_energy_densities": np.array([]),
            "all_time_spreads": np.array([]),
        }

    # Find Pareto front
    # A point is Pareto-optimal if no other point has BOTH:
    # - higher energy density AND lower time spread
    energy_densities = np.array([b["energy_density"] for b in bin_data])
    time_spreads = np.array([b["time_spread_ns"] for b in bin_data])

    # O(n²) dominance check; n is bounded by n_bins² valid bins
    n_points = len(bin_data)
    is_pareto = np.ones(n_points, dtype=bool)
    for i in range(n_points):
        for j in range(n_points):
            if i != j:
                # Check if j dominates i
                # j dominates i if j has higher energy AND lower time spread
                if (
                    energy_densities[j] > energy_densities[i]
                    and time_spreads[j] < time_spreads[i]
                ):
                    is_pareto[i] = False
                    break

    pareto_indices = np.where(is_pareto)[0]
    pareto_front = [bin_data[i] for i in pareto_indices]

    # Sort Pareto front by time spread (ascending)
    pareto_front = sorted(pareto_front, key=lambda x: x["time_spread_ns"])

    return {
        "bin_data": bin_data,
        "pareto_indices": pareto_indices,
        "pareto_front": pareto_front,
        "all_energy_densities": energy_densities,
        "all_time_spreads": time_spreads,
    }
def analyze_healpix_detector(
    recorded_rays,
    detector_radius: float,
    nside: int = 128,
    min_rays_per_pixel: int = 5,
) -> dict[str, Any]:
    """
    Analyze detected rays using HEALPix equal-area pixelization.

    HEALPix provides equal-area pixels on the sphere, avoiding the polar
    singularities and anisotropic bin sizes of lat/lon binning.

    Parameters
    ----------
    recorded_rays : RecordedRays
        Recorded ray data from detection sphere
    detector_radius : float
        Radius of detection sphere in meters
    nside : int
        HEALPix resolution parameter (default 128 → ~13,000 pixels)
        nside=64 → ~3,400 pixels
        nside=128 → ~13,000 pixels
        nside=256 → ~50,000 pixels
    min_rays_per_pixel : int
        Minimum rays required per pixel for valid statistics

    Returns
    -------
    dict
        Dictionary containing:
        - peak_pixel_id: HEALPix pixel with highest irradiance
        - peak_lon_deg, peak_lat_deg: Peak location in degrees
        - peak_irradiance: Irradiance at peak (W/m²)
        - total_power: Total detected power (W)
        - pixel_area: Area of each HEALPix pixel (m²)
        - pixel_data: List of dicts with per-pixel data:
            - pixel_id, lon_deg, lat_deg
            - irradiance (W/m²)
            - time_spread_ns (90th-10th percentile)
            - ray_count
        - healpix_data: Full HEALPixData object

    Raises
    ------
    ImportError
        If astropy-healpix is not installed
    """
    if not HAS_HEALPIX:
        raise ImportError(
            "astropy-healpix is required. Install with: pip install astropy-healpix"
        )

    # Convert rays to HEALPix representation
    healpix_data = rays_to_healpix(recorded_rays, nside=nside)

    # Compute equal pixel area (constant for all HEALPix pixels)
    npix = 12 * nside**2
    solid_angle_per_pixel = 4 * np.pi / npix  # steradians
    pixel_area = detector_radius**2 * solid_angle_per_pixel  # m²

    # Get unique pixel indices
    unique_pixels = np.unique(healpix_data.pixel_indices)

    # Compute per-pixel statistics
    pixel_data: list[dict[str, Any]] = []
    peak_irradiance = 0.0
    peak_pixel_id = -1
    peak_lon_deg = 0.0
    peak_lat_deg = 0.0

    for pixel_id in unique_pixels:
        mask = healpix_data.pixel_indices == pixel_id
        n_rays = np.sum(mask)

        # Skip sparsely populated pixels: statistics would be unreliable
        if n_rays < min_rays_per_pixel:
            continue

        # Get rays in this pixel
        pixel_intensities = healpix_data.intensities[mask]
        pixel_times = healpix_data.times[mask]
        pixel_lons = healpix_data.lon[mask]
        pixel_lats = healpix_data.lat[mask]

        # Sum intensity and compute irradiance
        intensity_sum = np.sum(pixel_intensities)
        irradiance = intensity_sum / pixel_area  # W/m²

        # Time spread: intensity-weighted 90th - 10th percentile
        if len(pixel_times) >= 2:
            time_90 = weighted_percentile(pixel_times, pixel_intensities, 90)
            time_10 = weighted_percentile(pixel_times, pixel_intensities, 10)
            time_spread_ns = (time_90 - time_10) * 1e9
        else:
            time_spread_ns = 0.0

        # Mean position (intensity-weighted); fall back to divisor 1.0
        # when the pixel carries zero total intensity
        total_intensity = intensity_sum if intensity_sum > 0 else 1.0
        mean_lon = np.sum(pixel_lons * pixel_intensities) / total_intensity
        mean_lat = np.sum(pixel_lats * pixel_intensities) / total_intensity

        pixel_data.append(
            {
                "pixel_id": int(pixel_id),
                "lon_deg": np.degrees(mean_lon),
                "lat_deg": np.degrees(mean_lat),
                "irradiance": irradiance,
                "intensity_sum": intensity_sum,
                "time_spread_ns": time_spread_ns,
                "ray_count": int(n_rays),
            }
        )

        # Track peak
        if irradiance > peak_irradiance:
            peak_irradiance = irradiance
            peak_pixel_id = int(pixel_id)
            peak_lon_deg = np.degrees(mean_lon)
            peak_lat_deg = np.degrees(mean_lat)

    return {
        "peak_pixel_id": peak_pixel_id,
        "peak_lon_deg": peak_lon_deg,
        "peak_lat_deg": peak_lat_deg,
        "peak_irradiance": peak_irradiance,
        "total_power": np.sum(healpix_data.intensities),
        "pixel_area": pixel_area,
        "nside": nside,
        "npix": npix,
        "pixel_data": pixel_data,
        "healpix_data": healpix_data,
    }
def compute_healpix_pareto_front(
    recorded_rays,
    detector_radius: float,
    nside: int = 64,
    min_rays_per_pixel: int = 10,
) -> dict[str, Any]:
    """
    Compute Pareto front for irradiance vs time spread using HEALPix.

    A pixel is Pareto-optimal if no other pixel has BOTH higher irradiance
    AND lower time spread.

    Parameters
    ----------
    recorded_rays : RecordedRays
        Recorded ray data from detection sphere
    detector_radius : float
        Radius of detection sphere in meters
    nside : int
        HEALPix resolution parameter (default 64 → ~3,400 pixels)
    min_rays_per_pixel : int
        Minimum rays required per pixel for valid statistics

    Returns
    -------
    dict
        Dictionary containing:
        - pixel_data: List of all valid pixel dicts
        - pareto_indices: Indices of Pareto-optimal pixels
        - pareto_front: List of Pareto-optimal pixel data
        - all_irradiances: Array of all pixel irradiances
        - all_time_spreads: Array of all pixel time spreads

    Raises
    ------
    ImportError
        If astropy-healpix is not installed
    """
    # Use analyze_healpix_detector to get pixel data
    result = analyze_healpix_detector(
        recorded_rays,
        detector_radius,
        nside=nside,
        min_rays_per_pixel=min_rays_per_pixel,
    )

    pixel_data = result["pixel_data"]

    if len(pixel_data) == 0:
        return {
            "pixel_data": [],
            "pareto_indices": np.array([], dtype=int),
            "pareto_front": [],
            "all_irradiances": np.array([]),
            "all_time_spreads": np.array([]),
        }

    # Extract arrays for Pareto computation
    irradiances = np.array([p["irradiance"] for p in pixel_data])
    time_spreads = np.array([p["time_spread_ns"] for p in pixel_data])

    # Find Pareto front (O(n²) pairwise dominance check)
    n_points = len(pixel_data)
    is_pareto = np.ones(n_points, dtype=bool)
    for i in range(n_points):
        for j in range(n_points):
            if i != j:
                # Check if j dominates i
                # j dominates i if j has higher irradiance AND lower time spread
                if (
                    irradiances[j] > irradiances[i]
                    and time_spreads[j] < time_spreads[i]
                ):
                    is_pareto[i] = False
                    break

    pareto_indices = np.where(is_pareto)[0]
    pareto_front = [pixel_data[i] for i in pareto_indices]

    # Sort Pareto front by time spread (ascending)
    pareto_front = sorted(pareto_front, key=lambda x: x["time_spread_ns"])

    return {
        "pixel_data": pixel_data,
        "pareto_indices": pareto_indices,
        "pareto_front": pareto_front,
        "all_irradiances": irradiances,
        "all_time_spreads": time_spreads,
    }