author     Sam Scholten    2025-10-27 19:09:25 +1000
committer  Sam Scholten    2025-10-27 19:17:05 +1000
commit     20320a87d98c8517869f5e7c0fcd66bfdc090713 (patch)
tree       624d3fde714533168a618490d96006c87b0a0807
parent     ec29d6eda97ce1e9d4121c6c19f91d12ec25947c (diff)
drop loguru
-rw-r--r--  pyproject.toml                        1
-rw-r--r--  src/transivent/__init__.py            4
-rw-r--r--  src/transivent/analysis.py          253
-rw-r--r--  src/transivent/event_detector.py     16
-rw-r--r--  src/transivent/event_plotter.py      23
-rw-r--r--  src/transivent/event_processor.py    36
-rw-r--r--  src/transivent/io.py                 44
-rw-r--r--  src/transivent/utils.py              19
-rw-r--r--  tests/test_diffusion_simple.py        2
-rw-r--r--  tests/test_event_processor.py         2
-rw-r--r--  tests/test_simple.py                  4
-rw-r--r--  uv.lock                              25
12 files changed, 145 insertions, 284 deletions
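Note: the pattern applied throughout this commit is mechanical: `logger.warning(...)` becomes `warnings.warn(...)`, while loguru's info/debug/success chatter is deleted outright (some of it preserved as `# Removed:` comments). A minimal sketch of the before/after, with a hypothetical `load_file` standing in for the real call sites:

    import warnings

    # Before (loguru):
    #   from loguru import logger
    #   logger.warning(f"No PreviewImage found in {path}")
    #   logger.info("Running analysis pipeline...")

    # After (stdlib): warn on actionable problems, drop informational chatter.
    def load_file(path: str) -> None:
        warnings.warn(f"No PreviewImage found in {path}")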
diff --git a/pyproject.toml b/pyproject.toml
index 413aaef..69cd1c9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -7,7 +7,6 @@ authors = [{ name = "Sam Scholten", email = "s.scholten@uq.edu.au" }]
requires-python = ">=3.8"
dependencies = [
"joblib",
- "loguru",
"matplotlib",
"numba",
"Pillow",
diff --git a/src/transivent/__init__.py b/src/transivent/__init__.py
index b802f30..558b2a3 100644
--- a/src/transivent/__init__.py
+++ b/src/transivent/__init__.py
@@ -13,7 +13,7 @@ For advanced usage, building blocks are available in submodules:
- transivent.diffusion: Diffusion analysis tools (optional)
"""
-from .analysis import configure_logging, detect, detect_from_wfm
+from .analysis import detect, detect_from_wfm
from .event_detector import detect_events, merge_overlapping_events
from .event_plotter import EventPlotter
from .event_processor import extract_event_waveforms
@@ -24,8 +24,6 @@ __all__ = [
# Main entry points
"detect",
"detect_from_wfm",
- # Utilities
- "configure_logging",
# Building blocks
"detect_events",
"merge_overlapping_events",
diff --git a/src/transivent/analysis.py b/src/transivent/analysis.py
index ad856ed..6a7c167 100644
--- a/src/transivent/analysis.py
+++ b/src/transivent/analysis.py
@@ -1,3 +1,4 @@
+import warnings
import base64
import io
import os
@@ -7,7 +8,7 @@ from typing import Any, Dict, List, Optional, Tuple
import matplotlib.pyplot as plt
import numpy as np
-from loguru import logger
+
from numba import njit
from PIL import Image
from scipy.ndimage import gaussian_filter1d, median_filter, uniform_filter1d
@@ -52,12 +53,12 @@ def extract_preview_image(sidecar_path: str, output_path: str) -> Optional[str]:
# Find PreviewImage element
preview_elem = root.find(".//PreviewImage")
if preview_elem is None:
- logger.warning(f"No PreviewImage found in {sidecar_path}")
+ warnings.warn(f"No PreviewImage found in {sidecar_path}")
return None
image_data = preview_elem.get("ImageData")
if not image_data:
- logger.warning(f"Empty ImageData in PreviewImage from {sidecar_path}")
+ warnings.warn(f"Empty ImageData in PreviewImage from {sidecar_path}")
return None
# Decode base64 image data
@@ -67,11 +68,10 @@ def extract_preview_image(sidecar_path: str, output_path: str) -> Optional[str]:
image = Image.open(io.BytesIO(image_bytes))
image.save(output_path, "PNG")
- logger.info(f"Saved preview image: {output_path}")
return output_path
except Exception as e:
- logger.warning(f"Failed to extract preview image from {sidecar_path}: {e}")
+ warnings.warn(f"Failed to extract preview image from {sidecar_path}: {e}")
return None
@@ -95,29 +95,10 @@ def plot_preview_image(image_path: str, title: str = "Preview Image") -> None:
ax.axis('off') # Hide axes for cleaner display
except Exception as e:
- logger.warning(f"Failed to display preview image {image_path}: {e}")
-
-
+ warnings.warn(f"Failed to display preview image {image_path}: {e}")
-def configure_logging(log_level: str = "INFO") -> None:
- """
- Configure loguru logging with specified level.
-
- Parameters
- ----------
- log_level : str, default="INFO"
- Logging level: DEBUG, INFO, WARNING, ERROR, CRITICAL.
- """
- logger.remove()
- logger.add(
- sys.stderr,
- level=log_level.upper(),
- format="<green>{time:YYYY-MM-DD HH:mm:ss}</green> | <level>{level: <8}</level> | <level>{message}</level>",
- colorize=True,
- )
-
def load_data(
name: str,
@@ -147,7 +128,6 @@ def load_data(
Tuple[np.ndarray, np.ndarray]
Time and signal arrays.
"""
- logger.success(f"Loading data from {name}")
t, x = rd(
name,
sampling_interval,
@@ -156,10 +136,6 @@ def load_data(
crop=crop,
)
- logger.debug(
- f"Signal statistics: min={np.min(x):.3g}, max={np.max(x):.3g}, mean={np.mean(x):.3g}, std={np.std(x):.3g}"
- )
-
return t, x
@@ -212,19 +188,9 @@ def calculate_smoothing_parameters(
min_event_n = max(1, int(min_event_t / sampling_interval))
- smooth_freq_hz = 1 / (smooth_n * sampling_interval)
- logger.info(
- f"--Smooth window: {smooth_n} samples ({smooth_win_t * 1e6:.1f} µs, {smooth_freq_hz:.1f} Hz)"
- )
- logger.info(
- f"--Min event length: {min_event_n} samples ({min_event_t * 1e6:.1f} µs)"
- )
- logger.info(f"--Detection SNR: {detection_snr}")
- logger.info(f"--Min keep SNR: {min_event_keep_snr}")
- logger.info(f"--Widen fraction: {widen_frac}")
- logger.info(
- f"--Signal polarity: {signal_polarity} ({'negative' if signal_polarity < 0 else 'positive'} events)"
- )
+ # Removed: print(f"--Smooth window: {smooth_n} samples ({smooth_win_t * 1e6:.1f} µs, {1 / (smooth_n * sampling_interval):.1f} Hz)")
+ # Removed: print(f"--Min event length: {min_event_n} samples ({min_event_t * 1e6:.1f} µs)")
+ # Removed: print(f"--Signal polarity: {signal_polarity} ({'negative' if signal_polarity < 0 else 'positive'} events)")
return smooth_n, min_event_n
@@ -258,7 +224,6 @@ def calculate_initial_background(
3 Use Moving Average for maximum speed if events are well above noise
4 Reserve Savitzky-Golay for final high-quality analysis of interesting datasets
"""
- logger.info(f"Calculating initial background using {filter_type} filter")
if filter_type == "savgol":
bg_initial = savgol_filter(x, smooth_n, 3).astype(np.float32)
@@ -279,9 +244,7 @@ def calculate_initial_background(
f"Unknown filter_type: {filter_type}. Choose from 'savgol', 'gaussian', 'moving_average', 'median'"
)
- logger.debug(
- f"Initial background: mean={np.mean(bg_initial):.3g}, std={np.std(bg_initial):.3g}"
- )
+ # Removed: print(f"Initial background: mean={np.mean(bg_initial):.3g}, std={np.std(bg_initial):.3g}")
return bg_initial
@@ -303,17 +266,13 @@ def estimate_noise(x: np.ndarray, bg_initial: np.ndarray) -> np.float32:
"""
global_noise = np.float32(np.median(np.abs(x - bg_initial)) * MEDIAN_TO_STD_FACTOR)
- signal_rms = np.sqrt(np.mean(x**2))
- signal_range = np.max(x) - np.min(x)
- noise_pct_rms = 100 * global_noise / signal_rms if signal_rms > 0 else 0
- noise_pct_range = 100 * global_noise / signal_range if signal_range > 0 else 0
-
- logger.info(
- f"Global noise level: {global_noise:.3g} ({noise_pct_rms:.1f}% of RMS, {noise_pct_range:.1f}% of range)"
- )
-
- snr_estimate = np.std(x) / global_noise
- logger.info(f"Estimated signal SNR: {snr_estimate:.2f}")
+ # Removed:
+ # signal_rms = np.sqrt(np.mean(x**2))
+ # signal_range = np.max(x) - np.min(x)
+ # noise_pct_rms = 100 * global_noise / signal_rms if signal_rms > 0 else 0
+ # noise_pct_range = 100 * global_noise / signal_range if signal_range > 0 else 0
+ # print(f"Global noise level: {global_noise:.3g} ({noise_pct_rms:.1f}% of RMS, {noise_pct_range:.1f}% of range)")
+ # snr_estimate = np.std(x) / global_noise
return global_noise
@@ -358,12 +317,8 @@ def detect_initial_events(
np.ndarray
Array of initial events.
"""
- logger.info("Detecting initial events")
min_event_amp = np.float32(min_event_keep_snr) * global_noise
- logger.info(f"Detection threshold: {detection_snr}σ below background")
- logger.info(f"Keep threshold: {min_event_keep_snr}σ below background")
- logger.info(f"Min event amplitude threshold: {min_event_amp:.3g}")
events_initial, _ = detect_events(
t,
@@ -377,10 +332,8 @@ def detect_initial_events(
signal_polarity=signal_polarity,
)
- logger.info(f"Found {len(events_initial)} initial events after filtering")
events_initial = merge_overlapping_events(events_initial)
- logger.info(f"After merging: {len(events_initial)} events")
return events_initial
@@ -427,16 +380,13 @@ def calculate_clean_background(
3 Use Moving Average for maximum speed if events are well above noise
4 Reserve Savitzky-Golay for final high-quality analysis of interesting datasets
"""
- logger.info(f"Calculating clean background using {filter_type} filter")
- start_time = time.time()
+ # Removed: start_time = time.time()
# Fast masking with numba
mask = create_event_mask_numba(t, events_initial)
- mask_time = time.time()
+ # Removed: mask_time = time.time()
- logger.debug(
- f"Masked {np.sum(~mask)} samples ({100 * np.sum(~mask) / len(mask):.1f}%) for clean background"
- )
+ # Removed: print(f"Masked {np.sum(~mask)} samples ({100 * np.sum(~mask) / len(mask):.1f}%) for clean background")
t_masked = t[mask]
x_masked = x[mask]
@@ -445,17 +395,16 @@ def calculate_clean_background(
# Check if we need interpolation (events detected and masking applied)
if len(events_initial) == 0 or np.all(mask):
# No events detected or no masking needed - skip interpolation
- logger.debug("No events to mask - using direct filtering")
- interp_start = time.time()
+ # Removed: interp_start = time.time()
x_interp = x
- interp_end = time.time()
+ # Removed: interp_end = time.time()
else:
# Events detected - need interpolation
- interp_start = time.time()
+ # Removed: interp_start = time.time()
x_interp = np.interp(t, t_masked, x_masked)
- interp_end = time.time()
+ # Removed: interp_end = time.time()
- filter_start = time.time()
+ # Removed: filter_start = time.time()
if filter_type == "savgol":
bg_clean = savgol_filter(x_interp, smooth_n, filter_order).astype(
np.float32
@@ -477,18 +426,12 @@ def calculate_clean_background(
raise ValueError(
f"Unknown filter_type: {filter_type}. Choose from 'savgol', 'gaussian', 'moving_average', 'median'"
)
- filter_end = time.time()
+ # Removed: filter_end = time.time()
- logger.success(
- f"Timing: mask={mask_time - start_time:.3f}s, interp={interp_end - interp_start:.3f}s, filter={filter_end - filter_start:.3f}s"
- )
- logger.debug(
- f"Clean background: mean={np.mean(bg_clean):.3g}, std={np.std(bg_clean):.3g}"
- )
+ # Removed: print(f"Timing: mask={mask_time - start_time:.3f}s, interp={interp_end - interp_start:.3f}s, filter={filter_end - filter_start:.3f}s")
+ # Removed: print(f"Clean background: mean={np.mean(bg_clean):.3g}, std={np.std(bg_clean):.3g}")
else:
- logger.debug(
- "Insufficient unmasked samples for clean background - using initial"
- )
+ warnings.warn("Insufficient unmasked samples for clean background - using initial background estimate.")
bg_clean = bg_initial
return bg_clean
@@ -525,22 +468,21 @@ def analyze_thresholds(
Tuple[np.ndarray, np.ndarray]
Detection and keep threshold arrays.
"""
- logger.info("Analyzing thresholds")
if signal_polarity < 0:
detection_threshold = bg_clean - detection_snr * global_noise
keep_threshold = bg_clean - min_event_keep_snr * global_noise
- below_detection_pct = 100 * np.sum(x < detection_threshold) / len(x)
- below_keep_pct = 100 * np.sum(x < keep_threshold) / len(x)
- logger.info(f"Samples below detection threshold: {below_detection_pct:.2f}%")
- logger.info(f"Samples below keep threshold: {below_keep_pct:.2f}%")
+ # Removed: below_detection_pct = 100 * np.sum(x < detection_threshold) / len(x)
+ # Removed: below_keep_pct = 100 * np.sum(x < keep_threshold) / len(x)
+ # Removed: print(f"Below detection threshold: {below_detection_pct:.1f}% of samples")
+ # Removed: print(f"Below keep threshold: {below_keep_pct:.1f}% of samples")
else:
detection_threshold = bg_clean + detection_snr * global_noise
keep_threshold = bg_clean + min_event_keep_snr * global_noise
- above_detection_pct = 100 * np.sum(x > detection_threshold) / len(x)
- above_keep_pct = 100 * np.sum(x > keep_threshold) / len(x)
- logger.info(f"Samples above detection threshold: {above_detection_pct:.2f}%")
- logger.info(f"Samples above keep threshold: {above_keep_pct:.2f}%")
+ # Removed: above_detection_pct = 100 * np.sum(x > detection_threshold) / len(x)
+ # Removed: above_keep_pct = 100 * np.sum(x > keep_threshold) / len(x)
+ # Removed: print(f"Above detection threshold: {above_detection_pct:.1f}% of samples")
+ # Removed: print(f"Above keep threshold: {above_keep_pct:.1f}% of samples")
return detection_threshold, keep_threshold
@@ -585,7 +527,6 @@ def detect_final_events(
np.ndarray
Array of final events.
"""
- logger.info("Detecting final events")
min_event_amp = np.float32(min_event_keep_snr) * global_noise
events, noise = detect_events(
@@ -601,7 +542,6 @@ def detect_final_events(
)
events = merge_overlapping_events(events)
- logger.info(f"Detected {len(events)} final events")
return events
@@ -633,49 +573,42 @@ def analyze_events(
Signal polarity (-1 for negative, +1 for positive).
"""
if len(events) == 0:
- logger.info("No events to analyze")
return
if len(events) > 1000:
- logger.warning(
- f"Detected {len(events)} events, which is more than 1000. Skipping analysis."
+ warnings.warn(
+ f"Detected {len(events)} events, which is more than 1000. Skipping detailed event analysis."
)
return
- event_durations = (events[:, 1] - events[:, 0]) * 1000000 # Convert to µs
- event_amplitudes = []
-
- for t_start, t_end in events:
- event_mask = (t >= t_start) & (t < t_end)
- if np.any(event_mask):
- if signal_polarity < 0:
- amp = np.min(x[event_mask] - bg_clean[event_mask])
- else:
- amp = np.max(x[event_mask] - bg_clean[event_mask])
- event_amplitudes.append(abs(amp))
-
- if event_amplitudes:
- logger.info(
- f"Event durations (µs): min={np.min(event_durations):.2f}, max={np.max(event_durations):.2f}, mean={np.mean(event_durations):.2f}"
- )
- logger.info(
- f"Event amplitudes: min={np.min(event_amplitudes):.3g}, max={np.max(event_amplitudes):.3g}, mean={np.mean(event_amplitudes):.3g}"
- )
- logger.info(
- f"Event amplitude SNRs: min={np.min(event_amplitudes) / global_noise:.2f}, max={np.max(event_amplitudes) / global_noise:.2f}"
- )
-
- final_signal_rms = np.sqrt(np.mean(x**2))
- final_noise_pct_rms = (
- 100 * global_noise / final_signal_rms if final_signal_rms > 0 else 0
- )
- final_signal_range = np.max(x) - np.min(x)
- final_noise_pct_range = (
- 100 * global_noise / final_signal_range if final_signal_range > 0 else 0
- )
-
- logger.info(
- f"Noise summary: {global_noise:.3g} ({final_noise_pct_rms:.1f}% of RMS, {final_noise_pct_range:.1f}% of range)"
- )
+ # Removed:
+ # event_durations = (events[:, 1] - events[:, 0]) * 1000000 # Convert to µs
+ # event_amplitudes = []
+
+ # for t_start, t_end in events:
+ # event_mask = (t >= t_start) & (t < t_end)
+ # if np.any(event_mask):
+ # if signal_polarity < 0:
+ # amp = np.min(x[event_mask] - bg_clean[event_mask])
+ # else:
+ # amp = np.max(x[event_mask] - bg_clean[event_mask])
+ # event_amplitudes.append(abs(amp))
+
+ # if event_amplitudes:
+ # print(f"Event durations (µs): min={np.min(event_durations):.2f}, max={np.max(event_durations):.2f}, mean={np.mean(event_durations):.2f}")
+ # print(f"Event amplitudes: min={np.min(event_amplitudes):.3g}, max={np.max(event_amplitudes):.3g}, mean={np.mean(event_amplitudes):.3g}")
+ # print(f"Event amplitude SNRs: min={np.min(event_amplitudes) / global_noise:.2f}, max={np.max(event_amplitudes) / global_noise:.2f}")
+
+ # final_signal_rms = np.sqrt(np.mean(x**2))
+ # final_noise_pct_rms = (
+ # 100 * global_noise / final_signal_rms if final_signal_rms > 0 else 0
+ # )
+ # final_signal_range = np.max(x) - np.min(x)
+ # final_noise_pct_range = (
+ # 100 * global_noise / final_signal_range if final_signal_range > 0 else 0
+ # )
+
+ # print(f"Noise summary: {global_noise:.3g} ({final_noise_pct_rms:.1f}% of RMS, {final_noise_pct_range:.1f}% of range)")
+ pass
def create_oscilloscope_plot(
@@ -733,7 +666,6 @@ def create_oscilloscope_plot(
OscilloscopePlot
Configured oscilloscope plot.
"""
- logger.info("Creating visualization")
plot_name = name
if global_noise is not None:
@@ -1058,8 +990,7 @@ def process_file(
filter_order : int, default=2
Order of the Savitzky-Golay filter (only used for filter_type="savgol").
"""
- start_time = time.time()
- logger.info(f"Processing {name} with parameters:")
+ # Removed: start_time = time.time()
analysis_dir = data_path[:-1] if data_path.endswith("/") else data_path
analysis_dir += "_analysis/"
@@ -1068,7 +999,6 @@ def process_file(
# Extract and save preview image
sidecar_path = _get_xml_sidecar_path(name, data_path, sidecar)
- logger.info(f"Attempting to extract preview from: {sidecar_path}")
preview_path = os.path.join(analysis_dir, f"{name}_preview.png")
saved_preview = extract_preview_image(sidecar_path, preview_path)
@@ -1110,10 +1040,9 @@ def process_file(
t, x = load_data(name, sampling_interval, data_path, sidecar, crop)
# For now, process the entire file as a single chunk
- process_start_time = time.time()
+ # Removed: process_start_time = time.time()
results = process_chunk((t, x), state)
final_events = get_final_events(results["state"])
- logger.debug(f"Core processing took {time.time() - process_start_time:.3f}s")
# Extract intermediate results for plotting and analysis
bg_initial = results["bg_initial"]
@@ -1133,7 +1062,6 @@ def process_file(
# Stage 7: Event Analysis
analyze_events(t, x, bg_clean, final_events, global_noise, signal_polarity)
- logger.debug(f"Total processing time: {time.time() - start_time:.3f}s")
# Stage 6: Visualization
plot = create_oscilloscope_plot(
@@ -1172,9 +1100,7 @@ def process_file(
plt.show(block=True)
else:
# --- Chunked processing ---
- logger.info(
- f"--- Starting chunked processing with chunk size: {chunk_size} ---"
- )
+ # Removed: print(f"--- Starting chunked processing with chunk size: {chunk_size} ---")
chunk_generator = rd_chunked(
name,
@@ -1184,7 +1110,7 @@ def process_file(
sidecar=sidecar,
)
- process_start_time = time.time()
+ # Removed: process_start_time = time.time()
for t_chunk, x_chunk in chunk_generator:
results = process_chunk((t_chunk, x_chunk), state)
@@ -1196,19 +1122,13 @@ def process_file(
state["incomplete_event"] = None
final_events = get_final_events(state)
- logger.debug(f"Core processing took {time.time() - process_start_time:.3f}s")
- logger.success(
- f"Chunked processing complete. Found {len(final_events)} events."
- )
- if len(final_events) > 0:
- logger.info("Final events (first 10):")
- for i, event in enumerate(final_events[:10]):
- logger.info(
- f" Event {i+1}: start={event[0]:.6f}s, end={event[1]:.6f}s"
- )
+ # Removed: print(f"Chunked processing complete. Found {len(final_events)} events.")
+ # Removed: if len(final_events) > 0:
+ # Removed: for i, event in enumerate(final_events[:10]):
+ # Removed: print(f" Event {i+1}: start={event[0]:.6f}s, end={event[1]:.6f}s")
- logger.warning("Plotting is disabled in chunked processing mode.")
+ warnings.warn("Plotting is disabled in chunked processing mode.")
def detect(
@@ -1296,8 +1216,7 @@ def detect(
>>> results = detect(t, x, name="My Data")
>>> print(f"Found {len(results['events'])} events")
"""
- start_time = time.time()
- logger.info(f"Detecting events in: {name}")
+ # Removed: start_time = time.time()
# Validate inputs
t = np.asarray(t, dtype=np.float32)
@@ -1311,9 +1230,6 @@ def detect(
if len(t) < 2:
raise ValueError("Need at least 2 time points to calculate sampling interval")
sampling_interval = float(t[1] - t[0])
- logger.info(f"Calculated sampling interval: {sampling_interval:.3e} s")
- else:
- logger.info(f"Using provided sampling interval: {sampling_interval:.3e} s")
# Calculate smoothing parameters
smooth_n, min_event_n = calculate_smoothing_parameters(
@@ -1328,7 +1244,6 @@ def detect(
)
# Run analysis pipeline
- logger.info("Running analysis pipeline...")
bg_initial = calculate_initial_background(t, x, smooth_n, filter_type)
global_noise = estimate_noise(x, bg_initial)
@@ -1349,8 +1264,6 @@ def detect(
analyze_events(t, x, bg_clean, events, global_noise, signal_polarity)
- logger.success(f"Detection complete in {time.time() - start_time:.3f}s")
- logger.success(f"Found {len(events)} events")
# Create plot if requested
plot = None
@@ -1369,7 +1282,6 @@ def detect(
if plot_dir is not None:
plot.save(os.path.join(plot_dir, f"{name}_trace.png"))
- logger.info(f"Saved plot to {plot_dir}")
return {
"events": events,
@@ -1478,8 +1390,7 @@ def detect_from_wfm(
... )
>>> print(f"Found {len(results['events'])} events")
"""
- start_time = time.time()
- logger.info(f"Detecting events in Wfm file: {name}")
+ # Removed: start_time = time.time()
# Setup plot directory
if plot_dir is None and save_plots:
@@ -1507,7 +1418,6 @@ def detect_from_wfm(
)
# Load data
- logger.info("Loading Wfm file...")
t, x = load_data(name, sampling_interval, data_path, sidecar, crop)
# Apply resampling if requested
@@ -1515,12 +1425,10 @@ def detect_from_wfm(
from .resample import downsample_to_interval
current_interval = t[1] - t[0] if len(t) > 1 else sampling_interval
if target_sampling_interval != current_interval:
- logger.info(f"Resampling from {current_interval:.3e}s to {target_sampling_interval:.3e}s")
t, x = downsample_to_interval(t, x, target_sampling_interval)
sampling_interval = target_sampling_interval
# Run analysis pipeline
- logger.info("Running analysis pipeline...")
bg_initial = calculate_initial_background(t, x, smooth_n, filter_type)
global_noise = estimate_noise(x, bg_initial)
@@ -1541,8 +1449,6 @@ def detect_from_wfm(
analyze_events(t, x, bg_clean, events, global_noise, signal_polarity)
- logger.success(f"Detection complete in {time.time() - start_time:.3f}s")
- logger.success(f"Found {len(events)} events")
# Create and save plots
detection_threshold, keep_threshold = analyze_thresholds(
@@ -1566,7 +1472,6 @@ def detect_from_wfm(
event_plotter.plot_events_grid(max_events=16)
event_plotter.save(os.path.join(plot_dir, f"{name}_events.png"))
- logger.info(f"Saved plots to {plot_dir}")
return {
"events": events,
diff --git a/src/transivent/event_detector.py b/src/transivent/event_detector.py
index 3726b53..6b684d3 100644
--- a/src/transivent/event_detector.py
+++ b/src/transivent/event_detector.py
@@ -1,7 +1,8 @@
+import warnings
from typing import Optional, Tuple
import numpy as np
-from loguru import logger
+
from numba import njit
from .utils import validate_detection_inputs
@@ -228,7 +229,6 @@ def detect_events(
If global_noise is not provided or input arrays are invalid.
"""
if global_noise is None:
- logger.error("global_noise was not provided to detect_events.")
raise ValueError("global_noise must be provided")
# Validate and convert input arrays
@@ -266,7 +266,6 @@ def detect_events(
sampling_interval = time[1] - time[0] if len(time) > 1 else 0.0
events_array[i, 1] = time[-1] + sampling_interval
- logger.info(f"Raw detection found {len(events_array)} events")
return events_array, np.float32(global_noise)
@@ -299,7 +298,7 @@ def merge_overlapping_events(events: np.ndarray) -> np.ndarray:
# Validate events array format
events_array = np.asarray(events, dtype=np.float32)
if events_array.ndim != 2 or events_array.shape[1] != 2:
- logger.warning(
+ warnings.warn(
f"Validation Warning: Events array must have shape (n_events, 2), got {events_array.shape}. This may lead to unexpected behaviour."
)
# This specific check is critical for the Numba function's array indexing,
@@ -313,15 +312,14 @@ def merge_overlapping_events(events: np.ndarray) -> np.ndarray:
invalid_mask = events_array[:, 0] >= events_array[:, 1]
if np.any(invalid_mask):
invalid_indices = np.where(invalid_mask)[0]
- logger.warning(
+ warnings.warn(
f"Validation Warning: Invalid events found (start >= end) at indices: {invalid_indices}. This may lead to unexpected behaviour."
)
merged = merge_overlapping_events_numba(events_array)
- if len(merged) != len(events):
- logger.info(
- f"Merged {len(events)} → {len(merged)} events ({len(events) - len(merged)} overlaps resolved)"
- )
+ # Removed: if len(merged) != len(events):
+ # Removed:     logger.info(f"Merged {len(events)} → {len(merged)} events ({len(events) - len(merged)} overlaps resolved)")
return merged
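Note: by default a `warnings.warn` call is reported against the line inside transivent that raised it; passing `stacklevel=2` would attribute the report to the caller instead, which is usually more useful for validation warnings like the ones above. A small self-contained illustration (`check_events` is a hypothetical helper, not part of the module):

    import warnings

    def check_events(events) -> None:
        # stacklevel=2 attributes the warning to the caller's line
        if len(events) == 0:
            warnings.warn("no events supplied", stacklevel=2)

    check_events([])  # the warning report points at this line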
diff --git a/src/transivent/event_plotter.py b/src/transivent/event_plotter.py
index c86cb14..b819290 100644
--- a/src/transivent/event_plotter.py
+++ b/src/transivent/event_plotter.py
@@ -4,7 +4,7 @@ from typing import Any, Dict, List, Optional, Tuple, Union
import matplotlib.figure
import matplotlib.pyplot as plt
import numpy as np
-from loguru import logger
+import warnings
from scopekit.display_state import (
_create_time_formatter,
@@ -63,25 +63,25 @@ class EventPlotter:
self.events = events
if self.events is None or len(self.events) == 0:
- logger.warning("EventPlotter initialized but no events are available.")
+ warnings.warn("EventPlotter initialized but no events are available.")
self.events = np.array([]) # Ensure it's an empty array if no events
# Validate y_scale_mode
valid_modes = ["raw", "percent", "snr"]
if self.y_scale_mode not in valid_modes:
- logger.warning(
+ warnings.warn(
f"Invalid y_scale_mode '{self.y_scale_mode}'. Using 'raw'. Valid options: {valid_modes}"
)
self.y_scale_mode = "raw"
# Warn if scaling mode requires data that's not available
if self.y_scale_mode == "percent" and self.bg_clean is None:
- logger.warning(
+ warnings.warn(
"y_scale_mode='percent' requires bg_clean data. Falling back to 'raw' mode."
)
self.y_scale_mode = "raw"
elif self.y_scale_mode == "snr" and self.global_noise is None:
- logger.warning(
+ warnings.warn(
"y_scale_mode='snr' requires global_noise data. Falling back to 'raw' mode."
)
self.y_scale_mode = "raw"
@@ -99,7 +99,6 @@ class EventPlotter:
"""
if self.fig is not None:
self.fig.savefig(filepath)
- logger.info(f"EventPlotter figure saved to {filepath}")
def _extract_events_from_regions(self) -> Optional[np.ndarray]:
"""
@@ -232,11 +231,11 @@ class EventPlotter:
Index of the event to plot.
"""
if self.events is None or len(self.events) == 0:
- logger.warning("No events available to plot.")
+ warnings.warn("No events available to plot.")
return
if not (0 <= event_index < len(self.events)):
- logger.warning(
+ warnings.warn(
f"Event index {event_index} out of bounds. Total events: {len(self.events)}."
)
return
@@ -258,7 +257,7 @@ class EventPlotter:
x_event = x_raw[mask]
if not np.any(mask):
- logger.warning(
+ warnings.warn(
f"No data found for event {event_index} in time range [{t_start:.6f}, {t_end:.6f}]"
)
return
@@ -316,7 +315,7 @@ class EventPlotter:
label="Noise (±1σ)",
)
else:
- logger.warning(
+ warnings.warn(
"Clean background (bg_clean) not provided to EventPlotter, cannot plot."
)
@@ -358,7 +357,7 @@ class EventPlotter:
Maximum number of events to plot in the grid.
"""
if self.events is None or len(self.events) == 0:
- logger.warning("No events available to plot.")
+ warnings.warn("No events available to plot.")
return
# Limit number of events
@@ -483,7 +482,7 @@ class EventPlotter:
label="Noise (±1σ)",
)
else:
- logger.warning(
+ warnings.warn(
f"Background data not available for event {i + 1}. Ensure bg_clean is passed to EventPlotter."
)
diff --git a/src/transivent/event_processor.py b/src/transivent/event_processor.py
index cb6b5f7..734fae8 100644
--- a/src/transivent/event_processor.py
+++ b/src/transivent/event_processor.py
@@ -7,12 +7,14 @@ and visualize the results. The functions are designed to be used individually
for detailed control or together through convenience wrappers.
"""
+import warnings
+
from typing import Dict, List, Optional, Tuple, Union, Any
import matplotlib.pyplot as plt
import numpy as np
from joblib import Parallel, delayed
-from loguru import logger
+
from matplotlib.patches import Ellipse
from matplotlib.transforms import ScaledTranslation
from scipy.stats import norm
@@ -67,14 +70,13 @@ def extract_event_waveforms(
raise ValueError("Background array must have same length as signal array")
waveforms = []
- logger.info(f"Extracting waveforms for {len(events)} events")
for i, (t_start, t_end) in enumerate(events):
# Extract indices for this event
mask = (t >= t_start) & (t < t_end)
if not np.any(mask):
- logger.warning(f"Event {i+1}: No data found in time range [{t_start:.6f}, {t_end:.6f}]")
+ warnings.warn(f"Event {i+1}: No data found in time range [{t_start:.6f}, {t_end:.6f}]")
continue
event_signal = x[mask].copy()
@@ -85,7 +87,6 @@ def extract_event_waveforms(
waveforms.append(event_signal)
- logger.info(f"Successfully extracted {len(waveforms)} event waveforms")
return waveforms
@@ -144,7 +145,6 @@ def calculate_msd_parallel(
taus (lag times in seconds), msds (MSD values), counts (number of pairs).
"""
data = np.asarray(data, dtype=np.float64)
- logger.debug(f"Calculating MSD for {len(data)} points with max_lag={max_lag}")
# Parallel calculation of MSD for each lag
results = Parallel(n_jobs=n_jobs)(
@@ -229,7 +229,7 @@ def fit_diffusion_linear(
mask = (taus <= time_limit) & (taus > 0)
if np.sum(mask) < min_points:
- logger.warning(
+ warnings.warn(
f"Insufficient points for fit: {np.sum(mask)} < {min_points}. "
f"Consider increasing time_limit or decreasing max_lag."
)
@@ -242,10 +242,7 @@ def fit_diffusion_linear(
slope, intercept = np.polyfit(taus_fit, msds_fit, 1)
diffusion_coeff = slope / 2
- logger.debug(
- f"Linear fit: slope={slope:.3e}, intercept={intercept:.3e}, "
- f"D={diffusion_coeff:.3e} m²/s"
- )
+ # Removed: print(f"Linear fit: slope={slope:.3e}, intercept={intercept:.3e}, D={diffusion_coeff:.3e} m²/s")
return diffusion_coeff
@@ -282,7 +279,7 @@ def calculate_diffusion_statistics(
# Remove any invalid values
valid_mask = np.isfinite(log_D) & np.isfinite(log_acf)
if not np.all(valid_mask):
- logger.warning(
+ warnings.warn(
f"Found {np.sum(~valid_mask)} invalid values in diffusion data"
)
log_D = log_D[valid_mask]
@@ -307,7 +304,7 @@ def calculate_diffusion_statistics(
"eigenvectors": eigenvectors,
})
else:
- logger.warning("Insufficient data for covariance analysis")
+ warnings.warn("Insufficient data for covariance analysis")
stats.update({
"covariance_matrix": np.array([[np.nan, np.nan], [np.nan, np.nan]]),
"eigenvalues": np.array([np.nan, np.nan]),
@@ -584,7 +581,6 @@ def process_events_for_diffusion(
- 'statistics': Statistical analysis results
- 'msd_results': Full MSD results for each event (optional)
"""
- logger.info(f"Processing events for diffusion analysis: {name}")
# Load data if not provided
if t is None or x is None:
@@ -593,7 +589,7 @@ def process_events_for_diffusion(
# Detect events if not provided
if events is None:
- logger.warning("No events provided. You should run event detection first.")
+ warnings.warn("No events provided. You should run event detection first.")
return {
"diffusion_coeffs": np.array([]),
"acf_values": np.array([]),
@@ -603,7 +599,7 @@ def process_events_for_diffusion(
# Calculate background if not provided
if bg_clean is None and subtract_background:
- logger.warning("No background provided. Events will not be background-subtracted.")
+ warnings.warn("No background provided. Events will not be background-subtracted.")
bg_clean = None
# Extract event waveforms
@@ -612,7 +608,7 @@ def process_events_for_diffusion(
)
if not waveforms:
- logger.warning("No valid events found for diffusion analysis")
+ warnings.warn("No valid events found for diffusion analysis")
return {
"diffusion_coeffs": np.array([]),
"acf_values": np.array([]),
@@ -624,7 +620,6 @@ def process_events_for_diffusion(
diffusion_coeffs = []
acf_values = []
- logger.info(f"Processing {len(waveforms)} events for diffusion analysis")
for i, wf in enumerate(waveforms):
# MSD calculation
taus, msds, counts = calculate_msd_parallel(
@@ -651,9 +646,6 @@ def process_events_for_diffusion(
"statistics": statistics,
}
- logger.success(
- f"Processed {len(waveforms)} events: "
- f"mean D = {statistics['mean_diffusion']:.3e} ± {statistics['std_diffusion']:.3e} m²/s"
- )
+ # Removed: print(f"Processed {len(waveforms)} events: mean D = {statistics['mean_diffusion']:.3e} ± {statistics['std_diffusion']:.3e} m²/s")
- return result
\ No newline at end of file
+ return result
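Note: with loguru gone, tests can assert on these messages directly through the stdlib. A sketch of capturing a warning in a test, using the module's own "No events provided" message (the inline `warnings.warn` stands in for a call into `process_events_for_diffusion`):

    import warnings

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")  # record every occurrence
        warnings.warn("No events provided. You should run event detection first.")
    assert any("No events provided" in str(w.message) for w in caught)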
diff --git a/src/transivent/io.py b/src/transivent/io.py
index 07484e6..5eb46f6 100644
--- a/src/transivent/io.py
+++ b/src/transivent/io.py
@@ -1,10 +1,11 @@
+import warnings
import os
import xml.etree.ElementTree as ET
from typing import Any, Dict, List, Optional, Tuple, Generator
from warnings import warn
import numpy as np
-from loguru import logger
+
def _get_xml_sidecar_path(
@@ -122,14 +123,14 @@ def get_waveform_params(
for prop in root.iter("Prop"):
if prop.attrib is None:
- logger.warning(f"Found Prop element with no attributes in {sidecar_path}")
+ warnings.warn(f"Found Prop element with no attributes in {sidecar_path}")
continue
name = prop.attrib.get("Name", "")
value = prop.attrib.get("Value", "")
if not name:
- logger.warning(
+ warnings.warn(
f"Found Prop element with empty Name attribute in {sidecar_path}"
)
continue
@@ -149,7 +150,7 @@ def get_waveform_params(
signal_resolution = float(value) # store val even if Resolution is found
elif name == "ByteOrder":
if not value:
- logger.warning(
+ warnings.warn(
f"Empty ByteOrder value in {sidecar_path}, using default LSB"
)
continue
@@ -157,7 +158,7 @@ def get_waveform_params(
found_params.add("ByteOrder")
elif name == "SignalFormat":
if not value:
- logger.warning(
+ warnings.warn(
f"Empty SignalFormat value in {sidecar_path}, using default float32"
)
continue
@@ -168,7 +169,7 @@ def get_waveform_params(
elif "INT32" in value:
params["signal_format"] = "int32"
else:
- logger.warning(
+ warnings.warn(
f"Unknown SignalFormat '{value}' in {sidecar_path}, using default float32"
)
found_params.add("SignalFormat")
@@ -176,7 +177,7 @@ def get_waveform_params(
params["signal_hardware_record_length"] = int(value)
found_params.add("SignalHardwareRecordLength")
except ValueError as e:
- logger.warning(
+ warnings.warn(
f"Failed to parse {name} value '{value}' in {sidecar_path}: {e}"
)
continue
@@ -193,17 +194,16 @@ def get_waveform_params(
and "Resolution" in found_params
and not np.isclose(signal_resolution, resolution, rtol=1e-2, atol=1e-9)
):
- logger.warning(
+ warnings.warn(
f"FYI: 'Resolution' ({resolution}) != SignalResolution' ({signal_resolution}) found in {sidecar_path}. "
f"Using 'Resolution' ({signal_resolution}). Diff: {abs(signal_resolution - resolution)}"
)
# Log what we found for debugging
- logger.debug(f"XML parsing found parameters: {found_params}")
# Validate sampling interval if found
if params["sampling_interval"] is not None and params["sampling_interval"] <= 0:
- logger.warning(
+ warnings.warn(
f"Invalid sampling interval {params['sampling_interval']} in {sidecar_path}. "
"This may lead to issues with time array generation."
)
@@ -280,7 +280,6 @@ def rd(
)
# log info about what we're reading and the parameters
rel_fp = os.path.relpath(fp, os.getcwd()) if os.path.isabs(fp) else fp
- logger.info(f"Reading binary file: {rel_fp}")
if sidecar:
sidecar_path = _get_xml_sidecar_path(os.path.basename(fp), data_path, sidecar)
rel_sidecar = (
@@ -288,10 +287,6 @@ def rd(
if os.path.isabs(sidecar_path)
else sidecar_path
)
- logger.info(f"--Using sidecar XML: {rel_sidecar}")
- logger.info(f"--Sampling interval: {si}")
- logger.info(f"--Byte order: {params['byte_order']}")
- logger.info(f"--Signal format: {params['signal_format']}")
# Determine dtype
dtype = np.float32
if params["signal_format"] == "int16":
@@ -306,11 +301,9 @@ def rd(
# Read first two bytes into two 32-bit unsigned integers,
header_bytes = f.read(8)
elsize, record_length_from_header = struct.unpack('<II', header_bytes)
- logger.success(f"Bin header: data el. size: {elsize} (bytes)")
- logger.success(f"Bin header: length: {record_length_from_header} ({elsize}-byte nums)")
params["record_length_from_header"] = record_length_from_header
if params["signal_hardware_record_length"] != record_length_from_header:
- logger.warning(
+ warnings.warn(
f"SignalHardwareRecordLength ({params['signal_hardware_record_length']}) "
f"does not match header record length ({record_length_from_header}) in {rel_fp}. "
"This may indicate a mismatch in expected data length."
@@ -324,7 +317,7 @@ def rd(
if expected_length is not None:
if len(arr) != expected_length:
# raise RuntimeError(
- logger.warning(
+ warnings.warn(
f"Data length mismatch in {rel_fp}: "
f"expected {expected_length} points from SignalHardwareRecordLength, "
f"but read {len(arr)} points from binary file"
@@ -347,7 +340,7 @@ def rd(
t = np.linspace(0, (num_points - 1) * si, num_points, dtype=np.float32)
else:
t = np.array([], dtype=np.float32)
- logger.warning(
+ warnings.warn(
f"Generated an empty time array for file {rel_fp}. "
f"Length of signal: {len(x)}, sampling interval: {si}. "
"This might indicate an issue with input data or sampling interval."
@@ -415,25 +408,22 @@ def rd_chunked(
# Read header
header_bytes = f.read(header_size_bytes)
if len(header_bytes) < header_size_bytes:
- logger.warning("Could not read full header from binary file.")
+ warnings.warn("Could not read full header from binary file.")
return
import struct
elsize, record_length_from_header = struct.unpack("<II", header_bytes)
- logger.success(f"Bin header: data el. size: {elsize} (bytes)")
- logger.success(
- f"Bin header: length: {record_length_from_header} ({elsize}-byte nums)"
- )
+ # Removed: print(f"Bin header: length: {record_length_from_header} ({elsize}-byte nums)")
total_points = params.get("signal_hardware_record_length")
if total_points is None:
total_points = record_length_from_header
- logger.warning(
+ warnings.warn(
f"SignalHardwareRecordLength not found. Using length from header: {total_points} points."
)
elif total_points != record_length_from_header:
- logger.warning(
+ warnings.warn(
f"SignalHardwareRecordLength ({total_points}) "
f"does not match header record length ({record_length_from_header}) in {fp}. "
"Using header length."
diff --git a/src/transivent/utils.py b/src/transivent/utils.py
index c58ff68..77dda73 100644
--- a/src/transivent/utils.py
+++ b/src/transivent/utils.py
@@ -1,3 +1,5 @@
"""
Utility functions shared across modules.
"""
+
+import warnings
@@ -77,18 +78,18 @@ def validate_detection_inputs(
ValueError
If inputs are invalid.
"""
- from loguru import logger
+
# Check array lengths
if not (len(time) == len(signal) == len(bg)):
- logger.warning(
+ warnings.warn(
f"Validation Warning: Array length mismatch: time={len(time)}, signal={len(signal)}, bg={len(bg)}. "
"This may lead to unexpected behaviour."
)
# Check for empty arrays
if len(time) == 0:
- logger.warning(
+ warnings.warn(
"Validation Warning: Input arrays are empty. This may lead to unexpected behaviour."
)
@@ -101,30 +102,30 @@ def validate_detection_inputs(
if not np.all(np.diff(time) > tolerance):
# Log the problematic differences for debugging
problematic_diffs = np.diff(time)[np.diff(time) <= tolerance]
- logger.warning(
+ warnings.warn(
f"Validation Warning: Time array is not strictly monotonic increasing within tolerance {tolerance}. "
f"Problematic diffs (first 10): {problematic_diffs[:10]}. This may lead to unexpected behaviour."
)
# Check parameter validity
if snr_threshold <= 0:
- logger.warning(
+ warnings.warn(
f"Validation Warning: SNR threshold must be positive, got {snr_threshold}. This may lead to unexpected behaviour."
)
if min_event_len <= 0:
- logger.warning(
+ warnings.warn(
f"Validation Warning: Minimum event length must be positive, got {min_event_len}. This may lead to unexpected behaviour."
)
if global_noise <= 0:
- logger.warning(
+ warnings.warn(
f"Validation Warning: Global noise must be positive, got {global_noise}. This may lead to unexpected behaviour."
)
# Check for NaN/inf values
for name, arr in [("time", time), ("signal", signal), ("bg", bg)]:
if not np.all(np.isfinite(arr)):
- logger.warning(
+ warnings.warn(
f"Validation Warning: {name} array contains NaN or infinite values. This may lead to unexpected behaviour."
- )
\ No newline at end of file
+ )
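Note: the validation warnings in utils.py are advisory ("may lead to unexpected behaviour") rather than fatal. During development they can be promoted to hard errors via the warning filters, either in code or from the command line (`run_analysis.py` is a hypothetical script name):

    import warnings

    # Turn every UserWarning (including these validation warnings) into an exception:
    warnings.simplefilter("error", UserWarning)

    # Command-line equivalent:
    #   python -W error::UserWarning run_analysis.py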
diff --git a/tests/test_diffusion_simple.py b/tests/test_diffusion_simple.py
index 96e77df..4250320 100644
--- a/tests/test_diffusion_simple.py
+++ b/tests/test_diffusion_simple.py
@@ -6,7 +6,7 @@ Simple test for diffusion processing with clear events.
import numpy as np
import matplotlib.pyplot as plt
-from transivent import (
+from transivent.event_processor import (
extract_event_waveforms,
calculate_msd_parallel,
calculate_acf,
diff --git a/tests/test_event_processor.py b/tests/test_event_processor.py
index cd23334..44da603 100644
--- a/tests/test_event_processor.py
+++ b/tests/test_event_processor.py
@@ -5,7 +5,7 @@ import numpy as np
import matplotlib.pyplot as plt
# Test the event_processor module
-from transivent import (
+from transivent.event_processor import (
extract_event_waveforms,
calculate_msd_parallel,
calculate_acf,
diff --git a/tests/test_simple.py b/tests/test_simple.py
index e86dbd6..e7a023b 100644
--- a/tests/test_simple.py
+++ b/tests/test_simple.py
@@ -8,7 +8,7 @@ import numpy as np
# Test the event_processor module
try:
- from transivent import (
+ from transivent.event_processor import (
extract_event_waveforms,
calculate_msd_parallel,
calculate_acf,
@@ -18,6 +18,8 @@ try:
print("✓ Successfully imported event_processor functions")
except ImportError as e:
print(f"✗ Import error: {e}")
+ import traceback
+ traceback.print_exc()
sys.exit(1)
def test_extract_event_waveforms():
diff --git a/uv.lock b/uv.lock
index 14bdba5..755ffb0 100644
--- a/uv.lock
+++ b/uv.lock
@@ -896,19 +896,6 @@ wheels = [
]
[[package]]
-name = "loguru"
-version = "0.7.3"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "colorama", marker = "sys_platform == 'win32'" },
- { name = "win32-setctime", marker = "sys_platform == 'win32'" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/3a/05/a1dae3dffd1116099471c643b8924f5aa6524411dc6c63fdae648c4f1aca/loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6", size = 63559, upload-time = "2024-12-06T11:20:56.608Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/0c/29/0348de65b8cc732daa3e33e67806420b2ae89bdce2b04af740289c5c6c8c/loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c", size = 61595, upload-time = "2024-12-06T11:20:54.538Z" },
-]
-
-[[package]]
name = "matplotlib"
version = "3.7.5"
source = { registry = "https://pypi.org/simple" }
@@ -1883,6 +1870,7 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/f2/57/48985490c01584e00b70040ec0eb02dfe950471097201acb1b65deb633e5/pyqt6-6.10.0.tar.gz", hash = "sha256:710ecfd720d9a03b2c684881ae37f528e11d17e8f1bf96431d00a6a73f308e36", size = 1079921, upload-time = "2025-10-22T12:04:05.717Z" }
wheels = [
+ { url = "https://files.pythonhosted.org/packages/f8/99/ca0386e84e39e4f2ef30322f1d39d515f2a3efbc542cccb33e65a1cf7c46/pyqt6-6.10.0-1-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:54b6b022369e4e6ade8cf79c0f988558839df7b2c285f814b4567d15a0fcb756", size = 37745881, upload-time = "2025-10-24T18:33:56.106Z" },
{ url = "https://files.pythonhosted.org/packages/66/48/be73fb730c6f617f456ab73150db384b17a7a08394d7e2ded55f42de8a7b/pyqt6-6.10.0-cp39-abi3-macosx_10_14_universal2.whl", hash = "sha256:0eb82f152a83a8ae39f7d3ba580829ff7c0e8179d19d70f396853c10c8ddc5ac", size = 59987475, upload-time = "2025-10-22T12:03:47.661Z" },
{ url = "https://files.pythonhosted.org/packages/00/31/e55f7b718df92423b5af3f0fe11e770e56d7576c03adc6b6866b0e8beb46/pyqt6-6.10.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:43e94a0ad4713055b47b4676d23432349845729912e4f3d20ac95935931c5e6f", size = 39028922, upload-time = "2025-10-22T12:03:51.916Z" },
{ url = "https://files.pythonhosted.org/packages/cf/3d/48465697f530addaeaf41f4d5b3e54108ebabd18ab6da19823de5099e807/pyqt6-6.10.0-cp39-abi3-manylinux_2_39_aarch64.whl", hash = "sha256:357da0f1465557dde249a31bc1f152320b7628a644e1d55d2db09b635394f39f", size = 40619932, upload-time = "2025-10-22T12:03:56.115Z" },
@@ -2340,7 +2328,6 @@ source = { editable = "." }
dependencies = [
{ name = "joblib", version = "1.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" },
{ name = "joblib", version = "1.5.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" },
- { name = "loguru" },
{ name = "matplotlib", version = "3.7.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" },
{ name = "matplotlib", version = "3.9.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.9.*'" },
{ name = "matplotlib", version = "3.10.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
@@ -2366,7 +2353,6 @@ dev = [
[package.metadata]
requires-dist = [
{ name = "joblib" },
- { name = "loguru" },
{ name = "matplotlib" },
{ name = "numba" },
{ name = "pillow" },
@@ -2403,15 +2389,6 @@ wheels = [
]
[[package]]
-name = "win32-setctime"
-version = "1.2.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/b3/8f/705086c9d734d3b663af0e9bb3d4de6578d08f46b1b101c2442fd9aecaa2/win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0", size = 4867, upload-time = "2024-12-07T15:28:28.314Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/e1/07/c6fe3ad3e685340704d314d765b7912993bcb8dc198f0e7a89382d37974b/win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390", size = 4083, upload-time = "2024-12-07T15:28:26.465Z" },
-]
-
-[[package]]
name = "zipp"
version = "3.20.2"
source = { registry = "https://pypi.org/simple" }