
transivent

transivent is a Python library for detecting and analyzing transient events (e.g. spikes) in time-series data. It provides a flexible, configurable pipeline for processing waveform data, identifying events based on signal-to-noise ratio, and visualizing the results. The library is designed to handle large files efficiently through chunked processing.

Quick Start

The primary entry point for analysis is the transivent.process_file function. The analysis pipeline is controlled via a configuration dictionary, and the library provides functions for reading waveform data from binary files with XML sidecars.

Here is a brief example based on example.py:

from transivent import process_file, get_waveform_params

# 1. Define the analysis configuration
CONFIG = {
    "DATA_PATH": "path/to/your/data/",
    "SMOOTH_WIN_T": 10e-3,
    "DETECTION_SNR": 3,
    "MIN_EVENT_KEEP_SNR": 5,
    "SIGNAL_POLARITY": 1,
    "CHUNK_SIZE": 1_000_000,  # Set to a value to enable chunking
    # ... and more
}

# 2. Define the measurement file
measurement = {
    "data": "RefCurve_2025-07-17_0_065114.Wfm.bin",
}

# 3. Merge configs and get waveform parameters
config = {**CONFIG, **measurement}
params = get_waveform_params(
    config["data"], data_path=config["DATA_PATH"]
)

# 4. Run the processing pipeline
process_file(
    name=config["data"],
    data_path=config["DATA_PATH"],
    sampling_interval=params["sampling_interval"],
    smooth_win_t=config.get("SMOOTH_WIN_T"),
    detection_snr=config.get("DETECTION_SNR"),
    min_event_keep_snr=config.get("MIN_EVENT_KEEP_SNR"),
    signal_polarity=config.get("SIGNAL_POLARITY"),
    chunk_size=config.get("CHUNK_SIZE"),
    # ... other parameters
)

API Documentation

The public interface of transivent is defined in the package's top-level __init__.py.

analyze_thresholds

def analyze_thresholds(x: np.ndarray, bg_clean: np.ndarray, global_noise: np.float32, detection_snr: float, min_event_keep_snr: float, signal_polarity: int) -> Tuple[np.ndarray, np.ndarray]

Analyze threshold statistics and create threshold arrays.
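
A minimal sketch on synthetic data. The assumption that the returned pair is (detection_threshold, keep_threshold) follows the naming used by create_oscilloscope_plot below; it is not stated in the signature:

import numpy as np
from transivent import analyze_thresholds

# Synthetic trace and a flat background estimate, for illustration only
x = np.random.normal(0.0, 0.1, 50_000).astype(np.float32)
bg_clean = np.zeros_like(x)
global_noise = np.float32(0.1)

# Assumed return order: per-sample detection and keep thresholds
detection_threshold, keep_threshold = analyze_thresholds(
    x, bg_clean, global_noise,
    detection_snr=3.0,
    min_event_keep_snr=6.0,
    signal_polarity=-1,
)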

calculate_initial_background

def calculate_initial_background(t: np.ndarray, x: np.ndarray, smooth_n: int, filter_type: str = "gaussian") -> np.ndarray

Calculate initial background estimate.
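
For example, on a synthetic trace; smooth_n is taken here to be the smoothing window length in samples, per calculate_smoothing_parameters below:

import numpy as np
from transivent import calculate_initial_background

sampling_interval = 1e-6
t = np.arange(100_000) * sampling_interval
x = np.random.normal(0.0, 0.1, t.size).astype(np.float32)

# Assumed: smooth_n is the smoothing window length in samples
bg_initial = calculate_initial_background(t, x, smooth_n=1001, filter_type="gaussian")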

calculate_smoothing_parameters

def calculate_smoothing_parameters(sampling_interval: float, smooth_win_t: Optional[float], smooth_win_f: Optional[float], min_event_t: float, detection_snr: float, min_event_keep_snr: float, widen_frac: float, signal_polarity: int) -> Tuple[int, int]

Calculate smoothing window size and minimum event length in samples.
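
All parameters are required. A sketch converting time-domain settings into sample counts; the return order (smooth_n, min_event_len) is assumed from the docstring wording:

from transivent import calculate_smoothing_parameters

# Assumed return order, following the docstring:
# (smoothing window in samples, minimum event length in samples)
smooth_n, min_event_len = calculate_smoothing_parameters(
    sampling_interval=1e-6,
    smooth_win_t=10e-3,   # time-domain window; alternatively pass smooth_win_f
    smooth_win_f=None,
    min_event_t=0.75e-6,
    detection_snr=3.0,
    min_event_keep_snr=6.0,
    widen_frac=10.0,
    signal_polarity=-1,
)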

configure_logging

def configure_logging(log_level: str = "INFO") -> None

Configure loguru logging with specified level.
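
For example, to make the pipeline more verbose:

from transivent import configure_logging

configure_logging(log_level="DEBUG")  # any loguru level name, e.g. "INFO", "WARNING"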

create_oscilloscope_plot

def create_oscilloscope_plot(t: np.ndarray, x: np.ndarray, bg_initial: np.ndarray, bg_clean: np.ndarray, events: np.ndarray, detection_threshold: np.ndarray, keep_threshold: np.ndarray, name: str, detection_snr: float, min_event_keep_snr: float, max_plot_points: int, envelope_mode_limit: float, smooth_n: int, global_noise: Optional[np.float32] = None) -> OscilloscopePlot

Create oscilloscope plot with all visualization elements.
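
A self-contained construction sketch on synthetic data, chaining the upstream helpers. Using bg_initial as a stand-in for bg_clean, and treating detect_events' second return value as the global noise estimate, are assumptions for illustration:

import numpy as np
from transivent import (
    analyze_thresholds,
    calculate_initial_background,
    create_oscilloscope_plot,
    detect_events,
)

# Synthetic trace plus the upstream pipeline outputs the plot needs
t = np.arange(100_000) * 1e-6
x = np.random.normal(0.0, 0.1, t.size).astype(np.float32)
bg_initial = calculate_initial_background(t, x, smooth_n=1001)
bg_clean = bg_initial  # stand-in; the real pipeline refines this
events, global_noise = detect_events(t, x, bg_clean)  # assumed noise return
detection_threshold, keep_threshold = analyze_thresholds(
    x, bg_clean, global_noise,
    detection_snr=3.0, min_event_keep_snr=6.0, signal_polarity=-1,
)

osc_plot = create_oscilloscope_plot(
    t, x, bg_initial, bg_clean, events,
    detection_threshold, keep_threshold,
    name="synthetic example",
    detection_snr=3.0,
    min_event_keep_snr=6.0,
    max_plot_points=10_000,
    envelope_mode_limit=10e-3,
    smooth_n=1001,
    global_noise=global_noise,
)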

get_final_events

def get_final_events(state: Dict[str, Any]) -> np.ndarray

Extract and finalise the list of detected events from the state.

initialize_state

def initialize_state(config: Dict[str, Any]) -> Dict[str, Any]

Initialise the state dictionary for processing.

process_chunk

def process_chunk(data: Tuple[np.ndarray, np.ndarray], state: Dict[str, Any]) -> Dict[str, Any]

Process a single data chunk to find events.
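
initialize_state, process_chunk, and get_final_events are the pieces process_file drives internally for chunked processing. A minimal sketch of running them by hand with rd_chunked; the keys expected in the config dictionary are an assumption here (mirroring process_file's parameters), not a documented contract:

from transivent import initialize_state, process_chunk, get_final_events, rd_chunked

# Assumed: the state is seeded from the same parameters process_file accepts
state = initialize_state({
    "sampling_interval": 1e-6,
    "detection_snr": 3.0,
    "min_event_keep_snr": 6.0,
    "signal_polarity": -1,
})

for t_chunk, x_chunk in rd_chunked(
    "RefCurve_2025-07-17_0_065114.Wfm.bin",
    chunk_size=1_000_000,
    data_path="path/to/your/data/",
):
    state = process_chunk((t_chunk, x_chunk), state)

events = get_final_events(state)  # np.ndarray of detected events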

process_file

def process_file(name: str, sampling_interval: float, data_path: str, smooth_win_t: Optional[float] = None, smooth_win_f: Optional[float] = None, detection_snr: float = 3.0, min_event_keep_snr: float = 6.0, min_event_t: float = 0.75e-6, widen_frac: float = 10.0, signal_polarity: int = -1, max_plot_points: int = 10000, envelope_mode_limit: float = 10e-3, sidecar: Optional[str] = None, crop: Optional[List[int]] = None, yscale_mode: str = "snr", show_plots: bool = True, filter_type: str = "gaussian", filter_order: int = 2, chunk_size: Optional[int] = None) -> None

Process a single waveform file for event detection.
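
This is the entry point used in the Quick Start. Only name, sampling_interval, and data_path are required; every other parameter takes the default shown in the signature above:

from transivent import process_file, get_waveform_params

params = get_waveform_params(
    "RefCurve_2025-07-17_0_065114.Wfm.bin", data_path="path/to/your/data/"
)
process_file(
    name="RefCurve_2025-07-17_0_065114.Wfm.bin",
    sampling_interval=params["sampling_interval"],
    data_path="path/to/your/data/",
)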

EventPlotter

class EventPlotter:
    def __init__(self, osc_plot: OscilloscopePlot, events: Optional[np.ndarray] = None, trace_idx: int = 0, bg_clean: Optional[np.ndarray] = None, global_noise: Optional[np.float32] = None, y_scale_mode: str = "raw")

Provides utility functions for plotting individual events or event grids.
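
A construction sketch reusing osc_plot, events, bg_clean, and global_noise from the create_oscilloscope_plot example above. Only initialization is shown, since the plotting methods are not listed here:

from transivent import EventPlotter

plotter = EventPlotter(
    osc_plot,            # OscilloscopePlot from create_oscilloscope_plot
    events=events,
    bg_clean=bg_clean,
    global_noise=global_noise,
    y_scale_mode="snr",  # or "raw" (the default)
)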

detect_events

def detect_events(time: np.ndarray, signal: np.ndarray, bg: np.ndarray, snr_threshold: np.float32 = np.float32(2.0), min_event_len: int = 20, min_event_amp: np.float32 = np.float32(0.0), widen_frac: np.float32 = np.float32(0.5), global_noise: Optional[np.float32] = None, signal_polarity: int = -1) -> Tuple[np.ndarray, np.float32]

Detect events in signal above background with specified thresholds.
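
A sketch on synthetic data with an injected negative-going spike (matching signal_polarity=-1). The row layout of the returned events array is not documented here, and treating the returned np.float32 as the noise estimate is an assumption:

import numpy as np
from transivent import detect_events

sampling_interval = 1e-6
time = np.arange(100_000) * sampling_interval
signal = np.random.normal(0.0, 0.1, time.size).astype(np.float32)
signal[40_000:40_100] -= 2.0   # inject a negative-going spike
bg = np.zeros_like(signal)     # flat background for illustration

# Assumed: the np.float32 returned alongside the events is the noise estimate
events, noise = detect_events(
    time, signal, bg,
    snr_threshold=np.float32(3.0),
    min_event_len=20,
    signal_polarity=-1,
)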

merge_overlapping_events

def merge_overlapping_events(events: np.ndarray) -> np.ndarray

Merge overlapping events.
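
For example, under a hypothetical layout where the first two columns of each row are start and end sample indices:

import numpy as np
from transivent import merge_overlapping_events

# Hypothetical layout: one event per row, columns 0 and 1 = start/end index
events = np.array([[100, 250], [200, 400], [900, 950]])
merged = merge_overlapping_events(events)
# Expected under this assumed layout: [[100, 400], [900, 950]]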

get_waveform_params

def get_waveform_params(bin_filename: str, data_path: Optional[str] = None, sidecar: Optional[str] = None) -> Dict[str, Any]

Parse XML sidecar file to extract waveform parameters.
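
The Quick Start shows the common use: looking up the sampling interval for a .Wfm.bin file from its XML sidecar. Apart from "sampling_interval", the keys of the returned dictionary are not documented here:

from transivent import get_waveform_params

params = get_waveform_params(
    "RefCurve_2025-07-17_0_065114.Wfm.bin",
    data_path="path/to/your/data/",
)
print(params["sampling_interval"])  # passed on to process_file / rd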

rd

def rd(filename: str, sampling_interval: Optional[float] = None, data_path: Optional[str] = None, sidecar: Optional[str] = None, crop: Optional[List[int]] = None) -> Tuple[np.ndarray, np.ndarray]

Read waveform binary file using sidecar XML for parameters.
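
For example, reading only a window of the file; crop is assumed (from its List[int] type) to be a [start, stop] sample range:

from transivent import rd

# Assumed: crop gives a [start, stop] sample range
t, x = rd(
    "RefCurve_2025-07-17_0_065114.Wfm.bin",
    data_path="path/to/your/data/",
    crop=[0, 1_000_000],
)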

rd_chunked

def rd_chunked(filename: str, chunk_size: int, sampling_interval: Optional[float] = None, data_path: Optional[str] = None, sidecar: Optional[str] = None) -> Generator[Tuple[np.ndarray, np.ndarray], None, None]

Read waveform binary file in chunks using sidecar XML for parameters. This is a generator function that yields chunks of data.
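
Because rd_chunked is a generator, only one chunk needs to be resident in memory at a time. For example, scanning a large file for its peak amplitude:

import numpy as np
from transivent import rd_chunked

peak = -np.inf
for t_chunk, x_chunk in rd_chunked(
    "RefCurve_2025-07-17_0_065114.Wfm.bin",
    chunk_size=1_000_000,
    data_path="path/to/your/data/",
):
    peak = max(peak, float(x_chunk.max()))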