-rw-r--r--  .gitignore                          201
-rw-r--r--  LICENSE                             674
-rw-r--r--  README.md                           144
-rw-r--r--  example.py                          136
-rw-r--r--  pyproject.toml                       23
-rw-r--r--  src/transivent/__init__.py           36
-rw-r--r--  src/transivent/analysis.py         1238
-rw-r--r--  src/transivent/event_detector.py    404
-rw-r--r--  src/transivent/event_plotter.py     524
-rw-r--r--  src/transivent/io.py                456
10 files changed, 3836 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..25637f0
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,201 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[codz]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py.cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+# Pipfile.lock
+
+# UV
+# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# uv.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+# poetry.lock
+# poetry.toml
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
+# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
+# pdm.lock
+# pdm.toml
+.pdm-python
+.pdm-build/
+
+# pixi
+# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
+# pixi.lock
+# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
+# in the .venv directory. It is recommended not to include this directory in version control.
+.pixi
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# Redis
+*.rdb
+*.aof
+*.pid
+
+# RabbitMQ
+mnesia/
+rabbitmq/
+rabbitmq-data/
+
+# ActiveMQ
+activemq-data/
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.envrc
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# .idea/
+
+# Visual Studio Code
+# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
+# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
+# and can be added to the global gitignore or merged into this file. However, if you prefer,
+# you could uncomment the following to ignore the entire vscode folder
+# .vscode/
+
+# Ruff stuff:
+.ruff_cache/
+
+# PyPI configuration file
+.pypirc
+
+# Windows Zone Identifier
+*:Zone.Identifier
+
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..f288702
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,674 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users. We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors. You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights. Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received. You must make sure that they, too, receive
+or can get the source code. And you must show them these terms so they
+know their rights.
+
+ Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+ For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software. For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+ Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so. This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software. The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable. Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products. If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+ Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary. To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Use with the GNU Affero General Public License.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+ <program> Copyright (C) <year> <name of author>
+ This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+ The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..0ff20d5
--- /dev/null
+++ b/README.md
@@ -0,0 +1,144 @@
+# transivent
+
+`transivent` is a Python library for detecting and analysing transient events (spike-like features) in time-series data. It provides a flexible, configurable pipeline for processing waveform data, identifying events based on signal-to-noise ratio, and visualising the results. Large files are handled efficiently through chunked processing.
+
+## Quick Start
+
+The primary entry point for analysis is the `transivent.process_file` function. The analysis pipeline is controlled via a configuration dictionary, and the library provides functions to read waveform data from binary files with XML sidecars.
+
+Here is a brief example based on `example.py`:
+
+```python
+from transivent import process_file, get_waveform_params
+
+# 1. Define the analysis configuration
+CONFIG = {
+ "DATA_PATH": "path/to/your/data/",
+ "SMOOTH_WIN_T": 10e-3,
+ "DETECTION_SNR": 3,
+ "MIN_EVENT_KEEP_SNR": 5,
+ "SIGNAL_POLARITY": 1,
+ "CHUNK_SIZE": 1_000_000, # Set to a value to enable chunking
+ # ... and more
+}
+
+# 2. Define the measurement file
+measurement = {
+ "data": "RefCurve_2025-07-17_0_065114.Wfm.bin",
+}
+
+# 3. Merge configs and get waveform parameters
+config = {**CONFIG, **measurement}
+params = get_waveform_params(
+ config["data"], data_path=config["DATA_PATH"]
+)
+
+# 4. Run the processing pipeline
+process_file(
+ name=config["data"],
+ data_path=config["DATA_PATH"],
+ sampling_interval=params["sampling_interval"],
+ smooth_win_t=config.get("SMOOTH_WIN_T"),
+ detection_snr=config.get("DETECTION_SNR"),
+ min_event_keep_snr=config.get("MIN_EVENT_KEEP_SNR"),
+ signal_polarity=config.get("SIGNAL_POLARITY"),
+ chunk_size=config.get("CHUNK_SIZE"),
+ # ... other parameters
+)
+```
+
+## API Documentation
+
+The public interface of `transivent` is defined in the package's top-level `__init__.py`.
+
+---
+### `analyze_thresholds`
+```python
+def analyze_thresholds(x: np.ndarray, bg_clean: np.ndarray, global_noise: np.float32, detection_snr: float, min_event_keep_snr: float, signal_polarity: int) -> Tuple[np.ndarray, np.ndarray]
+```
+Analyze threshold statistics and create threshold arrays.
+
+### `calculate_initial_background`
+```python
+def calculate_initial_background(t: np.ndarray, x: np.ndarray, smooth_n: int, filter_type: str = "gaussian") -> np.ndarray
+```
+Calculate initial background estimate.
+
+### `calculate_smoothing_parameters`
+```python
+def calculate_smoothing_parameters(sampling_interval: float, smooth_win_t: Optional[float], smooth_win_f: Optional[float], min_event_t: float, detection_snr: float, min_event_keep_snr: float, widen_frac: float, signal_polarity: int) -> Tuple[int, int]
+```
+Calculate smoothing window size and minimum event length in samples.
+
+### `configure_logging`
+```python
+def configure_logging(log_level: str = "INFO") -> None
+```
+Configure loguru logging with specified level.
+
+### `create_oscilloscope_plot`
+```python
+def create_oscilloscope_plot(t: np.ndarray, x: np.ndarray, bg_initial: np.ndarray, bg_clean: np.ndarray, events: np.ndarray, detection_threshold: np.ndarray, keep_threshold: np.ndarray, name: str, detection_snr: float, min_event_keep_snr: float, max_plot_points: int, envelope_mode_limit: float, smooth_n: int, global_noise: Optional[np.float32] = None) -> OscilloscopePlot
+```
+Create oscilloscope plot with all visualization elements.
+
+### `get_final_events`
+```python
+def get_final_events(state: Dict[str, Any]) -> np.ndarray
+```
+Extract and finalise the list of detected events from the state.
+
+### `initialize_state`
+```python
+def initialize_state(config: Dict[str, Any]) -> Dict[str, Any]
+```
+Initialise the state dictionary for processing.
+
+### `process_chunk`
+```python
+def process_chunk(data: Tuple[np.ndarray, np.ndarray], state: Dict[str, Any]) -> Dict[str, Any]
+```
+Process a single data chunk to find events.
+
+### `process_file`
+```python
+def process_file(name: str, sampling_interval: float, data_path: str, smooth_win_t: Optional[float] = None, smooth_win_f: Optional[float] = None, detection_snr: float = 3.0, min_event_keep_snr: float = 6.0, min_event_t: float = 0.75e-6, widen_frac: float = 10.0, signal_polarity: int = -1, max_plot_points: int = 10000, envelope_mode_limit: float = 10e-3, sidecar: Optional[str] = None, crop: Optional[List[int]] = None, yscale_mode: str = "snr", show_plots: bool = True, filter_type: str = "gaussian", filter_order: int = 2, chunk_size: Optional[int] = None) -> None
+```
+Process a single waveform file for event detection.
+
+### `EventPlotter`
+```python
+class EventPlotter:
+ def __init__(self, osc_plot: OscilloscopePlot, events: Optional[np.ndarray] = None, trace_idx: int = 0, bg_clean: Optional[np.ndarray] = None, global_noise: Optional[np.float32] = None, y_scale_mode: str = "raw")
+```
+Provides utility functions for plotting individual events or event grids.
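+
+Construction follows the signature above. A hedged sketch, assuming `osc_plot`, `events`, and `bg_clean` come out of the processing pipeline:
+
+```python
+plotter = EventPlotter(
+    osc_plot,
+    events=events,
+    bg_clean=bg_clean,
+    y_scale_mode="snr",  # 'snr', 'percent' or 'raw'
+)
+```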
+
+### `detect_events`
+```python
+def detect_events(time: np.ndarray, signal: np.ndarray, bg: np.ndarray, snr_threshold: np.float32 = np.float32(2.0), min_event_len: int = 20, min_event_amp: np.float32 = np.float32(0.0), widen_frac: np.float32 = np.float32(0.5), global_noise: Optional[np.float32] = None, signal_polarity: int = -1) -> Tuple[np.ndarray, np.float32]
+```
+Detect events in signal above background with specified thresholds.
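+
+A minimal sketch of a direct call on synthetic data (the trace, background, and parameter values here are illustrative, not from the package):
+
+```python
+import numpy as np
+
+from transivent import detect_events
+
+# Hypothetical 1 µs sampling: flat background plus one positive spike.
+t = np.arange(100_000, dtype=np.float32) * 1e-6
+x = np.random.normal(0.0, 0.01, t.size).astype(np.float32)
+x[50_000:50_100] += 0.2
+bg = np.zeros_like(x)
+
+events, global_noise = detect_events(
+    t, x, bg,
+    snr_threshold=np.float32(3.0),
+    min_event_len=20,
+    signal_polarity=1,  # +1: events above background
+)
+```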
+
+### `merge_overlapping_events`
+```python
+def merge_overlapping_events(events: np.ndarray) -> np.ndarray
+```
+Merge overlapping events.
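+
+A hypothetical example, assuming events follow the `[t_start, t_end]` row convention used elsewhere in the package:
+
+```python
+import numpy as np
+
+from transivent import merge_overlapping_events
+
+# The two overlapping intervals collapse into one; the disjoint one is kept.
+events = np.array([[0.0, 1.0], [0.8, 2.0], [3.0, 4.0]])
+merged = merge_overlapping_events(events)
+# expected: [[0.0, 2.0], [3.0, 4.0]]
+```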
+
+### `get_waveform_params`
+```python
+def get_waveform_params(bin_filename: str, data_path: Optional[str] = None, sidecar: Optional[str] = None) -> Dict[str, Any]
+```
+Parse XML sidecar file to extract waveform parameters.
+
+### `rd`
+```python
+def rd(filename: str, sampling_interval: Optional[float] = None, data_path: Optional[str] = None, sidecar: Optional[str] = None, crop: Optional[List[int]] = None) -> Tuple[np.ndarray, np.ndarray]
+```
+Read waveform binary file using sidecar XML for parameters.
+
+### `rd_chunked`
+```python
+def rd_chunked(filename: str, chunk_size: int, sampling_interval: Optional[float] = None, data_path: Optional[str] = None, sidecar: Optional[str] = None) -> Generator[Tuple[np.ndarray, np.ndarray], None, None]
+```
+Read waveform binary file in chunks using sidecar XML for parameters. This is a generator function that yields chunks of data.
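+
+A minimal sketch of chunked reading, reusing the file layout from the Quick Start (the filename and path are placeholders):
+
+```python
+from transivent import rd_chunked
+
+# Each iteration yields a (time, signal) pair of numpy arrays.
+for t_chunk, x_chunk in rd_chunked(
+    "RefCurve_2025-07-17_0_065114.Wfm.bin",
+    chunk_size=1_000_000,
+    data_path="path/to/your/data/",
+):
+    print(t_chunk[0], t_chunk[-1], x_chunk.mean())
+```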
diff --git a/example.py b/example.py
new file mode 100644
index 0000000..06c33c5
--- /dev/null
+++ b/example.py
@@ -0,0 +1,136 @@
+from warnings import warn
+
+import matplotlib as mpl
+
+from transivent import configure_logging, get_waveform_params, process_file
+
+# --- User configuration dictionary ---
+CONFIG = {
+ "SMOOTH_WIN_T": 10e-3, # smoothing window in seconds (set to None to use frequency)
+ "SMOOTH_WIN_F": None, # smoothing window in Hz (set to None to use time)
+ "DETECTION_SNR": 3, # point-by-point detection threshold, <MIN_EVENT_KEEP_SNR
+ "MIN_EVENT_KEEP_SNR": 5, # min event (max-)amplitude in multiples of global noise
+ "MIN_EVENT_T": 0.75e-6, # minimum event duration (seconds)
+ "WIDEN_FRAC": 10, # fraction of event length to widen detected events
+ "SIGNAL_POLARITY": 1, # Signal polarity: -1 for negative events (below background), +1 for positive events (above background)
+ "LOG_LEVEL": "INFO", # logging level: DEBUG, INFO, WARNING, ERROR, CRITICAL
+ "MAX_PLOT_POINTS": 10000, # Downsample threshold for plotting
+ "ENVELOPE_MODE_LIMIT": 10e-3, # Use envelope when time span >10ms, show thresholds when <10ms
+ "YSCALE_MODE": "snr", # y-scale mode for event plotter: 'snr', 'percent' or 'raw'
+ "FILTER_TYPE": "median", # Filter type: "savgol", "gaussian", "moving_average", "median"
+ "FILTER_ORDER": 3, # Order of the savgol filter for smoothing
+ "CHUNK_SIZE": None, # Set to None to disable chunking
+ # ---
+ "DATA_PATH": "../hycav/data/2025-07-17_bsa/",
+ "MEASUREMENTS": [
+ {
+ "data": "RefCurve_2025-07-17_0_065114.Wfm.bin",
+ },
+ {
+ "data": "RefCurve_2025-07-17_1_065214.Wfm.bin",
+ },
+ {
+ "data": "RefCurve_2025-07-17_2_065510.Wfm.bin",
+ },
+ {
+ "data": "RefCurve_2025-07-17_3_065814.Wfm.bin",
+ },
+ {
+ "data": "RefCurve_2025-07-17_4_065850.Wfm.bin",
+ },
+ {
+ "data": "RefCurve_2025-07-17_5_070003.Wfm.bin",
+ },
+ {
+ "data": "RefCurve_2025-07-17_6_070045.Wfm.bin",
+ },
+ {
+ "data": "RefCurve_2025-07-17_7_070339.Wfm.bin",
+ },
+ ],
+}
+
+
+def main() -> None:
+ """
+ Main function to process all measurements.
+ """
+ # Configure logging
+ configure_logging(CONFIG.get("LOG_LEVEL", "INFO"))
+
+ for measurement in CONFIG["MEASUREMENTS"]:
+ # Merge global config with measurement-specific overrides
+ merged_config = CONFIG.copy()
+ merged_config.update(measurement)
+
+ # Extract parameters for process_file
+ name = merged_config["data"]
+ sidecar = merged_config.get("sidecar")
+
+ params = get_waveform_params(
+ name, data_path=merged_config["DATA_PATH"], sidecar=sidecar
+ )
+ sampling_interval = params["sampling_interval"]
+
+ # Call with explicit parameters
+ process_file(
+ name=name,
+ sampling_interval=sampling_interval,
+ data_path=merged_config["DATA_PATH"],
+ smooth_win_t=merged_config.get("SMOOTH_WIN_T"),
+ smooth_win_f=merged_config.get("SMOOTH_WIN_F"),
+ detection_snr=merged_config.get("DETECTION_SNR", 3.0),
+ min_event_keep_snr=merged_config.get("MIN_EVENT_KEEP_SNR", 6.0),
+ min_event_t=merged_config.get("MIN_EVENT_T", 0.75e-6),
+ widen_frac=merged_config.get("WIDEN_FRAC", 10.0),
+ signal_polarity=merged_config.get("SIGNAL_POLARITY", -1),
+ max_plot_points=merged_config.get("MAX_PLOT_POINTS", 10000),
+ envelope_mode_limit=merged_config.get("ENVELOPE_MODE_LIMIT", 10e-3),
+ sidecar=sidecar,
+ crop=merged_config.get("crop"),
+ yscale_mode=merged_config.get("YSCALE_MODE", "snr"),
+ show_plots=True,
+ filter_type=merged_config.get("FILTER_TYPE", "gaussian"),
+ filter_order=merged_config.get("FILTER_ORDER", 2),
+ chunk_size=merged_config.get("CHUNK_SIZE"),
+ )
+
+
+if __name__ == "__main__":
+ # Set Matplotlib rcParams directly here
+ for optn, val in {
+ "backend": "QtAgg",
+ "figure.constrained_layout.use": True,
+ "figure.dpi": 90,
+ # "figure.figsize": (8.5 / 2.55, 6 / 2.55),
+ "font.family": ("sans-serif",),
+ "font.size": 11,
+ "legend.fontsize": "x-small",
+ "legend.handlelength": 1.5,
+ "legend.handletextpad": 0.6,
+ "lines.markersize": 4.0,
+ "lines.markeredgewidth": 1.6,
+ "lines.linewidth": 1.8,
+ "xtick.labelsize": 10,
+ "xtick.major.size": 3,
+ "xtick.direction": "in",
+ "ytick.labelsize": 10,
+ "ytick.direction": "in",
+ "ytick.major.size": 3,
+ "axes.formatter.useoffset": False,
+ "axes.formatter.use_mathtext": True,
+ "errorbar.capsize": 3.0,
+ "axes.linewidth": 1.4,
+ "xtick.major.width": 1.4,
+ "xtick.minor.width": 1.1,
+ "ytick.major.width": 1.4,
+ "ytick.minor.width": 1.1,
+ "axes.labelsize": 11,
+ }.items():
+ if isinstance(val, (list, tuple)):
+ val = tuple(val)
+ try:
+ mpl.rcParams[optn] = val
+ except KeyError:
+ warn(f"mpl rcparams key '{optn}' not recognised as a valid rc parameter.")
+ main()
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..b94d957
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,23 @@
+[project]
+name = "transivent"
+version = "0.1.0"
+description = "High-level analysis and plotting for transient events."
+license = { file = "LICENSE" }
+authors = [{ name = "Sam Scholten", email = "s.scholten@uq.edu.au" }]
+requires-python = ">=3.8"
+dependencies = [
+ "loguru",
+ "matplotlib",
+ "numba",
+ "Pillow",
+ "scipy",
+ "scopekit @ git+https://git.samsci.com/cgit.cgi/scopekit",
+]
+
+[build-system]
+requires = ["setuptools>=61.0"]
+build-backend = "setuptools.build_meta"
+
+[tool.setuptools.packages.find]
+where = ["src"]
+
diff --git a/src/transivent/__init__.py b/src/transivent/__init__.py
new file mode 100644
index 0000000..db3e824
--- /dev/null
+++ b/src/transivent/__init__.py
@@ -0,0 +1,36 @@
+"""
+High-level analysis and plotting for transient events.
+"""
+
+from .analysis import (
+ analyze_thresholds,
+ calculate_initial_background,
+ calculate_smoothing_parameters,
+ configure_logging,
+ create_oscilloscope_plot,
+ get_final_events,
+ initialize_state,
+ process_chunk,
+ process_file,
+)
+from .event_detector import detect_events, merge_overlapping_events
+from .event_plotter import EventPlotter
+from .io import get_waveform_params, rd, rd_chunked
+
+__all__ = [
+ "analyze_thresholds",
+ "calculate_initial_background",
+ "calculate_smoothing_parameters",
+ "configure_logging",
+ "create_oscilloscope_plot",
+ "detect_events",
+ "EventPlotter",
+ "get_final_events",
+ "get_waveform_params",
+ "initialize_state",
+ "merge_overlapping_events",
+ "process_chunk",
+ "process_file",
+ "rd",
+ "rd_chunked",
+]
diff --git a/src/transivent/analysis.py b/src/transivent/analysis.py
new file mode 100644
index 0000000..1b5277b
--- /dev/null
+++ b/src/transivent/analysis.py
@@ -0,0 +1,1238 @@
+import base64
+import io
+import os
+import sys
+import time
+import xml.etree.ElementTree as ET
+from typing import Any, Dict, List, Optional, Tuple
+
+import matplotlib.pyplot as plt
+import numpy as np
+from loguru import logger
+from numba import njit
+from PIL import Image
+from scipy.ndimage import gaussian_filter1d, median_filter, uniform_filter1d
+from scipy.signal import savgol_filter
+from scopekit.plot import OscilloscopePlot
+
+from .event_detector import (
+    MEDIAN_TO_STD_FACTOR,
+    detect_events,
+    merge_overlapping_events,
+)
+from .event_plotter import EventPlotter
+from .io import _get_xml_sidecar_path, rd, rd_chunked
+
+
+@njit
+def _create_event_mask_numba(t: np.ndarray, events: np.ndarray) -> np.ndarray:
+ """
+ Create a boolean mask to exclude event regions using numba for speed.
+
+ Parameters
+ ----------
+ t : np.ndarray
+ Time array.
+ events : np.ndarray
+ Events array with shape (n_events, 2) where each row is [t_start, t_end].
+
+ Returns
+ -------
+ np.ndarray
+ Boolean mask where True means keep the sample, False means exclude.
+ """
+ mask = np.ones(len(t), dtype=np.bool_)
+ if len(events) == 0:
+ return mask
+
+ for i in range(len(events)):
+ t_start = events[i, 0]
+ t_end = events[i, 1]
+
+ start_idx = np.searchsorted(t, t_start, side="left")
+ end_idx = np.searchsorted(t, t_end, side="left")
+
+ if start_idx < end_idx:
+ mask[start_idx:end_idx] = False
+
+ return mask
+
+
+def extract_preview_image(sidecar_path: str, output_path: str) -> Optional[str]:
+ """
+ Extract preview image from XML sidecar and save as PNG.
+
+ Parameters
+ ----------
+ sidecar_path : str
+ Path to the XML sidecar file.
+ output_path : str
+ Path where to save the PNG file.
+
+ Returns
+ -------
+ Optional[str]
+ Path to saved PNG file, or None if no image found.
+ """
+ try:
+ tree = ET.parse(sidecar_path)
+ root = tree.getroot()
+
+ # Find PreviewImage element
+ preview_elem = root.find(".//PreviewImage")
+ if preview_elem is None:
+ logger.warning(f"No PreviewImage found in {sidecar_path}")
+ return None
+
+ image_data = preview_elem.get("ImageData")
+ if not image_data:
+ logger.warning(f"Empty ImageData in PreviewImage from {sidecar_path}")
+ return None
+
+ # Decode base64 image data
+ image_bytes = base64.b64decode(image_data)
+
+ # Open with PIL and save as PNG
+ image = Image.open(io.BytesIO(image_bytes))
+ image.save(output_path, "PNG")
+
+ logger.info(f"Saved preview image: {output_path}")
+ return output_path
+
+ except Exception as e:
+ logger.warning(f"Failed to extract preview image from {sidecar_path}: {e}")
+ return None
+
+
+def plot_preview_image(image_path: str, title: str = "Preview Image") -> None:
+ """
+ Display preview image using matplotlib.
+
+ Parameters
+ ----------
+ image_path : str
+ Path to the image file.
+ title : str
+ Title for the plot.
+ """
+ try:
+ image = Image.open(image_path)
+
+ fig, ax = plt.subplots(figsize=(10, 6))
+ ax.imshow(image)
+ ax.set_title(title)
+ ax.axis('off') # Hide axes for cleaner display
+
+ except Exception as e:
+ logger.warning(f"Failed to display preview image {image_path}: {e}")
+
+
+def configure_logging(log_level: str = "INFO") -> None:
+ """
+ Configure loguru logging with specified level.
+
+ Parameters
+ ----------
+ log_level : str, default="INFO"
+ Logging level: DEBUG, INFO, WARNING, ERROR, CRITICAL.
+ """
+ logger.remove()
+ logger.add(
+ sys.stderr,
+ level=log_level.upper(),
+ format="<green>{time:YYYY-MM-DD HH:mm:ss}</green> | <level>{level: <8}</level> | <level>{message}</level>",
+ colorize=True,
+ )
+
+
+def load_data(
+ name: str,
+ sampling_interval: float,
+ data_path: str,
+ sidecar: Optional[str] = None,
+ crop: Optional[List[int]] = None,
+) -> Tuple[np.ndarray, np.ndarray]:
+ """
+ Stage 1: Load waveform data from file.
+
+ Parameters
+ ----------
+ name : str
+ Filename of the waveform data.
+ sampling_interval : float
+ Sampling interval in seconds.
+ data_path : str
+ Path to data directory.
+ sidecar : str, optional
+ XML sidecar filename.
+ crop : List[int], optional
+ Crop indices [start, end].
+
+ Returns
+ -------
+ Tuple[np.ndarray, np.ndarray]
+ Time and signal arrays.
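+
+ Examples
+ --------
+ A hedged sketch (the file name, 10 ns sampling interval, and directory
+ are illustrative and assume a matching XML sidecar exists):
+
+ >>> t, x = load_data("acq0001.Wfm.bin", 1e-8, "data/")
+ >>> t.shape == x.shape
+ True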
+ """
+ logger.success(f"Loading data from {name}")
+ t, x = rd(
+ name,
+ sampling_interval,
+ data_path=data_path,
+ sidecar=sidecar,
+ crop=crop,
+ )
+
+ logger.debug(
+ f"Signal statistics: min={np.min(x):.3g}, max={np.max(x):.3g}, mean={np.mean(x):.3g}, std={np.std(x):.3g}"
+ )
+
+ return t, x
+
+
+def calculate_smoothing_parameters(
+ sampling_interval: float,
+ smooth_win_t: Optional[float],
+ smooth_win_f: Optional[float],
+ min_event_t: float,
+ detection_snr: float,
+ min_event_keep_snr: float,
+ widen_frac: float,
+ signal_polarity: int,
+) -> Tuple[int, int]:
+ """
+ Calculate smoothing window size and minimum event length in samples.
+
+ Parameters
+ ----------
+ sampling_interval : float
+ Sampling interval in seconds.
+ smooth_win_t : Optional[float]
+ Smoothing window in seconds.
+ smooth_win_f : Optional[float]
+ Smoothing window in Hz.
+ min_event_t : float
+ Minimum event duration in seconds.
+ detection_snr : float
+ Detection SNR threshold.
+ min_event_keep_snr : float
+ Minimum event keep SNR threshold.
+ widen_frac : float
+ Fraction to widen detected events.
+ signal_polarity : int
+ Signal polarity (-1 for negative, +1 for positive).
+
+ Returns
+ -------
+ Tuple[int, int]
+ Smoothing window size and minimum event length in samples.
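+
+ Examples
+ --------
+ Illustrative numbers: a 1 µs window at a 10 ns sampling interval gives
+ 100 samples, rounded up to the next odd size:
+
+ >>> smooth_n, min_event_n = calculate_smoothing_parameters(
+ ...     1e-8, 1e-6, None, 0.75e-6, 3.0, 6.0, 10.0, -1)
+ >>> # smooth_n == 101 (forced odd), min_event_n == 75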
+ """
+ if smooth_win_t is not None:
+ smooth_n = int(smooth_win_t / sampling_interval)
+ elif smooth_win_f is not None:
+ smooth_n = int(1 / (smooth_win_f * sampling_interval))
+ else:
+ raise ValueError("Set either smooth_win_t or smooth_win_f")
+
+ if smooth_n % 2 == 0:
+ smooth_n += 1
+
+ min_event_n = max(1, int(min_event_t / sampling_interval))
+
+ smooth_freq_hz = 1 / (smooth_n * sampling_interval)
+ smooth_t_us = smooth_n * sampling_interval * 1e6
+ logger.info(
+ f"--Smooth window: {smooth_n} samples ({smooth_t_us:.1f} µs, {smooth_freq_hz:.1f} Hz)"
+ )
+ logger.info(
+ f"--Min event length: {min_event_n} samples ({min_event_t * 1e6:.1f} µs)"
+ )
+ logger.info(f"--Detection SNR: {detection_snr}")
+ logger.info(f"--Min keep SNR: {min_event_keep_snr}")
+ logger.info(f"--Widen fraction: {widen_frac}")
+ logger.info(
+ f"--Signal polarity: {signal_polarity} ({'negative' if signal_polarity < 0 else 'positive'} events)"
+ )
+
+ return smooth_n, min_event_n
+
+
+def calculate_initial_background(
+ t: np.ndarray, x: np.ndarray, smooth_n: int, filter_type: str = "gaussian"
+) -> np.ndarray:
+ """
+ Stage 2: Calculate initial background estimate.
+
+ Parameters
+ ----------
+ t : np.ndarray
+ Time array.
+ x : np.ndarray
+ Signal array.
+ smooth_n : int
+ Smoothing window size in samples.
+ filter_type : str, default="gaussian"
+ Filter type: "savgol", "gaussian", "moving_average", "median".
+
+ Returns
+ -------
+ np.ndarray
+ Initial background estimate.
+
+ Notes
+ -----
+ 1. Start with Gaussian: best balance of speed, noise rejection, and event preservation.
+ 2. Try Median if you see frequent spikes/glitches in your data.
+ 3. Use Moving Average for maximum speed if events are well above noise.
+ 4. Reserve Savitzky-Golay for final high-quality analysis of interesting datasets.
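+
+ Examples
+ --------
+ A minimal sketch on synthetic noise (the 101-sample window is illustrative):
+
+ >>> rng = np.random.default_rng(0)
+ >>> t = np.arange(10_000, dtype=np.float64) * 1e-8
+ >>> x = rng.normal(0.0, 0.01, t.size).astype(np.float32)
+ >>> bg = calculate_initial_background(t, x, 101)  # gaussian by default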
+ """
+ logger.info(f"Calculating initial background using {filter_type} filter")
+
+ if filter_type == "savgol":
+ bg_initial = savgol_filter(x, smooth_n, 3).astype(np.float32)
+ elif filter_type == "gaussian":
+ sigma = smooth_n / 6.0 # Convert window to sigma (6-sigma rule)
+ bg_initial = gaussian_filter1d(x.astype(np.float64), sigma).astype(np.float32)
+ elif filter_type == "moving_average":
+ # Use scipy's uniform_filter1d for proper edge handling
+ bg_initial = uniform_filter1d(
+ x.astype(np.float64), size=smooth_n, mode="nearest"
+ ).astype(np.float32)
+ elif filter_type == "median":
+ bg_initial = median_filter(x.astype(np.float64), size=smooth_n).astype(
+ np.float32
+ )
+ else:
+ raise ValueError(
+ f"Unknown filter_type: {filter_type}. Choose from 'savgol', 'gaussian', 'moving_average', 'median'"
+ )
+
+ logger.debug(
+ f"Initial background: mean={np.mean(bg_initial):.3g}, std={np.std(bg_initial):.3g}"
+ )
+ return bg_initial
+
+
+def estimate_noise(x: np.ndarray, bg_initial: np.ndarray) -> np.float32:
+ """
+ Stage 2: Estimate noise level.
+
+ Parameters
+ ----------
+ x : np.ndarray
+ Signal array.
+ bg_initial : np.ndarray
+ Initial background estimate.
+
+ Returns
+ -------
+ np.float32
+ Estimated noise level.
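+
+ Examples
+ --------
+ Sketch showing that the MAD-based estimate recovers the std of Gaussian
+ residuals (synthetic data):
+
+ >>> rng = np.random.default_rng(0)
+ >>> x = rng.normal(0.0, 0.01, 100_000).astype(np.float32)
+ >>> bg = np.zeros_like(x)
+ >>> noise = estimate_noise(x, bg)  # ≈ 0.01: median(|x - bg|) * 1.4826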
+ """
+ global_noise = np.float32(np.median(np.abs(x - bg_initial)) * MEDIAN_TO_STD_FACTOR)
+
+ signal_rms = np.sqrt(np.mean(x**2))
+ signal_range = np.max(x) - np.min(x)
+ noise_pct_rms = 100 * global_noise / signal_rms if signal_rms > 0 else 0
+ noise_pct_range = 100 * global_noise / signal_range if signal_range > 0 else 0
+
+ logger.info(
+ f"Global noise level: {global_noise:.3g} ({noise_pct_rms:.1f}% of RMS, {noise_pct_range:.1f}% of range)"
+ )
+
+ snr_estimate = np.std(x) / global_noise
+ logger.info(f"Estimated signal SNR: {snr_estimate:.2f}")
+
+ return global_noise
+
+
+def detect_initial_events(
+ t: np.ndarray,
+ x: np.ndarray,
+ bg_initial: np.ndarray,
+ global_noise: np.float32,
+ detection_snr: float,
+ min_event_keep_snr: float,
+ widen_frac: float,
+ signal_polarity: int,
+ min_event_n: int,
+) -> np.ndarray:
+ """
+ Stage 3: Detect initial events.
+
+ Parameters
+ ----------
+ t : np.ndarray
+ Time array.
+ x : np.ndarray
+ Signal array.
+ bg_initial : np.ndarray
+ Initial background estimate.
+ global_noise : np.float32
+ Estimated noise level.
+ detection_snr : float
+ Detection SNR threshold.
+ min_event_keep_snr : float
+ Minimum event keep SNR threshold.
+ widen_frac : float
+ Fraction to widen detected events.
+ signal_polarity : int
+ Signal polarity (-1 for negative, +1 for positive).
+ min_event_n : int
+ Minimum event length in samples.
+
+ Returns
+ -------
+ np.ndarray
+ Array of initial events.
+ """
+ logger.info("Detecting initial events")
+ min_event_amp = np.float32(min_event_keep_snr) * global_noise
+
+ logger.info(f"Detection threshold: {detection_snr}σ below background")
+ logger.info(f"Keep threshold: {min_event_keep_snr}σ below background")
+ logger.info(f"Min event amplitude threshold: {min_event_amp:.3g}")
+
+ events_initial, _ = detect_events(
+ t,
+ x,
+ bg_initial,
+ snr_threshold=np.float32(detection_snr),
+ min_event_len=min_event_n,
+ min_event_amp=min_event_amp,
+ widen_frac=np.float32(widen_frac),
+ global_noise=global_noise,
+ signal_polarity=signal_polarity,
+ )
+
+ logger.info(f"Found {len(events_initial)} initial events after filtering")
+
+ events_initial = merge_overlapping_events(events_initial)
+ logger.info(f"After merging: {len(events_initial)} events")
+
+ return events_initial
+
+
+def calculate_clean_background(
+ t: np.ndarray,
+ x: np.ndarray,
+ events_initial: np.ndarray,
+ smooth_n: int,
+ bg_initial: np.ndarray,
+ filter_type: str = "gaussian",
+ filter_order: int = 2,
+) -> np.ndarray:
+ """
+ Stage 4: Calculate clean background by masking events.
+
+ Parameters
+ ----------
+ t : np.ndarray
+ Time array.
+ x : np.ndarray
+ Signal array.
+ events_initial : np.ndarray
+ Initial events array.
+ smooth_n : int
+ Smoothing window size in samples.
+ bg_initial : np.ndarray
+ Initial background estimate.
+ filter_type : str, default="gaussian"
+ Filter type: "savgol", "gaussian", "moving_average", "median".
+ filter_order : int, default=2
+ Order of the Savitzky-Golay filter (only used for filter_type="savgol").
+
+ Returns
+ -------
+ np.ndarray
+ Clean background estimate.
+
+ Notes
+ -----
+ 1. Start with Gaussian: best balance of speed, noise rejection, and event preservation.
+ 2. Try Median if you see frequent spikes/glitches in your data.
+ 3. Use Moving Average for maximum speed if events are well above noise.
+ 4. Reserve Savitzky-Golay for final high-quality analysis of interesting datasets.
+ """
+ logger.info(f"Calculating clean background using {filter_type} filter")
+ start_time = time.time()
+
+ # Fast masking with numba
+ mask = _create_event_mask_numba(t, events_initial)
+ mask_time = time.time()
+
+ logger.debug(
+ f"Masked {np.sum(~mask)} samples ({100 * np.sum(~mask) / len(mask):.1f}%) for clean background"
+ )
+
+ t_masked = t[mask]
+ x_masked = x[mask]
+
+ if np.sum(mask) > 2 * smooth_n:
+ # Interpolation is only needed when masking actually removed samples
+ if len(events_initial) == 0 or np.all(mask):
+ # No events detected or no masking needed - skip interpolation
+ logger.debug("No events to mask - using direct filtering")
+ interp_start = time.time()
+ x_interp = x
+ interp_end = time.time()
+ else:
+ # Events detected - need interpolation
+ interp_start = time.time()
+ x_interp = np.interp(t, t_masked, x_masked)
+ interp_end = time.time()
+
+ filter_start = time.time()
+ if filter_type == "savgol":
+ bg_clean = savgol_filter(x_interp, smooth_n, filter_order).astype(
+ np.float32
+ )
+ elif filter_type == "gaussian":
+ sigma = smooth_n / 6.0 # Convert window to sigma (6-sigma rule)
+ bg_clean = gaussian_filter1d(x_interp.astype(np.float64), sigma).astype(
+ np.float32
+ )
+ elif filter_type == "moving_average":
+ bg_clean = uniform_filter1d(
+ x_interp.astype(np.float64), size=smooth_n, mode="nearest"
+ ).astype(np.float32)
+ elif filter_type == "median":
+ bg_clean = median_filter(x_interp.astype(np.float64), size=smooth_n).astype(
+ np.float32
+ )
+ else:
+ raise ValueError(
+ f"Unknown filter_type: {filter_type}. Choose from 'savgol', 'gaussian', 'moving_average', 'median'"
+ )
+ filter_end = time.time()
+
+ logger.success(
+ f"Timing: mask={mask_time - start_time:.3f}s, interp={interp_end - interp_start:.3f}s, filter={filter_end - filter_start:.3f}s"
+ )
+ logger.debug(
+ f"Clean background: mean={np.mean(bg_clean):.3g}, std={np.std(bg_clean):.3g}"
+ )
+ else:
+ logger.debug(
+ "Insufficient unmasked samples for clean background - using initial"
+ )
+ bg_clean = bg_initial
+
+ return bg_clean
+
+
+def analyze_thresholds(
+ x: np.ndarray,
+ bg_clean: np.ndarray,
+ global_noise: np.float32,
+ detection_snr: float,
+ min_event_keep_snr: float,
+ signal_polarity: int,
+) -> Tuple[np.ndarray, np.ndarray]:
+ """
+ Analyze threshold statistics and create threshold arrays.
+
+ Parameters
+ ----------
+ x : np.ndarray
+ Signal array.
+ bg_clean : np.ndarray
+ Clean background estimate.
+ global_noise : np.float32
+ Estimated noise level.
+ detection_snr : float
+ Detection SNR threshold.
+ min_event_keep_snr : float
+ Minimum event keep SNR threshold.
+ signal_polarity : int
+ Signal polarity (-1 for negative, +1 for positive).
+
+ Returns
+ -------
+ Tuple[np.ndarray, np.ndarray]
+ Detection and keep threshold arrays.
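+
+ Examples
+ --------
+ Sketch of the threshold arithmetic for negative-going events (values
+ are illustrative):
+
+ >>> bg = np.full(4, 1.0, dtype=np.float32)
+ >>> x = np.array([1.0, 0.9, 0.5, 1.0], dtype=np.float32)
+ >>> det, keep = analyze_thresholds(x, bg, np.float32(0.05), 3.0, 6.0, -1)
+ >>> # det == bg - 3*0.05 = 0.85; keep == bg - 6*0.05 = 0.70, per sample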
+ """
+ logger.info("Analyzing thresholds")
+
+ if signal_polarity < 0:
+ detection_threshold = bg_clean - detection_snr * global_noise
+ keep_threshold = bg_clean - min_event_keep_snr * global_noise
+ below_detection_pct = 100 * np.sum(x < detection_threshold) / len(x)
+ below_keep_pct = 100 * np.sum(x < keep_threshold) / len(x)
+ logger.info(f"Samples below detection threshold: {below_detection_pct:.2f}%")
+ logger.info(f"Samples below keep threshold: {below_keep_pct:.2f}%")
+ else:
+ detection_threshold = bg_clean + detection_snr * global_noise
+ keep_threshold = bg_clean + min_event_keep_snr * global_noise
+ above_detection_pct = 100 * np.sum(x > detection_threshold) / len(x)
+ above_keep_pct = 100 * np.sum(x > keep_threshold) / len(x)
+ logger.info(f"Samples above detection threshold: {above_detection_pct:.2f}%")
+ logger.info(f"Samples above keep threshold: {above_keep_pct:.2f}%")
+
+ return detection_threshold, keep_threshold
+
+
+def detect_final_events(
+ t: np.ndarray,
+ x: np.ndarray,
+ bg_clean: np.ndarray,
+ global_noise: np.float32,
+ detection_snr: float,
+ min_event_keep_snr: float,
+ widen_frac: float,
+ signal_polarity: int,
+ min_event_n: int,
+) -> np.ndarray:
+ """
+ Stage 5: Detect final events using clean background.
+
+ Parameters
+ ----------
+ t : np.ndarray
+ Time array.
+ x : np.ndarray
+ Signal array.
+ bg_clean : np.ndarray
+ Clean background estimate.
+ global_noise : np.float32
+ Estimated noise level.
+ detection_snr : float
+ Detection SNR threshold.
+ min_event_keep_snr : float
+ Minimum event keep SNR threshold.
+ widen_frac : float
+ Fraction to widen detected events.
+ signal_polarity : int
+ Signal polarity (-1 for negative, +1 for positive).
+ min_event_n : int
+ Minimum event length in samples.
+
+ Returns
+ -------
+ np.ndarray
+ Array of final events.
+ """
+ logger.info("Detecting final events")
+ min_event_amp = np.float32(min_event_keep_snr) * global_noise
+
+ events, noise = detect_events(
+ t,
+ x,
+ bg_clean,
+ snr_threshold=np.float32(detection_snr),
+ min_event_len=min_event_n,
+ min_event_amp=min_event_amp,
+ widen_frac=np.float32(widen_frac),
+ global_noise=global_noise,
+ signal_polarity=signal_polarity,
+ )
+
+ events = merge_overlapping_events(events)
+ logger.info(f"Detected {len(events)} final events")
+
+ return events
+
+
+def analyze_events(
+ t: np.ndarray,
+ x: np.ndarray,
+ bg_clean: np.ndarray,
+ events: np.ndarray,
+ global_noise: np.float32,
+ signal_polarity: int,
+) -> None:
+ """
+ Stage 7: Analyze event characteristics.
+
+ Parameters
+ ----------
+ t : np.ndarray
+ Time array.
+ x : np.ndarray
+ Signal array.
+ bg_clean : np.ndarray
+ Clean background estimate.
+ events : np.ndarray
+ Events array.
+ global_noise : np.float32
+ Estimated noise level.
+ signal_polarity : int
+ Signal polarity (-1 for negative, +1 for positive).
+ """
+ if len(events) == 0:
+ logger.info("No events to analyze")
+ return
+ if len(events) > 1000:
+ logger.warning(
+ f"Detected {len(events)} events (> 1000); skipping per-event analysis."
+ )
+ return
+
+ event_durations = (events[:, 1] - events[:, 0]) * 1e6  # convert seconds to µs
+ event_amplitudes = []
+
+ for t_start, t_end in events:
+ event_mask = (t >= t_start) & (t < t_end)
+ if np.any(event_mask):
+ if signal_polarity < 0:
+ amp = np.min(x[event_mask] - bg_clean[event_mask])
+ else:
+ amp = np.max(x[event_mask] - bg_clean[event_mask])
+ event_amplitudes.append(abs(amp))
+
+ if event_amplitudes:
+ logger.info(
+ f"Event durations (µs): min={np.min(event_durations):.2f}, max={np.max(event_durations):.2f}, mean={np.mean(event_durations):.2f}"
+ )
+ logger.info(
+ f"Event amplitudes: min={np.min(event_amplitudes):.3g}, max={np.max(event_amplitudes):.3g}, mean={np.mean(event_amplitudes):.3g}"
+ )
+ logger.info(
+ f"Event amplitude SNRs: min={np.min(event_amplitudes) / global_noise:.2f}, max={np.max(event_amplitudes) / global_noise:.2f}"
+ )
+
+ final_signal_rms = np.sqrt(np.mean(x**2))
+ final_noise_pct_rms = (
+ 100 * global_noise / final_signal_rms if final_signal_rms > 0 else 0
+ )
+ final_signal_range = np.max(x) - np.min(x)
+ final_noise_pct_range = (
+ 100 * global_noise / final_signal_range if final_signal_range > 0 else 0
+ )
+
+ logger.info(
+ f"Noise summary: {global_noise:.3g} ({final_noise_pct_rms:.1f}% of RMS, {final_noise_pct_range:.1f}% of range)"
+ )
+
+
+def create_oscilloscope_plot(
+ t: np.ndarray,
+ x: np.ndarray,
+ bg_initial: np.ndarray,
+ bg_clean: np.ndarray,
+ events: np.ndarray,
+ detection_threshold: np.ndarray,
+ keep_threshold: np.ndarray,
+ name: str,
+ detection_snr: float,
+ min_event_keep_snr: float,
+ max_plot_points: int,
+ envelope_mode_limit: float,
+ smooth_n: int,
+ global_noise: Optional[np.float32] = None,
+) -> OscilloscopePlot:
+ """
+ Stage 6: Create oscilloscope plot with all visualization elements.
+
+ Parameters
+ ----------
+ t : np.ndarray
+ Time array.
+ x : np.ndarray
+ Signal array.
+ bg_initial : np.ndarray
+ Initial background estimate.
+ bg_clean : np.ndarray
+ Clean background estimate.
+ events : np.ndarray
+ Events array.
+ detection_threshold : np.ndarray
+ Detection threshold array.
+ keep_threshold : np.ndarray
+ Keep threshold array.
+ name : str
+ Name for the plot.
+ detection_snr : float
+ Detection SNR threshold.
+ min_event_keep_snr : float
+ Minimum event keep SNR threshold.
+ max_plot_points : int
+ Maximum plot points for decimation.
+ envelope_mode_limit : float
+ Envelope mode limit.
+ smooth_n : int
+ Smoothing window size in samples.
+ global_noise : Optional[np.float32], default=None
+ Estimated noise level. If provided, will be plotted as a ribbon.
+
+ Returns
+ -------
+ OscilloscopePlot
+ Configured oscilloscope plot.
+ """
+ logger.info("Creating visualization")
+
+ plot_name = name
+ if global_noise is not None:
+ plot_signal_rms = np.sqrt(np.mean(x**2))
+ plot_noise_pct_rms = (
+ 100 * global_noise / plot_signal_rms if plot_signal_rms > 0 else 0
+ )
+ plot_name = f"{name} | Global noise: {global_noise:.3g} ({plot_noise_pct_rms:.1f}% of RMS)"
+
+ plot = OscilloscopePlot(
+ t,
+ x,
+ name=plot_name,
+ max_plot_points=max_plot_points,
+ mode_switch_threshold=envelope_mode_limit,
+ envelope_window_samples=None, # Envelope window now calculated automatically based on zoom
+ )
+
+ plot.add_line(
+ t,
+ bg_clean,
+ label="Background",
+ color="orange",
+ alpha=0.6,
+ linewidth=1.5,
+ display_mode=OscilloscopePlot.MODE_BOTH,
+ )
+
+ if global_noise is not None:
+ plot.add_ribbon(
+ t,
+ bg_clean,
+ global_noise,
+ label="Noise (±1σ)",
+ color="gray",
+ alpha=0.3,
+ display_mode=OscilloscopePlot.MODE_DETAIL,
+ )
+
+ plot.add_line(
+ t,
+ detection_threshold,
+ label=f"Detection ({detection_snr}σ)",
+ color="red",
+ alpha=0.7,
+ linestyle=":",
+ linewidth=1.5,
+ display_mode=OscilloscopePlot.MODE_DETAIL,
+ )
+
+ plot.add_line(
+ t,
+ keep_threshold,
+ label=f"Keep ({min_event_keep_snr}σ)",
+ color="darkred",
+ alpha=0.7,
+ linestyle="--",
+ linewidth=1.5,
+ display_mode=OscilloscopePlot.MODE_DETAIL,
+ )
+
+ if len(events) > 0:
+ plot.add_regions(
+ events,
+ label="Events",
+ color="crimson",
+ alpha=0.4,
+ display_mode=OscilloscopePlot.MODE_BOTH,
+ )
+
+ plot.render()
+ return plot
+
+
+def initialize_state(config: Dict[str, Any]) -> Dict[str, Any]:
+ """
+ Initialise the state dictionary for processing.
+
+ Parameters
+ ----------
+ config : Dict[str, Any]
+ Configuration dictionary containing analysis parameters.
+
+ Returns
+ -------
+ Dict[str, Any]
+ The initial state dictionary.
+ """
+ # Normalise keys to lowercase to allow for flexible config from user scripts
+ normalized_config = {k.lower(): v for k, v in config.items()}
+ return {
+ "config": normalized_config,
+ "events": [], # To store lists of events from each chunk
+ "overlap_buffer": {"t": np.array([]), "x": np.array([])}, # For seamless filtering
+ "incomplete_event": None, # To handle events spanning chunks
+ }
+
+
+def get_final_events(state: Dict[str, Any]) -> np.ndarray:
+ """
+ Extract and finalise the list of detected events from the state.
+
+ Parameters
+ ----------
+ state : Dict[str, Any]
+ The final state dictionary after processing all chunks.
+
+ Returns
+ -------
+ np.ndarray
+ The final, merged list of all detected events.
+ """
+ if not state["events"]:
+ return np.empty((0, 2), dtype=np.float32)
+
+ all_events = np.vstack(state["events"])
+ return merge_overlapping_events(all_events)
+
+
+def process_chunk(
+ data: Tuple[np.ndarray, np.ndarray], state: Dict[str, Any]
+) -> Dict[str, Any]:
+ """
+ Process a single data chunk to find events.
+
+ This function contains the core analysis pipeline. It takes a data chunk
+ and the current state, performs event detection, and returns the updated
+ state along with intermediate results for plotting.
+
+ Parameters
+ ----------
+ data : Tuple[np.ndarray, np.ndarray]
+ A tuple containing the time (t) and signal (x) arrays for the chunk.
+ state : Dict[str, Any]
+ The current state dictionary.
+
+ Returns
+ -------
+ Dict[str, Any]
+ A dictionary containing the updated state and intermediate results:
+ - "state": The updated state dictionary.
+ - "bg_initial": The initial background estimate.
+ - "global_noise": The estimated global noise for the chunk.
+ - "events_initial": The initially detected events.
+ - "bg_clean": The cleaned background estimate.
+ - "events": The final detected events for the chunk.
+ """
+ t_chunk, x_chunk = data
+ config = state["config"]
+ overlap_buffer = state["overlap_buffer"]
+ smooth_n = config["smooth_n"]
+
+ # Prepend overlap buffer from previous chunk
+ t = np.concatenate((overlap_buffer["t"], t_chunk))
+ x = np.concatenate((overlap_buffer["x"], x_chunk))
+
+ # If the accumulated data is shorter than the smoothing window, defer processing
+ if len(x) < smooth_n:
+ # Not enough data to process, just buffer it for the next chunk
+ state["overlap_buffer"] = {"t": t, "x": x}
+ return {
+ "state": state,
+ "bg_initial": np.array([], dtype=np.float32),
+ "global_noise": np.float32(0),
+ "events_initial": np.array([], dtype=np.float32),
+ "bg_clean": np.array([], dtype=np.float32),
+ "events": np.array([], dtype=np.float32),
+ }
+
+ # Update overlap buffer for next iteration
+ # The buffer should be `smooth_n` points long for filtering
+ overlap_size = min(len(t), smooth_n)
+ state["overlap_buffer"] = {
+ "t": t[-overlap_size:],
+ "x": x[-overlap_size:],
+ }
+
+ # Extract parameters from config
+ min_event_n = config["min_event_n"]
+ filter_type = config["filter_type"]
+ detection_snr = config["detection_snr"]
+ min_event_keep_snr = config["min_event_keep_snr"]
+ widen_frac = config["widen_frac"]
+ signal_polarity = config["signal_polarity"]
+ filter_order = config["filter_order"]
+
+ # Stage 2: Background Calculation
+ bg_initial = calculate_initial_background(t, x, smooth_n, filter_type)
+ global_noise = estimate_noise(x, bg_initial)
+
+ # Stage 3: Initial Event Detection
+ events_initial = detect_initial_events(
+ t,
+ x,
+ bg_initial,
+ global_noise,
+ detection_snr,
+ min_event_keep_snr,
+ widen_frac,
+ signal_polarity,
+ min_event_n,
+ )
+
+ # Stage 4: Clean Background Calculation
+ bg_clean = calculate_clean_background(
+ t, x, events_initial, smooth_n, bg_initial, filter_type, filter_order
+ )
+
+ # Stage 5: Final Event Detection
+ events = detect_final_events(
+ t,
+ x,
+ bg_clean,
+ global_noise,
+ detection_snr,
+ min_event_keep_snr,
+ widen_frac,
+ signal_polarity,
+ min_event_n,
+ )
+
+ # --- Handle events spanning chunk boundaries ---
+ # Check for and merge an incomplete event from the previous chunk
+ if state["incomplete_event"] is not None:
+ if len(events) > 0 and events[0, 0] <= t[0]:
+ # The first event in this chunk is a continuation of the previous one.
+ # Merge by updating the end time of the stored incomplete event.
+ state["incomplete_event"][1] = events[0, 1]
+ # Remove the partial event from this chunk's list.
+ events = events[1:]
+ else:
+ # The incomplete event was not continued. It's now complete.
+ # Add it to the final list and clear the state.
+ state["events"].append(np.array([state["incomplete_event"]]))
+ state["incomplete_event"] = None
+
+ # Check if the last event in this chunk is incomplete
+ if len(events) > 0:
+ # An event is incomplete if its end time is at or beyond the end of the
+ # current processing window `t`. `detect_events` extrapolates the end
+ # time, so a check for >= is sufficient.
+ if events[-1, 1] >= t[-1]:
+ # Store the incomplete event for the next chunk.
+ state["incomplete_event"] = events[-1]
+ # Remove it from this chunk's list.
+ events = events[:-1]
+
+ # Update state with the completed events from this chunk
+ if len(events) > 0:
+ state["events"].append(events)
+
+ return {
+ "state": state,
+ "bg_initial": bg_initial,
+ "global_noise": global_noise,
+ "events_initial": events_initial,
+ "bg_clean": bg_clean,
+ "events": events,
+ }
+
+
+def process_file(
+ name: str,
+ sampling_interval: float,
+ data_path: str,
+ smooth_win_t: Optional[float] = None,
+ smooth_win_f: Optional[float] = None,
+ detection_snr: float = 3.0,
+ min_event_keep_snr: float = 6.0,
+ min_event_t: float = 0.75e-6,
+ widen_frac: float = 10.0,
+ signal_polarity: int = -1,
+ max_plot_points: int = 10000,
+ envelope_mode_limit: float = 10e-3,
+ sidecar: Optional[str] = None,
+ crop: Optional[List[int]] = None,
+ yscale_mode: str = "snr",
+ show_plots: bool = True,
+ filter_type: str = "gaussian",
+ filter_order: int = 2,
+ chunk_size: Optional[int] = None,
+) -> None:
+ """
+ Process a single waveform file for event detection.
+
+ Parameters
+ ----------
+ name : str
+ Filename of the waveform data.
+ sampling_interval : float
+ Sampling interval in seconds.
+ data_path : str
+ Path to data directory.
+ smooth_win_t : Optional[float], default=None
+ Smoothing window in seconds.
+ smooth_win_f : Optional[float], default=None
+ Smoothing window in Hz.
+ detection_snr : float, default=3.0
+ Detection SNR threshold.
+ min_event_keep_snr : float, default=6.0
+ Minimum event keep SNR threshold.
+ min_event_t : float, default=0.75e-6
+ Minimum event duration in seconds.
+ widen_frac : float, default=10.0
+ Fraction to widen detected events.
+ signal_polarity : int, default=-1
+ Signal polarity (-1 for negative, +1 for positive).
+ max_plot_points : int, default=10000
+ Maximum plot points for decimation.
+ envelope_mode_limit : float, default=10e-3
+ Envelope mode limit.
+ sidecar : str, optional
+ XML sidecar filename.
+ crop : List[int], optional
+ Crop indices [start, end].
+ yscale_mode : str, default="snr"
+ Y-axis scaling mode for event plotter.
+ show_plots : bool, default=True
+ Whether to show plots interactively.
+ filter_type : str, default="gaussian"
+ Filter type for background smoothing: "savgol", "gaussian", "moving_average", "median".
+ filter_order : int, default=2
+ Order of the Savitzky-Golay filter (only used for filter_type="savgol").
+ chunk_size : Optional[int], default=None
+ If set, stream the file in chunks of this many samples instead of
+ loading it whole. Plotting is disabled in chunked mode.
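+
+ Examples
+ --------
+ A hedged invocation sketch (file name, sampling interval, and directory
+ are illustrative):
+
+ >>> process_file(
+ ...     "acq0001.Wfm.bin",
+ ...     sampling_interval=1e-8,
+ ...     data_path="data/",
+ ...     smooth_win_t=1e-6,
+ ...     show_plots=False,
+ ... )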
+ """
+ start_time = time.time()
+ logger.info(f"Processing {name} with parameters:")
+
+ analysis_dir = data_path[:-1] if data_path.endswith("/") else data_path
+ analysis_dir += "_analysis/"
+ os.makedirs(analysis_dir, exist_ok=True)
+
+ # Extract and save preview image
+ sidecar_path = _get_xml_sidecar_path(name, data_path, sidecar)
+ logger.info(f"Attempting to extract preview from: {sidecar_path}")
+ preview_path = os.path.join(analysis_dir, f"{name}_preview.png")
+ saved_preview = extract_preview_image(sidecar_path, preview_path)
+
+ if saved_preview and show_plots:
+ plot_preview_image(saved_preview, f"Preview: {name}")
+
+ # Calculate parameters
+ smooth_n, min_event_n = calculate_smoothing_parameters(
+ sampling_interval,
+ smooth_win_t,
+ smooth_win_f,
+ min_event_t,
+ detection_snr,
+ min_event_keep_snr,
+ widen_frac,
+ signal_polarity,
+ )
+
+ # --- Refactored analysis pipeline ---
+ config = {
+ "sampling_interval": sampling_interval,
+ "smooth_win_t": smooth_win_t,
+ "smooth_win_f": smooth_win_f,
+ "detection_snr": detection_snr,
+ "min_event_keep_snr": min_event_keep_snr,
+ "min_event_t": min_event_t,
+ "widen_frac": widen_frac,
+ "signal_polarity": signal_polarity,
+ "filter_type": filter_type,
+ "filter_order": filter_order,
+ "smooth_n": smooth_n,
+ "min_event_n": min_event_n,
+ }
+
+ state = initialize_state(config)
+
+ if chunk_size is None:
+ # --- Original full-file processing ---
+ t, x = load_data(name, sampling_interval, data_path, sidecar, crop)
+
+ # For now, process the entire file as a single chunk
+ process_start_time = time.time()
+ results = process_chunk((t, x), state)
+ final_events = get_final_events(results["state"])
+ logger.debug(f"Core processing took {time.time() - process_start_time:.3f}s")
+
+ # Extract intermediate results for plotting and analysis
+ bg_initial = results["bg_initial"]
+ global_noise = results["global_noise"]
+ bg_clean = results["bg_clean"]
+
+ # Analyze thresholds
+ detection_threshold, keep_threshold = analyze_thresholds(
+ x,
+ bg_clean,
+ global_noise,
+ detection_snr,
+ min_event_keep_snr,
+ signal_polarity,
+ )
+
+ # Stage 7: Event Analysis
+ analyze_events(t, x, bg_clean, final_events, global_noise, signal_polarity)
+
+ logger.debug(f"Total processing time: {time.time() - start_time:.3f}s")
+
+ # Stage 6: Visualization
+ plot = create_oscilloscope_plot(
+ t,
+ x,
+ bg_initial,
+ bg_clean,
+ final_events,
+ detection_threshold,
+ keep_threshold,
+ name,
+ detection_snr,
+ min_event_keep_snr,
+ max_plot_points,
+ envelope_mode_limit,
+ smooth_n,
+ global_noise=global_noise,
+ )
+
+ # Save the full-trace plot
+ plot.save(analysis_dir + f"{name}_trace.png")
+
+ # Create event plotter
+ event_plotter = EventPlotter(
+ plot,
+ final_events,
+ bg_clean=bg_clean,
+ global_noise=global_noise,
+ y_scale_mode=yscale_mode,
+ )
+ event_plotter.plot_events_grid(max_events=16)
+ event_plotter.save(analysis_dir + f"{name}_events.png")
+
+ if show_plots:
+ plt.show(block=True)
+ else:
+ # --- Chunked processing ---
+ logger.info(
+ f"--- Starting chunked processing with chunk size: {chunk_size} ---"
+ )
+
+ chunk_generator = rd_chunked(
+ name,
+ chunk_size=chunk_size,
+ sampling_interval=sampling_interval,
+ data_path=data_path,
+ sidecar=sidecar,
+ )
+
+ process_start_time = time.time()
+
+ for t_chunk, x_chunk in chunk_generator:
+ results = process_chunk((t_chunk, x_chunk), state)
+ state = results["state"]
+
+ # After processing all chunks, add any remaining incomplete event
+ if state.get("incomplete_event") is not None:
+ state["events"].append(np.array([state["incomplete_event"]]))
+ state["incomplete_event"] = None
+
+ final_events = get_final_events(state)
+ logger.debug(f"Core processing took {time.time() - process_start_time:.3f}s")
+
+ logger.success(
+ f"Chunked processing complete. Found {len(final_events)} events."
+ )
+ if len(final_events) > 0:
+ logger.info("Final events (first 10):")
+ for i, event in enumerate(final_events[:10]):
+ logger.info(
+ f" Event {i+1}: start={event[0]:.6f}s, end={event[1]:.6f}s"
+ )
+
+ logger.warning("Plotting is disabled in chunked processing mode.")
diff --git a/src/transivent/event_detector.py b/src/transivent/event_detector.py
new file mode 100644
index 0000000..22d76d4
--- /dev/null
+++ b/src/transivent/event_detector.py
@@ -0,0 +1,404 @@
+from typing import Optional, Tuple
+
+import numpy as np
+from loguru import logger
+from numba import njit
+
+# --- Constants ---
+MEDIAN_TO_STD_FACTOR = (
+ 1.4826 # Factor to convert median absolute deviation to standard deviation
+)
+
+
+@njit
+def detect_events_numba(
+ time: np.ndarray,
+ signal: np.ndarray,
+ bg: np.ndarray,
+ snr_threshold: float,
+ min_event_len: int,
+ min_event_amp: float,
+ widen_frac: float,
+ global_noise: float,
+ signal_polarity: int,
+) -> np.ndarray:
+ """
+ Detect events in signal using Numba for performance.
+
+ Uses pre-allocated NumPy arrays instead of dynamic lists for better performance.
+
+ Parameters
+ ----------
+ time : np.ndarray
+ Time array (float32).
+ signal : np.ndarray
+ Input signal array (float32).
+ bg : np.ndarray
+ Background/baseline array (float32).
+ snr_threshold : float
+ Signal-to-noise ratio threshold for detection.
+ min_event_len : int
+ Minimum event length in samples.
+ min_event_amp : float
+ Minimum event amplitude threshold.
+ widen_frac : float
+ Fraction to widen detected events.
+ global_noise : float
+ Global noise level.
+ signal_polarity : int
+ Signal polarity: -1 for negative events, +1 for positive events.
+
+ Returns
+ -------
+ np.ndarray
+ Array of shape (n_events, 2) with start and end indices of events.
+ """
+ # Cast scalar parameters to float32 for consistency
+ snr_threshold = np.float32(snr_threshold)
+ min_event_amp = np.float32(min_event_amp)
+ widen_frac = np.float32(widen_frac)
+ global_noise = np.float32(global_noise)
+
+ if signal_polarity < 0:
+ threshold = bg - snr_threshold * global_noise
+ above = signal < threshold
+ else:
+ threshold = bg + snr_threshold * global_noise
+ above = signal > threshold
+
+ # Pre-allocate maximum possible events (worst case: every other sample is an event)
+ max_events = len(signal) // 2
+ events = np.empty((max_events, 2), dtype=np.int64)
+ event_count = 0
+
+ in_event = False
+ start = 0
+
+ for i in range(len(above)):
+ val = above[i]
+ if val and not in_event:
+ start = i
+ in_event = True
+ elif not val and in_event:
+ end = i
+ event_len = end - start
+ if event_len < min_event_len:
+ in_event = False
+ continue
+
+ # Amplitude filter
+ if min_event_amp > 0.0:
+ if signal_polarity < 0:
+ if np.min(signal[start:end] - bg[start:end]) > -min_event_amp:
+ in_event = False
+ continue
+ else:
+ if np.max(signal[start:end] - bg[start:end]) < min_event_amp:
+ in_event = False
+ continue
+
+ # Widen event
+ widen = int(widen_frac * (end - start))
+ new_start = max(0, start - widen)
+ new_end = min(len(signal), end + widen)
+
+ # Store indices for now, convert to time outside numba
+ events[event_count, 0] = new_start
+ events[event_count, 1] = new_end
+ event_count += 1
+ in_event = False
+
+ # Handle an event still open at the end of the signal. The amplitude
+ # check mirrors the in-loop filter; the widen/store step is factored
+ # out so it appears once instead of three times.
+ if in_event:
+ end = len(signal)
+ event_len = end - start
+ if event_len >= min_event_len:
+ keep = True
+ if min_event_amp > 0.0:
+ if signal_polarity < 0:
+ keep = np.min(signal[start:end] - bg[start:end]) <= -min_event_amp
+ else:
+ keep = np.max(signal[start:end] - bg[start:end]) >= min_event_amp
+ if keep:
+ widen = int(widen_frac * (end - start))
+ events[event_count, 0] = max(0, start - widen)
+ events[event_count, 1] = min(len(signal), end + widen)
+ event_count += 1
+
+ # Return only the filled portion
+ return events[:event_count]
+
+
+@njit
+def merge_overlapping_events_numba(events: np.ndarray) -> np.ndarray:
+ """
+ Merge overlapping events using Numba for performance.
+
+ Parameters
+ ----------
+ events : np.ndarray
+ Array of shape (n_events, 2) with start and end times.
+
+ Returns
+ -------
+ np.ndarray
+ Array of merged events with shape (n_merged, 2).
+ """
+ n = len(events)
+ if n == 0:
+ return np.empty((0, 2), dtype=np.float32)
+ # Sort events by start time before merging
+ arr = events[np.argsort(events[:, 0])]
+ merged = np.empty((n, 2), dtype=np.float32)
+ count = 0
+ merged[count] = arr[0]
+ count += 1
+ for i in range(1, arr.shape[0]):
+ start, end = arr[i]
+ last_start, last_end = merged[count - 1]
+ if start <= last_end:
+ merged[count - 1, 1] = max(last_end, end)
+ else:
+ merged[count] = arr[i]
+ count += 1
+ return merged[:count]
+
+
+def detect_events(
+ time: np.ndarray,
+ signal: np.ndarray,
+ bg: np.ndarray,
+ snr_threshold: np.float32 = np.float32(2.0),
+ min_event_len: int = 20,
+ min_event_amp: np.float32 = np.float32(0.0),
+ widen_frac: np.float32 = np.float32(0.5),
+ global_noise: Optional[np.float32] = None,
+ signal_polarity: int = -1,
+) -> Tuple[np.ndarray, np.float32]:
+ """
+ Detect events in signal above background with specified thresholds.
+
+ Parameters
+ ----------
+ time : np.ndarray
+ Time array.
+ signal : np.ndarray
+ Input signal array.
+ bg : np.ndarray
+ Background/baseline array.
+ snr_threshold : np.float32, default=2.0
+ Signal-to-noise ratio threshold for detection.
+ min_event_len : int, default=20
+ Minimum event length in samples.
+ min_event_amp : np.float32, default=0.0
+ Minimum event amplitude threshold.
+ widen_frac : np.float32, default=0.5
+ Fraction to widen detected events.
+ global_noise : np.float32, optional
+ Global noise level. Must be provided.
+ signal_polarity : int, default=-1
+ Signal polarity: -1 for negative events (below background), +1 for positive events (above background).
+
+ Returns
+ -------
+ Tuple[np.ndarray, np.float32]
+ Array of detected events (time ranges) and global noise value.
+
+ Raises
+ ------
+ ValueError
+ If global_noise is not provided or input arrays are invalid.
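+
+ Examples
+ --------
+ A minimal sketch on synthetic data with one injected negative dip
+ (all numbers are illustrative):
+
+ >>> t = np.arange(1000, dtype=np.float32) * 1e-8
+ >>> x = np.zeros(1000, dtype=np.float32)
+ >>> x[400:440] = -1.0
+ >>> bg = np.zeros(1000, dtype=np.float32)
+ >>> events, noise = detect_events(
+ ...     t, x, bg, global_noise=np.float32(0.05), min_event_len=20)
+ >>> # one event covering (and widened around) the dip at 4.0-4.4 µs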
+ """
+ if global_noise is None:
+ logger.error("global_noise was not provided to detect_events.")
+ raise ValueError("global_noise must be provided")
+
+ # Validate and convert input arrays
+ time = np.asarray(time, dtype=np.float32)
+ signal = np.asarray(signal, dtype=np.float32)
+ bg = np.asarray(bg, dtype=np.float32)
+
+ # Validate input data
+ _validate_detection_inputs(
+ time, signal, bg, snr_threshold, min_event_len, global_noise
+ )
+
+ events_indices = detect_events_numba(
+ time,
+ signal,
+ bg,
+ np.float32(snr_threshold),
+ int(min_event_len),
+ np.float32(min_event_amp),
+ np.float32(widen_frac),
+ np.float32(global_noise),
+ int(signal_polarity),
+ )
+
+ # Convert indices to time values outside of numba
+ events_array = np.empty_like(events_indices, dtype=np.float32)
+ for i in range(len(events_indices)):
+ start_idx = int(events_indices[i, 0])
+ end_idx = int(events_indices[i, 1])
+ events_array[i, 0] = time[start_idx]
+ if end_idx < len(time):
+ events_array[i, 1] = time[end_idx]
+ else:
+ # Event extends to the end of the signal. Extrapolate end time.
+ sampling_interval = time[1] - time[0] if len(time) > 1 else 0.0
+ events_array[i, 1] = time[-1] + sampling_interval
+
+ logger.info(f"Raw detection found {len(events_array)} events")
+
+ return events_array, np.float32(global_noise)
+
+
+def _validate_detection_inputs(
+ time: np.ndarray,
+ signal: np.ndarray,
+ bg: np.ndarray,
+ snr_threshold: np.float32,
+ min_event_len: int,
+ global_noise: np.float32,
+) -> None:
+ """
+ Validate inputs for event detection.
+
+ Parameters
+ ----------
+ time : np.ndarray
+ Time array.
+ signal : np.ndarray
+ Signal array.
+ bg : np.ndarray
+ Background array.
+ snr_threshold : np.float32
+ SNR threshold.
+ min_event_len : int
+ Minimum event length.
+ global_noise : np.float32
+ Global noise level.
+
+ Raises
+ ------
+ ValueError
+ If inputs are invalid.
+ """
+ # Check array lengths
+ if not (len(time) == len(signal) == len(bg)):
+ logger.warning(
+ f"Validation Warning: Array length mismatch: time={len(time)}, signal={len(signal)}, bg={len(bg)}. "
+ "This may lead to unexpected behaviour."
+ )
+
+ # Check for empty arrays
+ if len(time) == 0:
+ logger.warning(
+ "Validation Warning: Input arrays are empty. This may lead to unexpected behaviour."
+ )
+
+ # Check time monotonicity with a small tolerance for floating-point comparisons
+ if len(time) > 1:
+ # Use a small epsilon for floating-point comparison
+ # np.finfo(time.dtype).eps is the smallest representable positive number such that 1.0 + eps != 1.0
+ # Multiplying by a small factor (e.g., 10) provides a reasonable tolerance.
+ tolerance = np.finfo(time.dtype).eps * 10
+ if not np.all(np.diff(time) > tolerance):
+ # Log the problematic differences for debugging
+ problematic_diffs = np.diff(time)[np.diff(time) <= tolerance]
+ logger.warning(
+ f"Validation Warning: Time array is not strictly monotonic increasing within tolerance {tolerance}. "
+ f"Problematic diffs (first 10): {problematic_diffs[:10]}. This may lead to unexpected behaviour."
+ )
+
+ # Check parameter validity
+ if snr_threshold <= 0:
+ logger.warning(
+ f"Validation Warning: SNR threshold must be positive, got {snr_threshold}. This may lead to unexpected behaviour."
+ )
+
+ if min_event_len <= 0:
+ logger.warning(
+ f"Validation Warning: Minimum event length must be positive, got {min_event_len}. This may lead to unexpected behaviour."
+ )
+
+ if global_noise <= 0:
+ logger.warning(
+ f"Validation Warning: Global noise must be positive, got {global_noise}. This may lead to unexpected behaviour."
+ )
+
+ # Check for NaN/inf values
+ for name, arr in [("time", time), ("signal", signal), ("bg", bg)]:
+ if not np.all(np.isfinite(arr)):
+ logger.warning(
+ f"Validation Warning: {name} array contains NaN or infinite values. This may lead to unexpected behaviour."
+ )
+
+
+def merge_overlapping_events(events: np.ndarray) -> np.ndarray:
+ """
+ Merge overlapping events.
+
+ Parameters
+ ----------
+ events : np.ndarray
+ Array of events with shape (n_events, 2).
+
+ Returns
+ -------
+ np.ndarray
+ Array of merged events.
+
+ Notes
+ -----
+ Malformed input is logged as a warning rather than raised, so batch
+ processing can continue on imperfect data.
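+
+ Examples
+ --------
+ A minimal sketch (times are illustrative):
+
+ >>> ev = np.array([[0.0, 1.0], [0.5, 2.0], [3.0, 4.0]], dtype=np.float32)
+ >>> merged = merge_overlapping_events(ev)
+ >>> # [[0.0, 2.0], [3.0, 4.0]] - the first two intervals overlap and fuse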
+ """
+ if len(events) == 0:
+ return np.empty((0, 2), dtype=np.float32)
+
+ # Validate events array format
+ events_array = np.asarray(events, dtype=np.float32)
+ if events_array.ndim != 2 or events_array.shape[1] != 2:
+ logger.warning(
+ f"Validation Warning: Events array must have shape (n_events, 2), got {events_array.shape}. This may lead to unexpected behaviour."
+ )
+ # The (n, 2) shape matters for the Numba kernel's indexing; we warn
+ # rather than raise to stay permissive, and a fundamentally wrong
+ # shape will still fail inside the compiled merge.
+
+ # Check for invalid events (start >= end)
+ invalid_mask = events_array[:, 0] >= events_array[:, 1]
+ if np.any(invalid_mask):
+ invalid_indices = np.where(invalid_mask)[0]
+ logger.warning(
+ f"Validation Warning: Invalid events found (start >= end) at indices: {invalid_indices}. This may lead to unexpected behaviour."
+ )
+
+ merged = merge_overlapping_events_numba(events_array)
+
+ if len(merged) != len(events):
+ logger.info(
+ f"Merged {len(events)} → {len(merged)} events ({len(events) - len(merged)} overlaps resolved)"
+ )
+
+ return merged
diff --git a/src/transivent/event_plotter.py b/src/transivent/event_plotter.py
new file mode 100644
index 0000000..c86cb14
--- /dev/null
+++ b/src/transivent/event_plotter.py
@@ -0,0 +1,524 @@
+from typing import Optional, Tuple
+
+import matplotlib.figure
+import matplotlib.pyplot as plt
+import numpy as np
+from loguru import logger
+
+from scopekit.display_state import (
+ _create_time_formatter,
+ _determine_offset_display_params,
+)
+from scopekit.plot import OscilloscopePlot
+
+
+class EventPlotter:
+ """
+ Provides utility functions for plotting individual events or event grids.
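+
+ A typical usage sketch (names are illustrative; ``plot`` is an existing
+ OscilloscopePlot, ``events`` an (n, 2) array of [t_start, t_end] rows):
+
+ >>> plotter = EventPlotter(plot, events, bg_clean=bg, global_noise=noise,
+ ...                        y_scale_mode="snr")
+ >>> plotter.plot_events_grid(max_events=16)
+ >>> plotter.save("events_grid.png")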
+ """
+
+ def __init__(
+ self,
+ osc_plot: OscilloscopePlot,
+ events: Optional[np.ndarray] = None,
+ trace_idx: int = 0,
+ bg_clean: Optional[np.ndarray] = None,
+ global_noise: Optional[np.float32] = None,
+ y_scale_mode: str = "raw",
+ ):
+ """
+ Initialize the EventPlotter with an OscilloscopePlot instance.
+
+ Parameters
+ ----------
+ osc_plot : OscilloscopePlot
+ An instance of OscilloscopePlot containing the waveform data.
+ events : Optional[np.ndarray], default=None
+ Events array with shape (n_events, 2) where each row is [start_time, end_time].
+ If None, will try to extract events from regions in the OscilloscopePlot.
+ trace_idx : int, default=0
+ Index of the trace to extract events from.
+ bg_clean : Optional[np.ndarray], default=None
+ The clean background signal array. This is needed for plotting background in event views.
+ global_noise : Optional[np.float32], default=None
+ The estimated global noise level. If provided, a noise ribbon will be plotted around bg_clean.
+ y_scale_mode : str, default="raw"
+ Y-axis scaling mode. Options:
+ - "raw": Raw signal values
+ - "percent": Percentage contrast relative to background ((signal - bg) / bg * 100)
+ - "snr": Signal-to-noise ratio ((signal - bg) / noise)
+ """
+ self.osc_plot = osc_plot
+ self.trace_idx = trace_idx
+ self.bg_clean = bg_clean
+ self.global_noise = global_noise # Store global_noise here
+ self.y_scale_mode = y_scale_mode
+
+ # Extract events from regions if not provided
+ if events is None:
+ self.events = self._extract_events_from_regions()
+ else:
+ self.events = events
+
+ if self.events is None or len(self.events) == 0:
+ logger.warning("EventPlotter initialized but no events are available.")
+ self.events = np.array([]) # Ensure it's an empty array if no events
+
+ # Validate y_scale_mode
+ valid_modes = ["raw", "percent", "snr"]
+ if self.y_scale_mode not in valid_modes:
+ logger.warning(
+ f"Invalid y_scale_mode '{self.y_scale_mode}'. Using 'raw'. Valid options: {valid_modes}"
+ )
+ self.y_scale_mode = "raw"
+
+ # Warn if scaling mode requires data that's not available
+ if self.y_scale_mode == "percent" and self.bg_clean is None:
+ logger.warning(
+ "y_scale_mode='percent' requires bg_clean data. Falling back to 'raw' mode."
+ )
+ self.y_scale_mode = "raw"
+ elif self.y_scale_mode == "snr" and self.global_noise is None:
+ logger.warning(
+ "y_scale_mode='snr' requires global_noise data. Falling back to 'raw' mode."
+ )
+ self.y_scale_mode = "raw"
+
+ self.fig: Optional[matplotlib.figure.Figure] = None
+
+ def save(self, filepath: str):
+ """
+ Save the current state of the EventPlotter to a file.
+
+ Parameters
+ ----------
+ filepath : str
+ Path to save the EventPlotter state.
+ """
+ if self.fig is not None:
+ self.fig.savefig(filepath)
+ logger.info(f"EventPlotter figure saved to {filepath}")
+
+ def _extract_events_from_regions(self) -> Optional[np.ndarray]:
+ """
+ Extract events from regions in the OscilloscopePlot.
+
+ Returns
+ -------
+ Optional[np.ndarray]
+ Events array with shape (n_events, 2) where each row is [start_time, end_time].
+ """
+ # First check if events are stored in the data manager
+ if hasattr(self.osc_plot.data, "get_events"):
+ events = self.osc_plot.data.get_events(self.trace_idx)
+ if events is not None:
+ return events
+
+ # If not, try to extract from regions (backward compatibility)
+ if not hasattr(self.osc_plot, "_regions") or not self.osc_plot._regions:
+ return None
+
+ # Extract regions from the specified trace
+ trace_regions = self.osc_plot._regions[self.trace_idx]
+ if not trace_regions:
+ return None
+
+ # Combine all regions into a single array
+ all_events = []
+ for region_def in trace_regions:
+ if "regions" in region_def and region_def["regions"] is not None:
+ all_events.append(region_def["regions"])
+
+ if not all_events:
+ return None
+
+ # Concatenate all region arrays
+ return np.vstack(all_events)
+
+ def _scale_y_data(
+ self, y_data: np.ndarray, bg_data: Optional[np.ndarray], mask: np.ndarray
+ ) -> Tuple[np.ndarray, str]:
+ """
+ Scale y-data according to the current scaling mode.
+
+ Parameters
+ ----------
+ y_data : np.ndarray
+ Raw signal data.
+ bg_data : Optional[np.ndarray]
+ Background data array (same length as full signal).
+ mask : np.ndarray
+ Boolean mask for extracting the relevant portion of bg_data.
+
+ Returns
+ -------
+ Tuple[np.ndarray, str]
+ Scaled y-data and appropriate y-axis label.
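+
+ Examples
+ --------
+ Worked example of the "percent" mode (illustrative values): a signal of
+ 0.9 over a background of 1.0 becomes 100 * (0.9 - 1.0) / 1.0 = -10%
+ contrast; in "snr" mode the same residual divided by the global noise
+ gives the deviation in σ.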
+ """
+ if self.y_scale_mode == "percent" and bg_data is not None:
+ bg_event = bg_data[mask]
+ # Avoid division by zero - use small value for near-zero background
+ bg_safe = np.where(np.abs(bg_event) < 1e-12, 1e-12, bg_event)
+ scaled_data = 100 * (y_data - bg_event) / bg_safe
+ return scaled_data, "Contrast (%)"
+ elif self.y_scale_mode == "snr" and self.global_noise is not None:
+ bg_event = bg_data[mask] if bg_data is not None else 0
+ scaled_data = (y_data - bg_event) / self.global_noise
+ return scaled_data, "Signal (σ)"
+ else:
+ return y_data, "Signal"
+
+ def _scale_background_data(self, bg_data: np.ndarray) -> np.ndarray:
+ """
+ Scale background data according to the current scaling mode.
+
+ Parameters
+ ----------
+ bg_data : np.ndarray
+ Background data.
+
+ Returns
+ -------
+ np.ndarray
+ Scaled background data.
+ """
+ if self.y_scale_mode == "percent":
+ # In percentage mode, background becomes 0% contrast
+ return np.zeros_like(bg_data)
+ elif self.y_scale_mode == "snr":
+ # In SNR mode, background becomes 0 sigma
+ return np.zeros_like(bg_data)
+ else:
+ return bg_data
+
+ def _scale_noise_ribbon(
+ self, bg_data: np.ndarray, noise_level: np.float32
+ ) -> Tuple[np.ndarray, np.ndarray]:
+ """
+ Scale noise ribbon bounds according to the current scaling mode.
+
+ Parameters
+ ----------
+ bg_data : np.ndarray
+ Background data.
+ noise_level : np.float32
+ Noise level (±1σ).
+
+ Returns
+ -------
+ Tuple[np.ndarray, np.ndarray]
+ Lower and upper bounds for noise ribbon.
+ """
+ if self.y_scale_mode == "percent":
+ # In percentage mode, noise becomes ±(noise/bg * 100)%
+ bg_safe = np.where(np.abs(bg_data) < 1e-12, 1e-12, bg_data)
+ noise_percent = 100 * noise_level / np.abs(bg_safe)
+ return -noise_percent, noise_percent
+ elif self.y_scale_mode == "snr":
+ # In SNR mode, noise becomes ±1σ
+ return np.full_like(bg_data, -1.0), np.full_like(bg_data, 1.0)
+ else:
+ return bg_data - noise_level, bg_data + noise_level
+
+ def plot_single_event(self, event_index: int) -> None:
+ """
+ Plot an individual event.
+
+ Parameters
+ ----------
+ event_index : int
+ Index of the event to plot.
+ """
+ if self.events is None or len(self.events) == 0:
+ logger.warning("No events available to plot.")
+ return
+
+ if not (0 <= event_index < len(self.events)):
+ logger.warning(
+ f"Event index {event_index} out of bounds. Total events: {len(self.events)}."
+ )
+ return
+
+ t_start, t_end = self.events[event_index]
+
+ # Get the raw data for the specific trace
+ t_raw = self.osc_plot.data.t_arrays[self.trace_idx]
+ x_raw = self.osc_plot.data.x_arrays[self.trace_idx]
+
+ # Define a window around the event
+ event_duration = t_end - t_start
+ plot_start_time = t_start - event_duration * 0.5
+ plot_end_time = t_end + event_duration * 0.5
+
+ # Get data within the plot window
+ mask = (t_raw >= plot_start_time) & (t_raw <= plot_end_time)
+ t_event_raw = t_raw[mask]
+ x_event = x_raw[mask]
+
+ if not np.any(mask):
+ logger.warning(
+ f"No data found for event {event_index} in time range [{t_start:.6f}, {t_end:.6f}]"
+ )
+ return
+
+ # Extract event data and make relative to plot window start
+ t_event_raw_relative = t_event_raw - plot_start_time
+
+ # Determine offset display parameters
+ time_span_raw = plot_end_time - plot_start_time
+ event_time_unit, display_scale, offset_time_raw, offset_unit = (
+ _determine_offset_display_params(
+ (plot_start_time, plot_end_time), time_span_raw
+ )
+ )
+
+ # Scale for display using the display_scale from offset params
+ t_event_display = t_event_raw_relative * display_scale
+
+ # Create time formatter for axis
+ time_formatter = _create_time_formatter(offset_time_raw, display_scale)
+
+ # Scale the data according to the selected mode
+ x_event_scaled, ylabel = self._scale_y_data(x_event, self.bg_clean, mask)
+
+ self.fig, ax_ev = plt.subplots(figsize=(6, 3))
+ ax_ev.plot(
+ t_event_display,
+ x_event_scaled,
+ label="Event",
+ color="black",
+ # marker="o",
+ mfc="none",
+ )
+
+ if self.bg_clean is not None:
+ bg_event_scaled = self._scale_background_data(self.bg_clean[mask])
+ ax_ev.plot(
+ t_event_display,
+ bg_event_scaled,
+ label="BG",
+ color="orange",
+ ls="--",
+ )
+ if self.global_noise is not None:
+ # Plot noise ribbon around the background
+ noise_lower, noise_upper = self._scale_noise_ribbon(
+ self.bg_clean[mask], self.global_noise
+ )
+ ax_ev.fill_between(
+ t_event_display,
+ noise_lower,
+ noise_upper,
+ color="gray",
+ alpha=0.3,
+ label="Noise (±1σ)",
+ )
+ else:
+ logger.warning(
+ "Clean background (bg_clean) not provided to EventPlotter, cannot plot."
+ )
+
+ # Set xlabel with offset if applicable
+ if offset_time_raw is not None:
+ # Use the offset unit for display, not the event time unit
+ offset_scale = 1.0
+ if offset_unit == "ms":
+ offset_scale = 1e3
+ elif offset_unit == "us":
+ offset_scale = 1e6
+ elif offset_unit == "ns":
+ offset_scale = 1e9
+
+ offset_display = offset_time_raw * offset_scale
+ ax_ev.set_xlabel(
+ f"Time ({event_time_unit}) + {offset_display:.3g} {offset_unit}"
+ )
+ else:
+ ax_ev.set_xlabel(f"Time ({event_time_unit})")
+
+ # Apply the time formatter to x-axis
+ ax_ev.xaxis.set_major_formatter(time_formatter)
+ # Use a shorter title - just the base trace name without noise info
+ trace_name = self.osc_plot.data.get_trace_name(self.trace_idx)
+ # Remove noise information from title if present
+ clean_name = trace_name.split(" | ")[0] if " | " in trace_name else trace_name
+ ax_ev.set_title(f"{clean_name} - Event {event_index + 1}")
+ ax_ev.set_ylabel(ylabel)
+ ax_ev.legend(loc="lower right")
+
+ def plot_events_grid(self, max_events: int = 16) -> None:
+ """
+ Plot multiple events in a subplot grid.
+
+ Parameters
+ ----------
+ max_events : int, default=16
+ Maximum number of events to plot in the grid.
+ """
+ if self.events is None or len(self.events) == 0:
+ logger.warning("No events available to plot.")
+ return
+
+ # Limit number of events
+ n_events = min(len(self.events), max_events)
+ events_to_plot = self.events[:n_events]
+
+ # Determine grid size
+ if n_events <= 4:
+ rows, cols = 2, 2
+ elif n_events <= 9:
+ rows, cols = 3, 3
+ elif n_events <= 16:
+ rows, cols = 4, 4
+ elif n_events <= 25:
+ rows, cols = 5, 5
+ else:
+ rows, cols = 6, 6 # Maximum 36 events
+
+ self.fig, axes = plt.subplots(
+ rows, cols, figsize=(cols * 4, rows * 3), sharey=True
+ ) # Sharey for consistent amplitude scale
+ # Get trace name safely and clean it
+ trace_name = self.osc_plot.data.get_trace_name(self.trace_idx)
+ # Remove noise information from title if present
+ clean_name = trace_name.split(" | ")[0] if " | " in trace_name else trace_name
+
+ self.fig.suptitle(
+ f"{clean_name} - Events 1-{n_events} (of {len(self.events)} total)",
+ fontsize=12,
+ )
+
+ # The grid is always at least 2x2, so axes is a 2-D array; flatten it
+ # for simple linear indexing
+ axes = axes.flatten()
+
+ # Get the raw data for the specific trace once
+ t_raw_full = self.osc_plot.data.t_arrays[self.trace_idx]
+ x_raw_full = self.osc_plot.data.x_arrays[self.trace_idx]
+
+ for i, (t_start, t_end) in enumerate(events_to_plot):
+ ax = axes[i]
+
+ # Define a window around the event
+ event_duration = t_end - t_start
+ plot_start_time = t_start - event_duration * 0.5
+ plot_end_time = t_end + event_duration * 0.5
+
+ # Extract event data
+ mask = (t_raw_full >= plot_start_time) & (t_raw_full <= plot_end_time)
+ t_event_raw = t_raw_full[mask]
+ x_event = x_raw_full[mask]
+
+ if not np.any(mask):
+ ax.text(
+ 0.5,
+ 0.5,
+ f"Event {i + 1}\nNo data",
+ ha="center",
+ va="center",
+ transform=ax.transAxes,
+ )
+ ax.set_xticks([])
+ ax.set_yticks([])
+ continue
+
+ # Make time relative to plot window start
+ t_event_raw_relative = t_event_raw - plot_start_time
+
+ # Determine offset display parameters for this event
+ time_span_raw = plot_end_time - plot_start_time
+ event_time_unit, display_scale, offset_time_raw, offset_unit = (
+ _determine_offset_display_params(
+ (plot_start_time, plot_end_time), time_span_raw
+ )
+ )
+
+ # Scale for display using the display_scale from offset params
+ t_event_display = t_event_raw_relative * display_scale
+
+ # Create time formatter for axis
+ time_formatter = _create_time_formatter(offset_time_raw, display_scale)
+
+ # Scale the data according to the selected mode
+ x_event_scaled, ylabel = self._scale_y_data(x_event, self.bg_clean, mask)
+
+ # Plot event
+ ax.plot(
+ t_event_display,
+ x_event_scaled,
+ "-ok",
+ mfc="none",
+ linewidth=1,
+ label="Signal",
+ ms=4,
+ )
+
+ if self.bg_clean is not None:
+ bg_event_scaled = self._scale_background_data(self.bg_clean[mask])
+ ax.plot(
+ t_event_display,
+ bg_event_scaled,
+ "orange",
+ linestyle="--",
+ alpha=0.7,
+ label="BG",
+ )
+ if self.global_noise is not None:
+ # Plot noise ribbon around the background
+ noise_lower, noise_upper = self._scale_noise_ribbon(
+ self.bg_clean[mask], self.global_noise
+ )
+ ax.fill_between(
+ t_event_display,
+ noise_lower,
+ noise_upper,
+ color="gray",
+ alpha=0.3,
+ label="Noise (±1σ)",
+ )
+ else:
+ logger.warning(
+ f"Background data not available for event {i + 1}. Ensure bg_clean is passed to EventPlotter."
+ )
+
+ # Formatting
+ ax.set_title(f"Event {i + 1}", fontsize=10)
+
+ # Set xlabel with offset if applicable
+ if offset_time_raw is not None:
+ # Use the offset unit for display, not the event time unit
+ offset_scale = 1.0
+ if offset_unit == "ms":
+ offset_scale = 1e3
+ elif offset_unit == "us":
+ offset_scale = 1e6
+ elif offset_unit == "ns":
+ offset_scale = 1e9
+
+ offset_display = offset_time_raw * offset_scale
+ ax.set_xlabel(
+ f"Time ({event_time_unit}) + {offset_display:.3g} {offset_unit}",
+ fontsize=8,
+ )
+ else:
+ ax.set_xlabel(f"Time ({event_time_unit})", fontsize=8)
+
+ ax.set_ylabel(ylabel, fontsize=8)
+ ax.tick_params(labelsize=7)
+
+ # Apply the time formatter to x-axis
+ ax.xaxis.set_major_formatter(time_formatter)
+
+ # Only show legend on first subplot
+ if i == 0:
+ ax.legend(fontsize=7, loc="best")
+
+ # Hide unused subplots
+ for i in range(n_events, len(axes)):
+ axes[i].set_visible(False)
diff --git a/src/transivent/io.py b/src/transivent/io.py
new file mode 100644
index 0000000..b7ad1c5
--- /dev/null
+++ b/src/transivent/io.py
@@ -0,0 +1,456 @@
+import os
+import xml.etree.ElementTree as ET
+from typing import Any, Dict, List, Optional, Tuple, Generator
+from warnings import warn
+
+import numpy as np
+from loguru import logger
+
+
+def _get_xml_sidecar_path(
+ bin_filename: str,
+ data_path: Optional[str] = None,
+ sidecar: Optional[str] = None
+) -> str:
+ """
+ Determine the XML sidecar file path using consistent logic.
+
+ Parameters
+ ----------
+ bin_filename : str
+ Name of the binary waveform file.
+ data_path : str, optional
+ Path to the data directory.
+ sidecar : str, optional
+ Name of the XML sidecar file. If None, auto-detects from bin_filename.
+
+ Returns
+ -------
+ str
+ Full path to the XML sidecar file.
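+
+ Examples
+ --------
+ A sketch of the naming convention, assuming POSIX paths and hypothetical
+ file names:
+
+ >>> _get_xml_sidecar_path("trace.Wfm.bin", data_path="data")
+ 'data/trace.bin'
+ >>> _get_xml_sidecar_path("trace.Wfm.bin", data_path="data", sidecar="header.bin")
+ 'data/header.bin'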
+ """
+ if sidecar is not None:
+ sidecar_path = (
+ os.path.join(data_path, sidecar)
+ if data_path is not None and not os.path.isabs(sidecar)
+ else sidecar
+ )
+ else:
+ # Auto-detect: the sidecar shares the base name with the ".Wfm"
+ # part dropped, e.g. "<name>.Wfm.bin" -> "<name>.bin"
+ base = os.path.splitext(bin_filename)[0]
+ if base.endswith(".Wfm"):
+ sidecar_guess = base[:-4] + ".bin"
+ else:
+ sidecar_guess = base + ".bin"
+ sidecar_path = (
+ os.path.join(data_path, sidecar_guess)
+ if data_path is not None
+ else sidecar_guess
+ )
+ return sidecar_path
+
+
+def get_waveform_params(
+ bin_filename: str,
+ data_path: Optional[str] = None,
+ sidecar: Optional[str] = None,
+) -> Dict[str, Any]:
+ """
+ Parse XML sidecar file to extract waveform parameters.
+
+ Given a binary waveform filename, find and parse the corresponding XML sidecar file.
+ If sidecar is provided, use it directly. Otherwise, guess from bin_filename.
+
+ Parameters
+ ----------
+ bin_filename : str
+ Name of the binary waveform file.
+ data_path : str, optional
+ Path to the data directory. If None, uses current directory.
+ sidecar : str, optional
+ Name of the XML sidecar file. If None, guesses from bin_filename.
+
+ Returns
+ -------
+ Dict[str, Any]
+ Dictionary with keys: sampling_interval, vertical_scale, vertical_offset,
+ byte_order, signal_format, signal_hardware_record_length.
+
+ Raises
+ ------
+ FileNotFoundError
+ If the XML sidecar file is not found.
+ RuntimeError
+ If the XML file cannot be parsed.
+
+ Warns
+ -----
+ UserWarning
+ If sampling resolution is not found in XML.
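+
+ Examples
+ --------
+ A minimal sketch of the sidecar structure this parser expects; ``Prop``
+ elements are matched anywhere in the tree, and the values here are
+ hypothetical::
+
+ <Database>
+ <Prop Name="Resolution" Value="1e-9"/>
+ <Prop Name="ByteOrder" Value="LSB First"/>
+ <Prop Name="SignalFormat" Value="FLOAT"/>
+ <Prop Name="SignalHardwareRecordLength" Value="1000000"/>
+ </Database>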
+ """
+ sidecar_path = _get_xml_sidecar_path(bin_filename, data_path, sidecar)
+ params = {
+ "sampling_interval": None,
+ "vertical_scale": None,
+ "vertical_offset": None,
+ "byte_order": "LSB", # default
+ "signal_format": "float32", # default
+ "signal_hardware_record_length": None,
+ }
+ found_resolution = False
+ if not os.path.exists(sidecar_path):
+ msg = (
+ f"XML sidecar file not found: {sidecar_path}\n"
+ f" bin_filename: {bin_filename}\n"
+ f" sidecar: {sidecar}\n"
+ f" data_path: {data_path}\n"
+ f" Tried path: {sidecar_path}\n"
+ f"Please check that the XML sidecar exists and the path is correct."
+ )
+ raise FileNotFoundError(msg)
+ try:
+ tree = ET.parse(sidecar_path)
+ root = tree.getroot()
+
+ # Validate XML structure
+ if root is None:
+ raise RuntimeError(f"XML file has no root element: {sidecar_path}")
+
+ # Track which parameters we found for validation
+ found_params = set()
+ signal_resolution = None
+ resolution = None
+
+ for prop in root.iter("Prop"):
+ if prop.attrib is None:
+ logger.warning(f"Found Prop element with no attributes in {sidecar_path}")
+ continue
+
+ name = prop.attrib.get("Name", "")
+ value = prop.attrib.get("Value", "")
+
+ if not name:
+ logger.warning(
+ f"Found Prop element with empty Name attribute in {sidecar_path}"
+ )
+ continue
+
+ try:
+ if name == "Resolution":
+ params["sampling_interval"] = float(value)
+ found_resolution = True
+ found_params.add("SignalResolution")
+ resolution = float(value)
+ elif name == "SignalResolution" and params["sampling_interval"] is None:
+ params["sampling_interval"] = float(value)
+ found_resolution = True
+ found_params.add("Resolution")
+ signal_resolution = float(value)
+ elif name == "SignalResolution":
+ signal_resolution = float(value) # store val even if Resolution is found
+ elif name == "ByteOrder":
+ if not value:
+ logger.warning(
+ f"Empty ByteOrder value in {sidecar_path}, using default LSB"
+ )
+ continue
+ params["byte_order"] = "LSB" if "LSB" in value else "MSB"
+ found_params.add("ByteOrder")
+ elif name == "SignalFormat":
+ if not value:
+ logger.warning(
+ f"Empty SignalFormat value in {sidecar_path}, using default float32"
+ )
+ continue
+ if "FLOAT" in value:
+ params["signal_format"] = "float32"
+ elif "INT16" in value:
+ params["signal_format"] = "int16"
+ elif "INT32" in value:
+ params["signal_format"] = "int32"
+ else:
+ logger.warning(
+ f"Unknown SignalFormat '{value}' in {sidecar_path}, using default float32"
+ )
+ found_params.add("SignalFormat")
+ elif name == "SignalHardwareRecordLength":
+ params["signal_hardware_record_length"] = int(value)
+ found_params.add("SignalHardwareRecordLength")
+ except ValueError as e:
+ logger.warning(
+ f"Failed to parse {name} value '{value}' in {sidecar_path}: {e}"
+ )
+ continue
+
+ # Validate critical parameters
+ if not found_resolution:
+ warn(
+ "Neither 'Resolution' nor 'SignalResolution' found in XML. "
+ + "Using default sampling_interval=None. "
+ + "Please provide a value or check your XML."
+ )
+ if (
+ "SignalResolution" in found_params
+ and "Resolution" in found_params
+ and not np.isclose(signal_resolution, resolution, rtol=1e-2, atol=1e-9)
+ ):
+ logger.warning(
+ f"'Resolution' ({resolution}) != 'SignalResolution' ({signal_resolution}) in {sidecar_path}. "
+ f"Using 'Resolution' ({resolution}). Diff: {abs(signal_resolution - resolution)}"
+ )
+
+ # Log what we found for debugging
+ logger.debug(f"XML parsing found parameters: {found_params}")
+
+ # Validate sampling interval if found
+ if params["sampling_interval"] is not None and params["sampling_interval"] <= 0:
+ logger.warning(
+ f"Invalid sampling interval {params['sampling_interval']} in {sidecar_path}. "
+ "This may lead to issues with time array generation."
+ )
+
+ except ET.ParseError as e:
+ raise RuntimeError(f"XML parsing error in {sidecar_path}: {e}")
+ except Exception as e:
+ raise RuntimeError(f"Failed to parse XML sidecar: {sidecar_path}: {e}")
+ return params
+
+
+def rd(
+ filename: str,
+ sampling_interval: Optional[float] = None,
+ data_path: Optional[str] = None,
+ sidecar: Optional[str] = None,
+ crop: Optional[List[int]] = None,
+) -> Tuple[np.ndarray, np.ndarray]:
+ """
+ Read waveform binary file using sidecar XML for parameters.
+
+ Parameters
+ ----------
+ filename : str
+ Name of the binary waveform file.
+ sampling_interval : float, optional
+ Sampling interval in seconds. If None, reads from XML sidecar.
+ data_path : str, optional
+ Path to the data directory.
+ sidecar : str, optional
+ Name of the XML sidecar file.
+ crop : List[int], optional
+ Crop indices [start, end]. If None, uses entire signal.
+
+ Returns
+ -------
+ Tuple[np.ndarray, np.ndarray]
+ Time array (float32) and signal array (already in physical units;
+ dtype follows SignalFormat).
+
+ Raises
+ ------
+ RuntimeError
+ If sampling interval cannot be determined.
+ FileNotFoundError
+ If the binary file is not found.
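+
+ Examples
+ --------
+ A minimal usage sketch (file and directory names are hypothetical)::
+
+ t, x = rd("trace.Wfm.bin", data_path="data")
+ dt = t[1] - t[0]  # sampling interval resolved from the XML sidecar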
+ """
+ # Always join data_path and filename if filename is not absolute
+ if data_path is not None and not os.path.isabs(filename):
+ fp = os.path.join(data_path, filename)
+ else:
+ fp = filename
+ params = get_waveform_params(
+ os.path.basename(fp), data_path, sidecar=sidecar
+ )
+ # Use sampling_interval from XML if available, else argument, else raise error
+ si = params["sampling_interval"]
+ if si is None:
+ if sampling_interval is not None:
+ si = sampling_interval
+ else:
+ raise RuntimeError(
+ f"Sampling interval could not be determined for file: {fp}. "
+ + "Please provide it or ensure the XML sidecar is present."
+ )
+ # log info about what we're reading and the parameters
+ rel_fp = os.path.relpath(fp, os.getcwd()) if os.path.isabs(fp) else fp
+ logger.info(f"Reading binary file: {rel_fp}")
+ if sidecar:
+ sidecar_path = _get_xml_sidecar_path(os.path.basename(fp), data_path, sidecar)
+ rel_sidecar = (
+ os.path.relpath(sidecar_path, os.getcwd())
+ if os.path.isabs(sidecar_path)
+ else sidecar_path
+ )
+ logger.info(f"--Using sidecar XML: {rel_sidecar}")
+ logger.info(f"--Sampling interval: {si}")
+ logger.info(f"--Byte order: {params['byte_order']}")
+ logger.info(f"--Signal format: {params['signal_format']}")
+ # Determine dtype
+ dtype = np.float32
+ if params["signal_format"] == "int16":
+ dtype = np.int16
+ elif params["signal_format"] == "int32":
+ dtype = np.int32
+ # Determine byte order
+ byteorder = "<" if params["byte_order"] == "LSB" else ">"
+ try:
+ with open(fp, "rb") as f:
+ import struct
+
+ # Read the first eight bytes as two little-endian 32-bit unsigned
+ # integers: element size in bytes, then record length in elements
+ header_bytes = f.read(8)
+ elsize, record_length_from_header = struct.unpack("<II", header_bytes)
+ logger.success(f"Bin header: data el. size: {elsize} (bytes)")
+ logger.success(f"Bin header: length: {record_length_from_header} ({elsize}-byte nums)")
+ params["record_length_from_header"] = record_length_from_header
+ if (
+ params["signal_hardware_record_length"] is not None
+ and params["signal_hardware_record_length"] != record_length_from_header
+ ):
+ logger.warning(
+ f"SignalHardwareRecordLength ({params['signal_hardware_record_length']}) "
+ f"does not match header record length ({record_length_from_header}) in {rel_fp}. "
+ "This may indicate a mismatch in expected data length."
+ )
+
+ # Skip the 8-byte header (the two uint32 values read above)
+ arr = np.fromfile(fp, dtype=byteorder + dtype().dtype.char, offset=8)
+
+ # Validate expected data length if available
+ expected_length = params["signal_hardware_record_length"]
+ if expected_length is not None and len(arr) != expected_length:
+ logger.warning(
+ f"Data length mismatch in {rel_fp}: "
+ f"expected {expected_length} points from SignalHardwareRecordLength, "
+ f"but read {len(arr)} points from binary file"
+ )
+
+ if crop is not None:
+ x = arr[crop[0] : crop[1]]
+ else:
+ x = arr
+ except FileNotFoundError:
+ raise FileNotFoundError(
+ f"The file '{fp}' was not found. "
+ + "Please ensure the file is in the correct directory."
+ )
+ # NB: no cast needed here; the data is already in physical units (V)
+
+ # Use np.linspace for more robust time array generation
+ num_points = len(x)
+ if num_points > 0:
+ t = np.linspace(0, (num_points - 1) * si, num_points, dtype=np.float32)
+ else:
+ t = np.array([], dtype=np.float32)
+ logger.warning(
+ f"Generated an empty time array for file {rel_fp}. "
+ f"Length of signal: {len(x)}, sampling interval: {si}. "
+ "This might indicate an issue with input data or sampling interval."
+ )
+
+ return t, x
+
+
+def rd_chunked(
+ filename: str,
+ chunk_size: int,
+ sampling_interval: Optional[float] = None,
+ data_path: Optional[str] = None,
+ sidecar: Optional[str] = None,
+) -> Generator[Tuple[np.ndarray, np.ndarray], None, None]:
+ """
+ Read waveform binary file in chunks using sidecar XML for parameters.
+
+ This is a generator function that yields chunks of data.
+
+ Parameters
+ ----------
+ filename : str
+ Name of the binary waveform file.
+ chunk_size : int
+ Number of points per chunk.
+ sampling_interval : float, optional
+ Sampling interval in seconds. If None, reads from XML sidecar.
+ data_path : str, optional
+ Path to the data directory.
+ sidecar : str, optional
+ Name of the XML sidecar file.
+
+ Yields
+ ------
+ Tuple[np.ndarray, np.ndarray]
+ Time array (float32) and signal array (float32) for each chunk.
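+
+ Examples
+ --------
+ A sketch of streaming a large capture without loading it all at once
+ (file name and chunk size are hypothetical)::
+
+ peak = float("-inf")
+ for t_chunk, x_chunk in rd_chunked("trace.Wfm.bin", chunk_size=1_000_000):
+ peak = max(peak, float(x_chunk.max()))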
+ """
+ if data_path is not None and not os.path.isabs(filename):
+ fp = os.path.join(data_path, filename)
+ else:
+ fp = filename
+
+ params = get_waveform_params(os.path.basename(fp), data_path, sidecar=sidecar)
+ si = params["sampling_interval"]
+ if si is None:
+ if sampling_interval is not None:
+ si = sampling_interval
+ else:
+ raise RuntimeError(f"Sampling interval could not be determined for file: {fp}.")
+
+ dtype = np.float32
+ if params["signal_format"] == "int16":
+ dtype = np.int16
+ elif params["signal_format"] == "int32":
+ dtype = np.int32
+
+ byteorder = "<" if params["byte_order"] == "LSB" else ">"
+ full_dtype_str = byteorder + dtype().dtype.char
+
+ header_size_bytes = 8
+
+ try:
+ with open(fp, "rb") as f:
+ # Read header
+ header_bytes = f.read(header_size_bytes)
+ if len(header_bytes) < header_size_bytes:
+ logger.warning("Could not read full header from binary file.")
+ return
+
+ import struct
+
+ elsize, record_length_from_header = struct.unpack("<II", header_bytes)
+ logger.success(f"Bin header: data el. size: {elsize} (bytes)")
+ logger.success(
+ f"Bin header: length: {record_length_from_header} ({elsize}-byte nums)"
+ )
+
+ total_points = params.get("signal_hardware_record_length")
+ if total_points is None:
+ total_points = record_length_from_header
+ logger.warning(
+ f"SignalHardwareRecordLength not found. Using length from header: {total_points} points."
+ )
+ elif total_points != record_length_from_header:
+ logger.warning(
+ f"SignalHardwareRecordLength ({total_points}) "
+ f"does not match header record length ({record_length_from_header}) in {fp}. "
+ "Using header length."
+ )
+ total_points = record_length_from_header
+
+ current_pos = 0
+ while current_pos < total_points:
+ points_to_read = min(chunk_size, total_points - current_pos)
+
+ x_chunk = np.fromfile(f, dtype=full_dtype_str, count=points_to_read)
+
+ if len(x_chunk) == 0:
+ break
+
+ start_time = current_pos * si
+ num_points = len(x_chunk)
+ t_chunk = np.linspace(
+ start_time,
+ start_time + (num_points - 1) * si,
+ num_points,
+ dtype=np.float32,
+ )
+
+ yield t_chunk, x_chunk.astype(np.float32)
+
+ current_pos += points_to_read
+
+ except FileNotFoundError:
+ raise FileNotFoundError(f"The file '{fp}' was not found.")