From 1a50424da910a2a3cfd724daa2c0be29f36eb4d5 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sat, 12 Apr 2025 11:22:06 -0400 Subject: [PATCH 001/133] Cell magics are run ina subprocess and the errors are captured, therefore failing tutorials were showing as passing. Signed-off-by: Israel Martinez --- docs/tutorials/run_tutorials.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/docs/tutorials/run_tutorials.py b/docs/tutorials/run_tutorials.py index feb12e8f..39165759 100755 --- a/docs/tutorials/run_tutorials.py +++ b/docs/tutorials/run_tutorials.py @@ -15,7 +15,7 @@ from pathlib import Path import nbformat -from nbconvert.preprocessors import ExecutePreprocessor +from nbconvert.preprocessors import ExecutePreprocessor, RegexRemovePreprocessor from nbconvert import HTMLExporter from nbconvert.writers import FilesWriter @@ -221,6 +221,14 @@ def run_tutorial(tutorial): with (open(nb_path) as nb_file): nb = nbformat.read(nb_file, as_version=nbformat.NO_CONVERT) + # Remove magic, which can make a failing notebook look + # like it succeeded. + for cell in nb.cells: + if cell.cell_type == 'code': + source = cell.source.strip("\n").lstrip() + if len(source) >= 1 and source[0] == "%": + cell.source = cell.source.replace("%", "#[magic commented out by run_tutorials.py]%") + logger.info(f"Executing notebook {source_nb_path}...") start_time = timeit.default_timer() ep = ExecutePreprocessor(timeout=config['globals:timeout'], kernel_name=config['globals:kernel']) From 6559f842c8531510f22a9ba6a198f6119dcd6ee0 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sat, 12 Apr 2025 18:25:06 -0400 Subject: [PATCH 002/133] Remove only magic cell. I think magic lines are not problematic Signed-off-by: Israel Martinez --- docs/tutorials/run_tutorials.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/tutorials/run_tutorials.py b/docs/tutorials/run_tutorials.py index 39165759..747b8957 100755 --- a/docs/tutorials/run_tutorials.py +++ b/docs/tutorials/run_tutorials.py @@ -226,8 +226,8 @@ def run_tutorial(tutorial): for cell in nb.cells: if cell.cell_type == 'code': source = cell.source.strip("\n").lstrip() - if len(source) >= 1 and source[0] == "%": - cell.source = cell.source.replace("%", "#[magic commented out by run_tutorials.py]%") + if len(source) >= 2 and source[:2] == "%%": + cell.source = cell.source.replace("%%", "#[magic commented out by run_tutorials.py]%%") logger.info(f"Executing notebook {source_nb_path}...") start_time = timeit.default_timer() From 55d9d7eca6a26c5c259b95ecb2009f8debdff7aa Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sat, 12 Apr 2025 18:25:49 -0400 Subject: [PATCH 003/133] Add test for magic cells. Also serves as a negative control Signed-off-by: Israel Martinez --- docs/tutorials/run_tutorials.yml | 2 + .../this_test_must_fail_remove_magic.ipynb | 73 +++++++++++++++++++ 2 files changed, 75 insertions(+) create mode 100644 docs/tutorials/test/this_test_must_fail_remove_magic.ipynb diff --git a/docs/tutorials/run_tutorials.yml b/docs/tutorials/run_tutorials.yml index b6433478..1fabbe4a 100644 --- a/docs/tutorials/run_tutorials.yml +++ b/docs/tutorials/run_tutorials.yml @@ -31,6 +31,8 @@ tutorials: unzip: True # Optional. 
False by default #unzip_output: # Optional, if the unzipped file name is different from just removing the .zip or .gz + test_magic_test_must_fail: + notebook: test/this_test_must_fail_remove_magic.ipynb dataIO: notebook: DataIO/DataIO_example.ipynb diff --git a/docs/tutorials/test/this_test_must_fail_remove_magic.ipynb b/docs/tutorials/test/this_test_must_fail_remove_magic.ipynb new file mode 100644 index 00000000..2f4277f7 --- /dev/null +++ b/docs/tutorials/test/this_test_must_fail_remove_magic.ipynb @@ -0,0 +1,73 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "b82642d2-a68f-41d2-8a63-3fee53880c71", + "metadata": {}, + "source": [ + "# Test magic removal" + ] + }, + { + "cell_type": "markdown", + "id": "b42b3af6-b4e6-4809-bbcb-8917202d5c44", + "metadata": {}, + "source": [ + "Magic cells are run in a subprocess, which catches exceptions and makes it look like the notebook succeeded " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7e9ae042-38d3-4f51-9fd9-0f630bdc1e48", + "metadata": {}, + "outputs": [], + "source": [ + "%%time\n", + "# This should fail since \"five\" has not been defined.\n", + "5*five" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "eb67f11c-b8cc-42ef-ac81-ac01fd6a88da", + "metadata": {}, + "outputs": [], + "source": [ + "%%time\n", + "# It shouldn't make it to this cell\n", + "5*5" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ac335794-7781-410e-9b60-d0dfe2ef6ad3", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python [conda env:cosipy]", + "language": "python", + "name": "conda-env-cosipy-py" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.16" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} From 0e408cc57e3b8f2d763083b1d413e4164d94a6de Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sat, 12 Apr 2025 18:29:02 -0400 Subject: [PATCH 004/133] Change test name Signed-off-by: Israel Martinez --- docs/tutorials/run_tutorials.yml | 4 ++-- ...ust_fail_remove_magic.ipynb => test_must_fail_magic.ipynb} | 0 2 files changed, 2 insertions(+), 2 deletions(-) rename docs/tutorials/test/{this_test_must_fail_remove_magic.ipynb => test_must_fail_magic.ipynb} (100%) diff --git a/docs/tutorials/run_tutorials.yml b/docs/tutorials/run_tutorials.yml index 1fabbe4a..214aea9c 100644 --- a/docs/tutorials/run_tutorials.yml +++ b/docs/tutorials/run_tutorials.yml @@ -31,8 +31,8 @@ tutorials: unzip: True # Optional. False by default #unzip_output: # Optional, if the unzipped file name is different from just removing the .zip or .gz - test_magic_test_must_fail: - notebook: test/this_test_must_fail_remove_magic.ipynb + test_must_fail_magic: + notebook: test/test_must_fail_magic.ipynb dataIO: notebook: DataIO/DataIO_example.ipynb diff --git a/docs/tutorials/test/this_test_must_fail_remove_magic.ipynb b/docs/tutorials/test/test_must_fail_magic.ipynb similarity index 100% rename from docs/tutorials/test/this_test_must_fail_remove_magic.ipynb rename to docs/tutorials/test/test_must_fail_magic.ipynb From 83d04afb0e4e003afd7ac8d84ca2f4a81f82b526 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sat, 12 Apr 2025 18:34:40 -0400 Subject: [PATCH 005/133] Fail successfully = Green! 
Signed-off-by: Israel Martinez --- docs/tutorials/run_tutorials.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/docs/tutorials/run_tutorials.py b/docs/tutorials/run_tutorials.py index 747b8957..0a71da23 100755 --- a/docs/tutorials/run_tutorials.py +++ b/docs/tutorials/run_tutorials.py @@ -278,7 +278,11 @@ def run_tutorial(tutorial): if succeeded: logger.info(colorama.Fore.GREEN + "SUCCEEDED " + colorama.Style.RESET_ALL + f"({elapsed:.1f} s) {tutorial}") else: - logger.info(colorama.Fore.RED + "FAILED " + colorama.Style.RESET_ALL + f"({elapsed:.1f} s) {tutorial}") + color = colorama.Fore.RED + if "test_must_fail" in tutorial: + # Failed succesfully! + color = colorama.Fore.GREEN + logger.info(color + "FAILED " + colorama.Style.RESET_ALL + f"({elapsed:.1f} s) {tutorial}") # Overall summary log logger.info(f"cosipy version: {cosipy.__version__}") @@ -291,7 +295,11 @@ def run_tutorial(tutorial): if succeeded: logger.info(colorama.Fore.GREEN + "SUCCEEDED " + colorama.Style.RESET_ALL + f"({elapsed:.1f} s) {tutorial}") else: - logger.info(colorama.Fore.RED + "FAILED " + colorama.Style.RESET_ALL + f"({elapsed:.1f} s) {tutorial}") + color = colorama.Fore.RED + if "test_must_fail" in tutorial: + # Failed succesfully! + color = colorama.Fore.GREEN + logger.info(color + "FAILED " + colorama.Style.RESET_ALL + f"({elapsed:.1f} s) {tutorial}") if __name__ == "__main__": From beafef096059b585c41ec87678d4e865bdba25f1 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Mon, 14 Apr 2025 21:07:47 -0400 Subject: [PATCH 006/133] Fix file checksum in ts map tutorial Signed-off-by: Israel Martinez --- docs/tutorials/ts_map/Parallel_TS_map_computation.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/tutorials/ts_map/Parallel_TS_map_computation.ipynb b/docs/tutorials/ts_map/Parallel_TS_map_computation.ipynb index 4a3c31a2..fb29de59 100644 --- a/docs/tutorials/ts_map/Parallel_TS_map_computation.ipynb +++ b/docs/tutorials/ts_map/Parallel_TS_map_computation.ipynb @@ -704,7 +704,7 @@ "\n", "GRB_signal_path = data_dir/\"grb_binned_data.hdf5\"\n", "# download GRB signal file ~76.90 KB\n", - "fetch_wasabi_file(\"COSI-SMEX/cosipy_tutorials/grb_spectral_fit_local_frame/grb_binned_data.hdf5\", GRB_signal_path, checksum = 'fce391a4b45624b25552c7d111945f60')\n", + "fetch_wasabi_file(\"COSI-SMEX/cosipy_tutorials/grb_spectral_fit_local_frame/grb_binned_data.hdf5\", GRB_signal_path, checksum = 'fcf7022369b6fb378d67b780fc4b5db8')\n", "\n", "background_path = data_dir/\"bkg_binned_data_local.hdf5\"\n", "# download background file ~255.97 MB\n", From 122d4ad06f978023ef93e19d05ca3756933e3b26 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Mon, 14 Apr 2025 21:25:52 -0400 Subject: [PATCH 007/133] Catch nb failure exception to allow notebook to still be saved and clean log file handler Signed-off-by: Israel Martinez --- docs/tutorials/run_tutorials.py | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/docs/tutorials/run_tutorials.py b/docs/tutorials/run_tutorials.py index 0a71da23..dff898e0 100755 --- a/docs/tutorials/run_tutorials.py +++ b/docs/tutorials/run_tutorials.py @@ -3,6 +3,8 @@ import logging import traceback +from nbclient.exceptions import CellExecutionError + logging.basicConfig(format='%(asctime)s - %(levelname)s - %(filename)s:%(lineno)d - %(message)s', datefmt='%Y-%m-%d %H:%M:%S', level=logging.INFO) @@ -213,6 +215,7 @@ def run_tutorial(tutorial): os.symlink(local_copy, wdir/local_copy.name) # Run + failed = False 
if not args.dry: for notebook in notebooks: source_nb_path = config.absolute_path(notebook) @@ -232,10 +235,22 @@ def run_tutorial(tutorial): logger.info(f"Executing notebook {source_nb_path}...") start_time = timeit.default_timer() ep = ExecutePreprocessor(timeout=config['globals:timeout'], kernel_name=config['globals:kernel']) - ep_out = ep.preprocess(nb, {'metadata': {'path': str(wdir)}}) + + try: + ep_out = ep.preprocess(nb, {'metadata': {'path': str(wdir)}}) + except CellExecutionError as e: + # Will re-raise after output and cleaning + cell_exception = e + failed = True + elapsed = timeit.default_timer() - start_time - logger.info(f"Notebook {source_nb_path} took {elapsed} seconds to finish.") + if failed: + logger.error(f"Notebook {source_nb_path} failed after {elapsed} seconds") + else: + logger.info(f"Notebook {source_nb_path} took {elapsed} seconds to finish.") + + # Save output nb_exec_path = nb_path.with_name(nb_path.stem + "_executed" + nb_path.suffix) with open(nb_exec_path, 'w', encoding='utf-8') as exec_nb_file: nbformat.write(nb, exec_nb_file) @@ -250,6 +265,10 @@ def run_tutorial(tutorial): # Remove file logger logger.removeHandler(file_handler) + # Re-raise if failed + if failed: + raise cell_exception + # Loop through each tutorial summary = {} for tutorial in tutorials: From 9f7b717a38322c3bc36f42f9412d4499c19f4251 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Wed, 16 Apr 2025 14:10:12 -0400 Subject: [PATCH 008/133] Initial take at new interfaces needed by COSILike Signed-off-by: Israel Martinez --- cosipy/interfaces/__init__.py | 8 +++ cosipy/interfaces/background_interface.py | 69 +++++++++++++++++++ cosipy/interfaces/data_interface.py | 31 +++++++++ cosipy/interfaces/expectation_interface.py | 24 +++++++ cosipy/interfaces/likelihood_interface.py | 37 ++++++++++ cosipy/interfaces/measurements.py | 7 ++ .../interfaces/source_response_interface.py | 27 ++++++++ cosipy/statistics/__init__.py | 1 + cosipy/statistics/likelihood_functions.py | 11 +++ tests/interfaces/__init__.py | 0 tests/interfaces/test_background_interface.py | 21 ++++++ 11 files changed, 236 insertions(+) create mode 100644 cosipy/interfaces/__init__.py create mode 100644 cosipy/interfaces/background_interface.py create mode 100644 cosipy/interfaces/data_interface.py create mode 100644 cosipy/interfaces/expectation_interface.py create mode 100644 cosipy/interfaces/likelihood_interface.py create mode 100644 cosipy/interfaces/measurements.py create mode 100644 cosipy/interfaces/source_response_interface.py create mode 100644 cosipy/statistics/__init__.py create mode 100644 cosipy/statistics/likelihood_functions.py create mode 100644 tests/interfaces/__init__.py create mode 100644 tests/interfaces/test_background_interface.py diff --git a/cosipy/interfaces/__init__.py b/cosipy/interfaces/__init__.py new file mode 100644 index 00000000..203c3353 --- /dev/null +++ b/cosipy/interfaces/__init__.py @@ -0,0 +1,8 @@ +from .measurements import * +from .data_interface import * +from .background_interface import * +from .likelihood_interface import * +from .expectation_interface import * +from .source_response_interface import * + + diff --git a/cosipy/interfaces/background_interface.py b/cosipy/interfaces/background_interface.py new file mode 100644 index 00000000..6f125cd3 --- /dev/null +++ b/cosipy/interfaces/background_interface.py @@ -0,0 +1,69 @@ +from typing import Protocol, runtime_checkable, Dict, Any +import histpy +import numpy as np + +import logging +logger = logging.getLogger(__name__) + +from 
.expectation_interface import BinnedExpectationInterface, UnbinnedExpectationInterface +from .measurements import Measurements + +__all__ = [ + "BackgroundInterface", + "BinnedBackgroundInterface", + "UnbinnedBackgroundInterface", + "NullBackground", + ] + +@runtime_checkable +class BackgroundInterface(Protocol): + def set_parameters(self, **params:Dict[str, Any]) -> None:... + @property + def parameters(self) -> Dict[str, Any]:... + +class BinnedBackgroundInterface(BackgroundInterface, BinnedExpectationInterface): + """ + No new methods, just the inherited one + """ + +class UnbinnedBackgroundInterface(BackgroundInterface, UnbinnedExpectationInterface): + """ + No new methods, just the inherited one + """ + +# Null background singleton +# It has not parameters and it always returns 0s +# It can be checked like a None. e.g. +# if bkg is not NullBackground: +# expectation += bkg.get_expectation() +# Which should work even without the if, but the if +# allows to avoid a potentially (is it?) lenghty operation. +class _NullBackground(BinnedBackgroundInterface, UnbinnedBackgroundInterface): + # All ways to instantiate this class should return the same object + # The singleton instant will be define later, following the class + # definition + def __new__(cls, *args, **kwargs): + return NullBackground + def __copy__(self): + return NullBackground + def __deepcopy__(self, memo): + return NullBackground + def __call__(self): + # This allows to use either NullBackground or NullBackground() + # NullBackground is NullBackground() == True + return NullBackground + # Implement all method from binned and unbinned background + # The results are all )'s + @property + def parameters(self): return {} + def set_parameters(self, **params:Dict[str, Any]) -> None: pass + def set_binning(self, axes:histpy.Axes) -> None: pass + def get_expectation(self)->histpy.Histogram: pass + def get_ncounts(self): return 0. + def get_probability(self, measurements:Measurements): return np.broadcast_to(0., measurements.size) + +# Instantiate *the* NullBackground singleton +try: + NullBackground +except NameError: + NullBackground = object.__new__(_NullBackground) diff --git a/cosipy/interfaces/data_interface.py b/cosipy/interfaces/data_interface.py new file mode 100644 index 00000000..4b6988fd --- /dev/null +++ b/cosipy/interfaces/data_interface.py @@ -0,0 +1,31 @@ +from typing import Protocol, runtime_checkable, Dict, Type, Any + +from .measurements import Measurements + +import histpy + +__all__ = ["DataInterface", + "UnbinnedDataInterface", + "BinnedDataInterface"] + +@runtime_checkable +class DataInterface(Protocol): + """ + Not much... + """ + +class UnbinnedDataInterface(DataInterface): + @property + def measurements(self) -> Measurements:... + @property + def measurement_types(self) -> Dict[str, Type[Any]]:... + +class BinnedDataInterface(DataInterface): + @property + def axes(self) -> histpy.Axes:... + @property + def data(self) -> histpy.Histogram:... + + + + diff --git a/cosipy/interfaces/expectation_interface.py b/cosipy/interfaces/expectation_interface.py new file mode 100644 index 00000000..50720b1d --- /dev/null +++ b/cosipy/interfaces/expectation_interface.py @@ -0,0 +1,24 @@ +from typing import Protocol, runtime_checkable, Dict, Any + +import histpy +import numpy as np + +from .measurements import Measurements + +__all__ = [ + "UnbinnedExpectationInterface", + "BinnedExpectationInterface" + ] + +@runtime_checkable +class BinnedExpectationInterface(Protocol): + def set_binning(self, axes:histpy.Axes) -> None:... 
+ def get_expectation(self)->histpy.Histogram:... + +@runtime_checkable +class UnbinnedExpectationInterface(Protocol): + def get_ncounts(self) -> float:... + def get_probability(self, measurements:Measurements) -> np.ndarray:... + + + diff --git a/cosipy/interfaces/likelihood_interface.py b/cosipy/interfaces/likelihood_interface.py new file mode 100644 index 00000000..a2af3e26 --- /dev/null +++ b/cosipy/interfaces/likelihood_interface.py @@ -0,0 +1,37 @@ +from typing import Protocol, runtime_checkable + +__all__ = ['LikelihoodInterface', + 'BinnedLikelihoodInterface', + 'UnbinnedLikelihoodInterface'] + +from .expectation_interface import UnbinnedExpectationInterface, BinnedExpectationInterface +from .data_interface import UnbinnedDataInterface, BinnedDataInterface +from .background_interface import UnbinnedBackgroundInterface, BinnedBackgroundInterface + +@runtime_checkable +class LikelihoodInterface(Protocol): + def get_log_like(self) -> float:... + @property + def nobservations(self) -> int: + """For BIC and other statistics""" + +class BinnedLikelihoodInterface(LikelihoodInterface): + """ + Needs to check that data, response and bkg are compatible + """ + def __init__(self, + data: BinnedDataInterface, + response: BinnedExpectationInterface, + bkg: BinnedBackgroundInterface, + *args, **kwargs):... + +class UnbinnedLikelihoodInterface(LikelihoodInterface): + """ + Needs to check that data, response and bkg are compatible + """ + def __init__(self, + data: UnbinnedDataInterface, + response: UnbinnedExpectationInterface, + bkg: UnbinnedBackgroundInterface, + *args, **kwargs):... + diff --git a/cosipy/interfaces/measurements.py b/cosipy/interfaces/measurements.py new file mode 100644 index 00000000..cbfec53c --- /dev/null +++ b/cosipy/interfaces/measurements.py @@ -0,0 +1,7 @@ + +class Measurements: + # Dummy for now + @property + def size(self) -> int: + return 10 + diff --git a/cosipy/interfaces/source_response_interface.py b/cosipy/interfaces/source_response_interface.py new file mode 100644 index 00000000..951fb6bc --- /dev/null +++ b/cosipy/interfaces/source_response_interface.py @@ -0,0 +1,27 @@ +from typing import Protocol, runtime_checkable +import threeML + +from .expectation_interface import BinnedExpectationInterface, UnbinnedExpectationInterface + +__all__ = ["SourceResponseInterface", + "ThreeMLSourceResponseInterface", + "ThreeMLUnbinnedSourceResponseInterface", + "ThreeMLBinnedSourceResponseInterface"] + +@runtime_checkable +class SourceResponseInterface(Protocol): + ... + +class ThreeMLSourceResponseInterface(SourceResponseInterface): + def set_model(self, model: threeML.Model): ... + + +class ThreeMLUnbinnedSourceResponseInterface(UnbinnedExpectationInterface, ThreeMLSourceResponseInterface): + """ + No new methods. Just the inherited ones. + """ + +class ThreeMLBinnedSourceResponseInterface(ThreeMLSourceResponseInterface, BinnedExpectationInterface): + """ + No new methods. Just the inherited ones. 
+ """ diff --git a/cosipy/statistics/__init__.py b/cosipy/statistics/__init__.py new file mode 100644 index 00000000..cc6ae409 --- /dev/null +++ b/cosipy/statistics/__init__.py @@ -0,0 +1 @@ +from .likelihood_functions import * \ No newline at end of file diff --git a/cosipy/statistics/likelihood_functions.py b/cosipy/statistics/likelihood_functions.py new file mode 100644 index 00000000..39544740 --- /dev/null +++ b/cosipy/statistics/likelihood_functions.py @@ -0,0 +1,11 @@ +from cosipy.interfaces import BinnedLikelihoodInterface, UnbinnedLikelihoodInterface + +__all__ = ['UnbinnedLikelihood', + 'PoissonLikelihood'] + +class UnbinnedLikelihood(UnbinnedLikelihoodInterface): + ... + +class PoissonLikelihood(BinnedLikelihoodInterface): + ... + diff --git a/tests/interfaces/__init__.py b/tests/interfaces/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/interfaces/test_background_interface.py b/tests/interfaces/test_background_interface.py new file mode 100644 index 00000000..6a0831c1 --- /dev/null +++ b/tests/interfaces/test_background_interface.py @@ -0,0 +1,21 @@ +from cosipy.interfaces import (NullBackground, + BackgroundInterface, + BinnedBackgroundInterface, + UnbinnedBackgroundInterface + ) + +def test_null_background(): + null_1 = NullBackground() + null_2 = NullBackground + null_3 = NullBackground + + assert null_1 is null_2 + assert null_2 is null_3 + assert null_3 is null_1 + assert isinstance(null_1, BackgroundInterface) + assert isinstance(null_2, BinnedBackgroundInterface) + assert isinstance(null_3, UnbinnedBackgroundInterface) + + class RandomClass: pass + + assert not isinstance(null_1, RandomClass) From 3101a1e02917bd038f2fb922f46bdf3a54c22f5c Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Wed, 16 Apr 2025 14:23:55 -0400 Subject: [PATCH 009/133] COSILike implementation using interfaces. WiP Signed-off-by: Israel Martinez --- cosipy/threeml/COSILike.py | 403 ++++++------------------------------- 1 file changed, 61 insertions(+), 342 deletions(-) diff --git a/cosipy/threeml/COSILike.py b/cosipy/threeml/COSILike.py index 6f67de30..9b1b12fb 100644 --- a/cosipy/threeml/COSILike.py +++ b/cosipy/threeml/COSILike.py @@ -1,363 +1,82 @@ -from threeML import PluginPrototype -from threeML.minimizer import minimization -from threeML.config.config import threeML_config -from threeML.exceptions.custom_exceptions import FitFailed -from astromodels import Parameter - -from cosipy.response.FullDetectorResponse import FullDetectorResponse -from cosipy.response.ExtendedSourceResponse import ExtendedSourceResponse -from cosipy.image_deconvolution import AllSkyImageModel - -from scoords import SpacecraftFrame, Attitude - -from mhealpy import HealpixMap - -from cosipy.response import PointSourceResponse, DetectorResponse -from histpy import Histogram -import h5py as h5 -from histpy import Axis, Axes -import sys - -import astropy.units as u -import astropy.coordinates as coords - -from sparse import COO - -import numpy as np - -from scipy.special import factorial - -import collections - -import copy - -import logging -logger = logging.getLogger(__name__) - -import inspect +from typing import Dict +from threeML import PluginPrototype, Parameter +from cosipy.statistics import UnbinnedLikelihood, PoissonLikelihood +from cosipy.interfaces import (DataInterface, + ThreeMLSourceResponseInterface, + BackgroundInterface, + LikelihoodInterface) class COSILike(PluginPrototype): - """ - COSI 3ML plugin. - - Parameters - ---------- - name : str - Plugin name e.g. "cosi". 
Needs to have a distinct name with respect to other plugins in the same analysis - dr : str - Path to full detector response - data : histpy.Histogram - Binned data. Note: Eventually this should be a cosipy data class - bkg : histpy.Histogram - Binned background model. Note: Eventually this should be a cosipy data class - sc_orientation : cosipy.spacecraftfile.SpacecraftFile - Contains the information of the orientation: timestamps (astropy.Time) and attitudes (scoord.Attitude) that describe - the spacecraft for the duration of the data included in the analysis - nuisance_param : astromodels.core.parameter.Parameter, optional - Background parameter - coordsys : str, optional - Coordinate system ('galactic' or 'spacecraftframe') to perform fit in, which should match coordinate system of data - and background. This only needs to be specified if the binned data and background do not have a coordinate system - attached to them - precomputed_psr_file : str, optional - Full path to precomputed point source response in Galactic coordinates - earth_occ : bool, optional - Option to include Earth occultation in fit (default is True). - """ - def __init__(self, name, dr, data, bkg, sc_orientation, - nuisance_param=None, coordsys=None, precomputed_psr_file=None, earth_occ=True, **kwargs): - - # create the hash for the nuisance parameters. We have none for now. - self._nuisance_parameters = collections.OrderedDict() - - # call the prototype constructor. Boilerplate. - super(COSILike, self).__init__(name, self._nuisance_parameters) - # User inputs needed to compute the likelihood - self._name = name - self._rsp_path = dr - self._dr = FullDetectorResponse.open(dr) - self._data = data - self._bkg = bkg - self._sc_orientation = sc_orientation - self.earth_occ = earth_occ - - try: - if data.axes["PsiChi"].coordsys.name != bkg.axes["PsiChi"].coordsys.name: - raise RuntimeError("Data is binned in " + data.axes["PsiChi"].coordsys.name + " and background is binned in " - + bkg.axes["PsiChi"].coordsys.name + ". They should be binned in the same coordinate system.") - else: - self._coordsys = data.axes["PsiChi"].coordsys.name - except: - if coordsys == None: - raise RuntimeError(f"There is no coordinate system attached to the binned data. One must be provided by " - f"specifiying coordsys='galactic' or 'spacecraftframe'") - else: - self._coordsys = coordsys - - # Place-holder for cached data. - self._model = None - self._source = None - self._psr = None - self._signal = None - self._expected_counts = None - - # Set to fit nuisance parameter if given by user - if nuisance_param == None: - self.set_inner_minimization(False) - elif isinstance(nuisance_param, Parameter): - self.set_inner_minimization(True) - self._bkg_par = nuisance_param - self._nuisance_parameters[self._bkg_par.name] = self._bkg_par - self._nuisance_parameters[self._bkg_par.name].free = self._fit_nuisance_params - else: - raise RuntimeError("Nuisance parameter must be astromodels.core.parameter.Parameter object") - - # Option to use precomputed point source response. - # Note: this still needs to be implemented in a - # consistent way for point srcs and extended srcs. - self.precomputed_psr_file = precomputed_psr_file - if self.precomputed_psr_file != None: - logger.info("... 
loading the pre-computed image response ...") - self.image_response = ExtendedSourceResponse.open(self.precomputed_psr_file) - logger.info("--> done") - - def set_model(self, model): + def __init__(self, + name, + data: DataInterface, + response: ThreeMLSourceResponseInterface, + bkg: BackgroundInterface, + likelihood = 'poisson'): """ - Set the model to be used in the joint minimization. - + Parameters ---------- - model : astromodels.core.model.Model - Any model supported by astromodels + name + data + response + bkg + likefun: str or LikelihoodInterface (Use at your own risk. make sure uses data, response and bkg) """ - - # Temporary fix to only print log-likelihood warning once max per fit - if inspect.stack()[1][3] == '_assign_model_to_data': - self._printed_warning = False - - # Get point sources and extended sources from model: - point_sources = model.point_sources - extended_sources = model.extended_sources - - # Source counter for models with multiple sources: - self.src_counter = 0 - - # Get expectation for extended sources: - - # Save expected counts for each source, - # in order to enable easy plotting after likelihood scan: - if self._expected_counts == None: - self._expected_counts = {} - - for name,source in extended_sources.items(): - - # Set spectrum: - # Note: the spectral parameters are updated internally by 3ML - # during the likelihood scan. - - # Get expectation using precomputed psr in Galactic coordinates: - total_expectation = self.image_response.get_expectation_from_astromodel(source) - - # Save expected counts for source: - self._expected_counts[name] = copy.deepcopy(total_expectation) - - # Need to check if self._signal type is dense (i.e. 'Quantity') or sparse (i.e. 'COO'). - if type(total_expectation.contents) == u.quantity.Quantity: - total_expectation = total_expectation.contents.value - elif type(total_expectation.contents) == COO: - total_expectation = total_expectation.contents.todense() - else: - raise RuntimeError("Expectation is an unknown object") - - # Add source to signal and update source counter: - if self.src_counter == 0: - self._signal = total_expectation - if self.src_counter != 0: - self._signal += total_expectation - self.src_counter += 1 - - # Initialization - # probably it is better that this part be outside of COSILike (HY). - if len(point_sources) != 0: - - if self._psr is None or len(point_sources) != len(self._psr): - - logger.info("... Calculating point source responses ...") - - self._psr = {} - self._source_location = {} # Should the poition information be in the point source response? (HY) - for name, source in point_sources.items(): - coord = source.position.sky_coord - - self._source_location[name] = copy.deepcopy(coord) # to avoid same memory issue - - if self._coordsys == 'spacecraftframe': - dwell_time_map = self._get_dwell_time_map(coord) - self._psr[name] = self._dr.get_point_source_response(exposure_map=dwell_time_map) - elif self._coordsys == 'galactic': - scatt_map = self._get_scatt_map(coord) - self._psr[name] = self._dr.get_point_source_response(coord=coord, scatt_map=scatt_map) - else: - raise RuntimeError("Unknown coordinate system") - - logger.info(f"--> done (source name : {name})") - - logger.info(f"--> all done") - - # check if the source location is updated or not - for name, source in point_sources.items(): - - if source.position.sky_coord != self._source_location[name]: - logger.info(f"... 
Re-calculating the point source response of {name} ...") - coord = source.position.sky_coord - - self._source_location[name] = copy.deepcopy(coord) # to avoid same memory issue - - if self._coordsys == 'spacecraftframe': - dwell_time_map = self._get_dwell_time_map(coord) - self._psr[name] = self._dr.get_point_source_response(exposure_map=dwell_time_map) - elif self._coordsys == 'galactic': - scatt_map = self._get_scatt_map() - self._psr[name] = self._dr.get_point_source_response(coord=coord, scatt_map=scatt_map) - else: - raise RuntimeError("Unknown coordinate system") - - logger.info(f"--> done (source name : {name})") + self._name = name - # Get expectation for point sources: - for name,source in point_sources.items(): + class ThreeMLBackgroundWrapper: + """ + Translate background porameters to 3ml Parameter dict + """ + + def __init__(self, bkg: BackgroundInterface): + self.bkg = bkg + + def set_parameters(self, parameters: Dict[str, Parameter]): + # Translate self.bkg.set_parameters + ... + + @property + def parameters(self) -> Dict[str, Parameter]: + ## Translate self.bkg.parameters + ... + + self._bkg = ThreeMLBackgroundWrapper(bkg) + self._response = response + + if isinstance(likelihood, LikelihoodInterface): + # Use user's likelihood at their own risk + self._like = likelihood + elif likelihood == 'poisson': + self._like = PoissonLikelihood(data, response, bkg) + elif likelihood == 'unbinned': + self._like = UnbinnedLikelihood(data, response, bkg) + else: + raise ValueError(f"Likelihood function \"{likelihood}\" not supported") - # Convolve with spectrum - # See also the Detector Response and Source Injector tutorials - spectrum = source.spectrum.main.shape + @property + def nuisance_parameters(self) -> Dict[str, Parameter]: + return self._bkg.parameters - total_expectation = self._psr[name].get_expectation(spectrum) - - # Save expected counts for source: - self._expected_counts[name] = copy.deepcopy(total_expectation) - - # Need to check if self._signal type is dense (i.e. 'Quantity') or sparse (i.e. 'COO'). - if type(total_expectation.contents) == u.quantity.Quantity: - total_expectation = total_expectation.project(['Em', 'Phi', 'PsiChi']).contents.value - elif type(total_expectation.contents) == COO: - total_expectation = total_expectation.project(['Em', 'Phi', 'PsiChi']).contents.todense() - else: - raise RuntimeError("Expectation is an unknown object") + def update_nuisance_parameters(self, new_nuisance_parameters: Dict[str, Parameter]): + self._bkg.set_parameters(new_nuisance_parameters) - # Add source to signal and update source counter: - if self.src_counter == 0: - self._signal = total_expectation - if self.src_counter != 0: - self._signal += total_expectation - self.src_counter += 1 + def get_number_of_data_points(self) -> int: + return self._like.nobservations - # Cache - self._model = model + def set_model(self, model): + self._response.set_model(model) def get_log_like(self): - """ - Calculate the log-likelihood. 
- - Returns - ---------- - log_like : float - Value of the log-likelihood - """ - - # Recompute the expectation if any parameter in the model changed - if self._model is None: - log.error("You need to set the model first") - - # Set model: - self.set_model(self._model) - - # Compute expectation including free background parameter: - if self._fit_nuisance_params: - if type(self._bkg.contents) == COO: - expectation = self._signal + self._nuisance_parameters[self._bkg_par.name].value * self._bkg.contents.todense() - else: - expectation = self._signal + self._nuisance_parameters[self._bkg_par.name].value * self._bkg.contents - - # Compute expectation without background parameter: - else: - if type(self._bkg.contents) == COO: - expectation = self._signal + self._bkg.contents.todense() - else: - expectation = self._signal + self._bkg.contents + return self._like.get_log_like() - expectation += 1e-12 # to avoid -infinite log-likelihood (occurs when expected counts = 0 but data != 0) - if not self._printed_warning: - logger.warning("Adding 1e-12 to each bin of the expectation to avoid log-likelihood = -inf.") - self._printed_warning = True - # This 1e-12 should be defined as a parameter in the near future (HY) - - # Convert data into an arrary: - data = self._data.contents - - # Compute the log-likelihood: - log_like = np.nansum(data*np.log(expectation) - expectation) - - return log_like - def inner_fit(self): """ Required for 3ML fit. - """ - - return self.get_log_like() - - def _get_dwell_time_map(self, coord): - """ - Get the dwell time map of the source in the inertial (spacecraft) frame. - - Parameters - ---------- - coord : astropy.coordinates.SkyCoord - Coordinates of the target source - - Returns - ------- - dwell_time_map : mhealpy.containers.healpix_map.HealpixMap - Dwell time map - """ - - self._sc_orientation.get_target_in_sc_frame(target_name = self._name, target_coord = coord) - dwell_time_map = self._sc_orientation.get_dwell_map(response = self._rsp_path) - - return dwell_time_map - - def _get_scatt_map(self, coord): - """ - Get the spacecraft attitude map of the source in the inertial (spacecraft) frame. - - Parameters - ---------- - coord : astropy.coordinates.SkyCoord - The coordinates of the target object. - Returns - ------- - scatt_map : cosipy.spacecraftfile.scatt_map.SpacecraftAttitudeMap + Maybe in the future use fast norm fit to minimize the background normalization """ - - scatt_map = self._sc_orientation.get_scatt_map(nside = self._dr.nside * 2, target_coord = coord, - coordsys = 'galactic', earth_occ = self.earth_occ) - - return scatt_map - - def set_inner_minimization(self, flag: bool): - """ - Turn on the minimization of the internal COSI (nuisance) parameters. 
- - Parameters - ---------- - flag : bool - Turns on and off the minimization of the internal parameters - """ - - self._fit_nuisance_params: bool = bool(flag) - - for parameter in self._nuisance_parameters: - self._nuisance_parameters[parameter].free = self._fit_nuisance_params + return self.get_log_like() From 2eb9a2f35d0b91df0038eba041cbae491e38347d Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Wed, 16 Apr 2025 17:45:45 -0400 Subject: [PATCH 010/133] Some more changes Signed-off-by: Israel Martinez --- cosipy/interfaces/background_interface.py | 41 ++++++- cosipy/interfaces/data_interface.py | 4 - cosipy/interfaces/expectation_interface.py | 9 +- .../interfaces/source_response_interface.py | 4 +- cosipy/statistics/likelihood_functions.py | 37 +++++- cosipy/threeml/COSILike.py | 41 +++---- .../scratch_interfaces_signatures.py | 105 ++++++++++++++++++ 7 files changed, 201 insertions(+), 40 deletions(-) create mode 100644 docs/api/interfaces/scratch_interfaces_signatures.py diff --git a/cosipy/interfaces/background_interface.py b/cosipy/interfaces/background_interface.py index 6f125cd3..acf0ff8d 100644 --- a/cosipy/interfaces/background_interface.py +++ b/cosipy/interfaces/background_interface.py @@ -3,6 +3,9 @@ import numpy as np import logging + +from astromodels import Parameter + logger = logging.getLogger(__name__) from .expectation_interface import BinnedExpectationInterface, UnbinnedExpectationInterface @@ -12,6 +15,9 @@ "BackgroundInterface", "BinnedBackgroundInterface", "UnbinnedBackgroundInterface", + "ThreeMLBackgroundInterface", + "ThreeMLBinnedBackgroundInterface", + "ThreeMLUnbinnedBackgroundInterface", "NullBackground", ] @@ -21,16 +27,44 @@ def set_parameters(self, **params:Dict[str, Any]) -> None:... @property def parameters(self) -> Dict[str, Any]:... +class ThreeMLBackgroundInterface(BackgroundInterface): + """ + This must translate to/from regular parameters + with arbitrary type from/to 3ML parameters + """ + def set_threeml_parameters(self, **parameters: Dict[str, Parameter]): + """ + Must call set_parameters(), and keep track of all the Parameter property (e.g. bounds) + """ + @property + def threeml_parameters(self)->Dict[str, Parameter]: + """ + Note than we need more information (e.g. bounds) than what you + get from base parameters property + """ + return {} # Silence warning + class BinnedBackgroundInterface(BackgroundInterface, BinnedExpectationInterface): """ No new methods, just the inherited one """ +class ThreeMLBinnedBackgroundInterface(BinnedBackgroundInterface, ThreeMLBackgroundInterface): + """ + No new methods, just the inherited one + """ + class UnbinnedBackgroundInterface(BackgroundInterface, UnbinnedExpectationInterface): """ No new methods, just the inherited one """ +class ThreeMLUnbinnedBackgroundInterface(BinnedBackgroundInterface, ThreeMLBackgroundInterface): + """ + No new methods, just the inherited one + """ + + # Null background singleton # It has not parameters and it always returns 0s # It can be checked like a None. e.g. @@ -57,10 +91,9 @@ def __call__(self): @property def parameters(self): return {} def set_parameters(self, **params:Dict[str, Any]) -> None: pass - def set_binning(self, axes:histpy.Axes) -> None: pass - def get_expectation(self)->histpy.Histogram: pass - def get_ncounts(self): return 0. - def get_probability(self, measurements:Measurements): return np.broadcast_to(0., measurements.size) + def expectation(self)->histpy.Histogram: pass + def ncounts(self): return 0. 
+ def probability(self, measurements:Measurements): return np.broadcast_to(0., measurements.size) # Instantiate *the* NullBackground singleton try: diff --git a/cosipy/interfaces/data_interface.py b/cosipy/interfaces/data_interface.py index 4b6988fd..6249458c 100644 --- a/cosipy/interfaces/data_interface.py +++ b/cosipy/interfaces/data_interface.py @@ -17,12 +17,8 @@ class DataInterface(Protocol): class UnbinnedDataInterface(DataInterface): @property def measurements(self) -> Measurements:... - @property - def measurement_types(self) -> Dict[str, Type[Any]]:... class BinnedDataInterface(DataInterface): - @property - def axes(self) -> histpy.Axes:... @property def data(self) -> histpy.Histogram:... diff --git a/cosipy/interfaces/expectation_interface.py b/cosipy/interfaces/expectation_interface.py index 50720b1d..90a0d8a2 100644 --- a/cosipy/interfaces/expectation_interface.py +++ b/cosipy/interfaces/expectation_interface.py @@ -12,13 +12,14 @@ @runtime_checkable class BinnedExpectationInterface(Protocol): - def set_binning(self, axes:histpy.Axes) -> None:... - def get_expectation(self)->histpy.Histogram:... + @property + def expectation(self)->histpy.Histogram:... @runtime_checkable class UnbinnedExpectationInterface(Protocol): - def get_ncounts(self) -> float:... - def get_probability(self, measurements:Measurements) -> np.ndarray:... + @property + def ncounts(self) -> float:... + def probability(self, measurements:Measurements) -> np.ndarray:... diff --git a/cosipy/interfaces/source_response_interface.py b/cosipy/interfaces/source_response_interface.py index 951fb6bc..f673466a 100644 --- a/cosipy/interfaces/source_response_interface.py +++ b/cosipy/interfaces/source_response_interface.py @@ -1,5 +1,5 @@ from typing import Protocol, runtime_checkable -import threeML +from astromodels import Model from .expectation_interface import BinnedExpectationInterface, UnbinnedExpectationInterface @@ -13,7 +13,7 @@ class SourceResponseInterface(Protocol): ... class ThreeMLSourceResponseInterface(SourceResponseInterface): - def set_model(self, model: threeML.Model): ... + def set_model(self, model: Model): ... class ThreeMLUnbinnedSourceResponseInterface(UnbinnedExpectationInterface, ThreeMLSourceResponseInterface): diff --git a/cosipy/statistics/likelihood_functions.py b/cosipy/statistics/likelihood_functions.py index 39544740..3f605819 100644 --- a/cosipy/statistics/likelihood_functions.py +++ b/cosipy/statistics/likelihood_functions.py @@ -1,4 +1,11 @@ -from cosipy.interfaces import BinnedLikelihoodInterface, UnbinnedLikelihoodInterface +from cosipy.interfaces import (BinnedLikelihoodInterface, + UnbinnedLikelihoodInterface, + BinnedDataInterface, + BinnedExpectationInterface, + BinnedBackgroundInterface, + NullBackground) + +import numpy as np __all__ = ['UnbinnedLikelihood', 'PoissonLikelihood'] @@ -7,5 +14,31 @@ class UnbinnedLikelihood(UnbinnedLikelihoodInterface): ... class PoissonLikelihood(BinnedLikelihoodInterface): - ... 
+ def __init__(self, + data: BinnedDataInterface, + response: BinnedExpectationInterface, + bkg: BinnedBackgroundInterface, + *args, **kwargs): + + self._data = data + self._bkg = bkg + self._response = response + + def get_log_like(self) -> float: + + # Compute expectation including background + expectation = self._response.expectation + + if self._bkg is not NullBackground: + expectation = expectation + self._bkg.expectation + + # Get the arrays + expectation = expectation.contents + data = self._data.data.contents + + # Compute the log-likelihood: + log_like = np.nansum(data * np.log(expectation) - expectation) + + return log_like + diff --git a/cosipy/threeml/COSILike.py b/cosipy/threeml/COSILike.py index 9b1b12fb..e5f001ac 100644 --- a/cosipy/threeml/COSILike.py +++ b/cosipy/threeml/COSILike.py @@ -3,8 +3,8 @@ from cosipy.statistics import UnbinnedLikelihood, PoissonLikelihood from cosipy.interfaces import (DataInterface, ThreeMLSourceResponseInterface, - BackgroundInterface, - LikelihoodInterface) + ThreeMLBackgroundInterface, + LikelihoodInterface, ThreeMLBinnedBackgroundInterface) class COSILike(PluginPrototype): @@ -12,7 +12,7 @@ def __init__(self, name, data: DataInterface, response: ThreeMLSourceResponseInterface, - bkg: BackgroundInterface, + bkg: ThreeMLBackgroundInterface, likelihood = 'poisson'): """ @@ -25,26 +25,14 @@ def __init__(self, likefun: str or LikelihoodInterface (Use at your own risk. make sure uses data, response and bkg) """ - self._name = name + # PluginPrototype.__init__ does the following: + # Sets _name = name + # Sets _tag = None + # Set self._nuisance_parameters, which we do not use because + # we're overriding nuisance_parameters() and update_nuisance_parameters() + super().__init__(name, {}) - class ThreeMLBackgroundWrapper: - """ - Translate background porameters to 3ml Parameter dict - """ - - def __init__(self, bkg: BackgroundInterface): - self.bkg = bkg - - def set_parameters(self, parameters: Dict[str, Parameter]): - # Translate self.bkg.set_parameters - ... - - @property - def parameters(self) -> Dict[str, Parameter]: - ## Translate self.bkg.parameters - ... - - self._bkg = ThreeMLBackgroundWrapper(bkg) + self._bkg = bkg self._response = response if isinstance(likelihood, LikelihoodInterface): @@ -59,10 +47,15 @@ def parameters(self) -> Dict[str, Parameter]: @property def nuisance_parameters(self) -> Dict[str, Parameter]: - return self._bkg.parameters + # Add plugin name, required by 3ML code + # See https://github.com/threeML/threeML/blob/7a16580d9d5ed57166e3b1eec3d4fccd3eeef1eb/threeML/classicMLE/joint_likelihood.py#L131 + return {self._name + "_" + l:p for l,p in self._bkg.threeml_parameters.items()} def update_nuisance_parameters(self, new_nuisance_parameters: Dict[str, Parameter]): - self._bkg.set_parameters(new_nuisance_parameters) + # Remove plugin name. 
Opposite of the nuisance_parameters property + new_nuisance_parameters = {l[len(self._name)+1:]:p for l,p in new_nuisance_parameters.items()} + + self._bkg.set_threeml_parameters(**new_nuisance_parameters) def get_number_of_data_points(self) -> int: return self._like.nobservations diff --git a/docs/api/interfaces/scratch_interfaces_signatures.py b/docs/api/interfaces/scratch_interfaces_signatures.py new file mode 100644 index 00000000..e0f8c90a --- /dev/null +++ b/docs/api/interfaces/scratch_interfaces_signatures.py @@ -0,0 +1,105 @@ +from typing import Dict, Any + +from astromodels import Model, Parameter + +from cosipy.threeml import COSILike +from cosipy.interfaces import BinnedDataInterface, ThreeMLBinnedBackgroundInterface, ThreeMLBinnedSourceResponseInterface +from histpy import Axis,Histogram +import numpy as np +from scipy.stats import norm, uniform + +from matplotlib import pyplot as plt + +toy_axis = Axis(np.linspace(-5, 5)) + +class ToyData(BinnedDataInterface): + # Random data. Normal signal on opt of uniform bkg + + def __init__(self): + self._data = Histogram(toy_axis) + + # Signal + self._data.fill(norm.rvs(size = 1000)) + + # Bkg + self._data.fill(uniform.rvs(-5,10, size=1000)) + + @property + def data(self) -> Histogram: + return self._data + + +class ToyBkg(ThreeMLBinnedBackgroundInterface): + def __init__(self): + self._unit_expectation = Histogram(toy_axis) + self._unit_expectation[:] = 1/self._unit_expectation.nbins + self._norm = 1 + self._threeml_parameters = {'bkg_norm': Parameter('bkg_norm', self._norm)} + + def set_parameters(self, bkg_norm) -> None: + self._norm = bkg_norm + + @property + def parameters(self) -> Dict[str, Any]: + return {'bkg_norm':self._norm} + + @property + def expectation(self)->Histogram: + return self._norm * self._unit_expectation + + @property + def threeml_parameters(self) -> Dict[str, Parameter]: + return self._threeml_parameters + + def set_threeml_parameters(self, bkg_norm: Parameter, **kwargs): + self._threeml_parameters['bkg_norm'] = bkg_norm + self.set_parameters(bkg_norm = bkg_norm.value) + +class ToySourceResponse(ThreeMLBinnedSourceResponseInterface): + + def __init__(self): + self._model = None + self._unit_expectation = Histogram(toy_axis, + contents = np.diff(norm.cdf(toy_axis.edges))) + + def set_model(self, model: Model): + self._flux = model.sources['source'].spectrum.main.shape.k + + @property + def expectation(self)->Histogram: + print(self._flux.value) + return self._unit_expectation*self._flux.value + +data = ToyData() +bkg = ToyBkg() +bkg.set_threeml_parameters(bkg_norm = Parameter('bkg_norm', 1000, + min_value=0, max_value = 100000, + delta = 0.01)) +response = ToySourceResponse() + +## 3Ml model +## We'll just use the K value in u.cm / u.cm / u.s / u.keV +from threeML import Constant, PointSource, Model, JointLikelihood, DataList +spectrum = Constant(k = 1000) +spectrum.k.min_value = 0 +spectrum.k.max_value = 100000 +spectrum.k.delta = 1 +source = PointSource("source", # arbitrary, but needs to be unique + l = 0, b = 0, # Doesn't matter + spectral_shape = spectrum) + +model = Model(source) + +cosi = COSILike('cosi', data, response, bkg) + +fig,ax = plt.subplots() +cosi.set_model(model) +data.data.plot(ax) +(bkg.expectation + response.expectation).plot(ax) +plt.show() + +plugins = DataList(cosi) + +like = JointLikelihood(model, plugins, verbose = True) + +like.fit() From d6521ca02b797d678a8d21f8d2b5c8c11064f8ca Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Wed, 16 Apr 2025 22:23:47 -0400 Subject: [PATCH 
011/133] Toy example working Signed-off-by: Israel Martinez --- cosipy/interfaces/background_interface.py | 13 +++- cosipy/interfaces/expectation_interface.py | 3 +- .../interfaces/source_response_interface.py | 7 +- cosipy/statistics/likelihood_functions.py | 8 +- .../scratch_interfaces_signatures.py | 77 +++++++++++-------- 5 files changed, 69 insertions(+), 39 deletions(-) diff --git a/cosipy/interfaces/background_interface.py b/cosipy/interfaces/background_interface.py index acf0ff8d..89bc3efe 100644 --- a/cosipy/interfaces/background_interface.py +++ b/cosipy/interfaces/background_interface.py @@ -31,10 +31,12 @@ class ThreeMLBackgroundInterface(BackgroundInterface): """ This must translate to/from regular parameters with arbitrary type from/to 3ML parameters + """ def set_threeml_parameters(self, **parameters: Dict[str, Parameter]): """ - Must call set_parameters(), and keep track of all the Parameter property (e.g. bounds) + The Parameter objects are passed "as reference", and can change. + Remember to call set_parameters() before computing the expetation """ @property def threeml_parameters(self)->Dict[str, Parameter]: @@ -72,7 +74,7 @@ class ThreeMLUnbinnedBackgroundInterface(BinnedBackgroundInterface, ThreeMLBackg # expectation += bkg.get_expectation() # Which should work even without the if, but the if # allows to avoid a potentially (is it?) lenghty operation. -class _NullBackground(BinnedBackgroundInterface, UnbinnedBackgroundInterface): +class _NullBackground(ThreeMLBinnedBackgroundInterface, ThreeMLUnbinnedBackgroundInterface): # All ways to instantiate this class should return the same object # The singleton instant will be define later, following the class # definition @@ -90,9 +92,14 @@ def __call__(self): # The results are all )'s @property def parameters(self): return {} + @property + def threeml_parameters(self) ->Dict[str, Parameter]: return {} def set_parameters(self, **params:Dict[str, Any]) -> None: pass - def expectation(self)->histpy.Histogram: pass + def set_threeml_parameters(self, **parameters: Dict[str, Parameter]): pass + def expectation(self, axes:histpy.Axes)->histpy.Histogram: return histpy.Histogram(axes) + @property def ncounts(self): return 0. + @property def probability(self, measurements:Measurements): return np.broadcast_to(0., measurements.size) # Instantiate *the* NullBackground singleton diff --git a/cosipy/interfaces/expectation_interface.py b/cosipy/interfaces/expectation_interface.py index 90a0d8a2..4e3e77dd 100644 --- a/cosipy/interfaces/expectation_interface.py +++ b/cosipy/interfaces/expectation_interface.py @@ -12,8 +12,7 @@ @runtime_checkable class BinnedExpectationInterface(Protocol): - @property - def expectation(self)->histpy.Histogram:... + def expectation(self, axes:histpy.Axes)->histpy.Histogram:... @runtime_checkable class UnbinnedExpectationInterface(Protocol): diff --git a/cosipy/interfaces/source_response_interface.py b/cosipy/interfaces/source_response_interface.py index f673466a..3dd52dae 100644 --- a/cosipy/interfaces/source_response_interface.py +++ b/cosipy/interfaces/source_response_interface.py @@ -13,7 +13,12 @@ class SourceResponseInterface(Protocol): ... class ThreeMLSourceResponseInterface(SourceResponseInterface): - def set_model(self, model: Model): ... + def set_model(self, model: Model): + """ + The model is passed as a reference and it's parameters + can change. Remember to check if it changed since the + last time the user called expectation. 
+ """ class ThreeMLUnbinnedSourceResponseInterface(UnbinnedExpectationInterface, ThreeMLSourceResponseInterface): diff --git a/cosipy/statistics/likelihood_functions.py b/cosipy/statistics/likelihood_functions.py index 3f605819..91deac81 100644 --- a/cosipy/statistics/likelihood_functions.py +++ b/cosipy/statistics/likelihood_functions.py @@ -27,10 +27,10 @@ def __init__(self, def get_log_like(self) -> float: # Compute expectation including background - expectation = self._response.expectation + expectation = self._response.expectation(self._data.data.axes) if self._bkg is not NullBackground: - expectation = expectation + self._bkg.expectation + expectation = expectation + self._bkg.expectation(self._data.data.axes) # Get the arrays expectation = expectation.contents @@ -41,4 +41,8 @@ def get_log_like(self) -> float: return log_like + @property + def nobservations(self) -> int: + return self._data.data.contents.size + diff --git a/docs/api/interfaces/scratch_interfaces_signatures.py b/docs/api/interfaces/scratch_interfaces_signatures.py index e0f8c90a..7cda036a 100644 --- a/docs/api/interfaces/scratch_interfaces_signatures.py +++ b/docs/api/interfaces/scratch_interfaces_signatures.py @@ -3,8 +3,8 @@ from astromodels import Model, Parameter from cosipy.threeml import COSILike -from cosipy.interfaces import BinnedDataInterface, ThreeMLBinnedBackgroundInterface, ThreeMLBinnedSourceResponseInterface -from histpy import Axis,Histogram +from cosipy.interfaces import NullBackground, BinnedDataInterface, ThreeMLBinnedBackgroundInterface, ThreeMLBinnedSourceResponseInterface +from histpy import Axis,Axes,Histogram import numpy as np from scipy.stats import norm, uniform @@ -33,27 +33,36 @@ class ToyBkg(ThreeMLBinnedBackgroundInterface): def __init__(self): self._unit_expectation = Histogram(toy_axis) self._unit_expectation[:] = 1/self._unit_expectation.nbins - self._norm = 1 - self._threeml_parameters = {'bkg_norm': Parameter('bkg_norm', self._norm)} + self._norm = None + self._threeml_parameters = {} - def set_parameters(self, bkg_norm) -> None: - self._norm = bkg_norm + def set_parameters(self, norm) -> None: + self._norm = norm @property def parameters(self) -> Dict[str, Any]: - return {'bkg_norm':self._norm} + return {'norm':self._norm} - @property - def expectation(self)->Histogram: - return self._norm * self._unit_expectation + def expectation(self, axes:Axes)->Histogram: + + if axes != self._unit_expectation.axes: + raise ValueError("Wrong axes. 
I have fixed axes.") + + if self._norm is None: + raise RuntimeError("Set norm parameter first") + + # In case it changed + self.set_parameters(norm= self._threeml_parameters['norm'].value) + + return self._unit_expectation*self._norm @property def threeml_parameters(self) -> Dict[str, Parameter]: return self._threeml_parameters - def set_threeml_parameters(self, bkg_norm: Parameter, **kwargs): - self._threeml_parameters['bkg_norm'] = bkg_norm - self.set_parameters(bkg_norm = bkg_norm.value) + def set_threeml_parameters(self, norm: Parameter, **kwargs): + self._threeml_parameters['norm'] = norm + self.set_parameters(norm.value) class ToySourceResponse(ThreeMLBinnedSourceResponseInterface): @@ -63,27 +72,31 @@ def __init__(self): contents = np.diff(norm.cdf(toy_axis.edges))) def set_model(self, model: Model): - self._flux = model.sources['source'].spectrum.main.shape.k + self._model = model - @property - def expectation(self)->Histogram: - print(self._flux.value) - return self._unit_expectation*self._flux.value + def expectation(self, axes:Axes)->Histogram: + if axes != self._unit_expectation.axes: + raise ValueError("Wrong axes. I have fixed axes.") + + if self._model is None: + raise RuntimeError("Set model first") + + flux = self._model.sources['source'].spectrum.main.shape.k.value + return self._unit_expectation*flux data = ToyData() bkg = ToyBkg() -bkg.set_threeml_parameters(bkg_norm = Parameter('bkg_norm', 1000, - min_value=0, max_value = 100000, - delta = 0.01)) +bkg.set_threeml_parameters(norm = Parameter('norm', 1)) + +bkg = NullBackground + response = ToySourceResponse() ## 3Ml model ## We'll just use the K value in u.cm / u.cm / u.s / u.keV from threeML import Constant, PointSource, Model, JointLikelihood, DataList -spectrum = Constant(k = 1000) -spectrum.k.min_value = 0 -spectrum.k.max_value = 100000 -spectrum.k.delta = 1 +spectrum = Constant() +spectrum.k.value = 1 source = PointSource("source", # arbitrary, but needs to be unique l = 0, b = 0, # Doesn't matter spectral_shape = spectrum) @@ -92,14 +105,16 @@ def expectation(self)->Histogram: cosi = COSILike('cosi', data, response, bkg) -fig,ax = plt.subplots() -cosi.set_model(model) -data.data.plot(ax) -(bkg.expectation + response.expectation).plot(ax) -plt.show() - plugins = DataList(cosi) like = JointLikelihood(model, plugins, verbose = True) like.fit() + +fig,ax = plt.subplots() +data.data.plot(ax) +expectation = response.expectation(data.data.axes) +if bkg is not NullBackground: + expectation + expectation + bkg.expectation(data.data.axes) +expectation.plot(ax) +plt.show() \ No newline at end of file From f960da53724283e1a58991648390940078c52aa4 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Wed, 16 Apr 2025 22:36:24 -0400 Subject: [PATCH 012/133] Remove NullBackground. 
A None is enough Signed-off-by: Israel Martinez --- cosipy/interfaces/background_interface.py | 44 ------------------- cosipy/statistics/likelihood_functions.py | 4 +- cosipy/threeml/COSILike.py | 11 +++-- .../scratch_interfaces_signatures.py | 18 +++++--- 4 files changed, 22 insertions(+), 55 deletions(-) diff --git a/cosipy/interfaces/background_interface.py b/cosipy/interfaces/background_interface.py index 89bc3efe..44d17c60 100644 --- a/cosipy/interfaces/background_interface.py +++ b/cosipy/interfaces/background_interface.py @@ -9,7 +9,6 @@ logger = logging.getLogger(__name__) from .expectation_interface import BinnedExpectationInterface, UnbinnedExpectationInterface -from .measurements import Measurements __all__ = [ "BackgroundInterface", @@ -18,7 +17,6 @@ "ThreeMLBackgroundInterface", "ThreeMLBinnedBackgroundInterface", "ThreeMLUnbinnedBackgroundInterface", - "NullBackground", ] @runtime_checkable @@ -65,45 +63,3 @@ class ThreeMLUnbinnedBackgroundInterface(BinnedBackgroundInterface, ThreeMLBackg """ No new methods, just the inherited one """ - - -# Null background singleton -# It has not parameters and it always returns 0s -# It can be checked like a None. e.g. -# if bkg is not NullBackground: -# expectation += bkg.get_expectation() -# Which should work even without the if, but the if -# allows to avoid a potentially (is it?) lenghty operation. -class _NullBackground(ThreeMLBinnedBackgroundInterface, ThreeMLUnbinnedBackgroundInterface): - # All ways to instantiate this class should return the same object - # The singleton instant will be define later, following the class - # definition - def __new__(cls, *args, **kwargs): - return NullBackground - def __copy__(self): - return NullBackground - def __deepcopy__(self, memo): - return NullBackground - def __call__(self): - # This allows to use either NullBackground or NullBackground() - # NullBackground is NullBackground() == True - return NullBackground - # Implement all method from binned and unbinned background - # The results are all )'s - @property - def parameters(self): return {} - @property - def threeml_parameters(self) ->Dict[str, Parameter]: return {} - def set_parameters(self, **params:Dict[str, Any]) -> None: pass - def set_threeml_parameters(self, **parameters: Dict[str, Parameter]): pass - def expectation(self, axes:histpy.Axes)->histpy.Histogram: return histpy.Histogram(axes) - @property - def ncounts(self): return 0. 
- @property - def probability(self, measurements:Measurements): return np.broadcast_to(0., measurements.size) - -# Instantiate *the* NullBackground singleton -try: - NullBackground -except NameError: - NullBackground = object.__new__(_NullBackground) diff --git a/cosipy/statistics/likelihood_functions.py b/cosipy/statistics/likelihood_functions.py index 91deac81..ce34b394 100644 --- a/cosipy/statistics/likelihood_functions.py +++ b/cosipy/statistics/likelihood_functions.py @@ -3,7 +3,7 @@ BinnedDataInterface, BinnedExpectationInterface, BinnedBackgroundInterface, - NullBackground) + ) import numpy as np @@ -29,7 +29,7 @@ def get_log_like(self) -> float: # Compute expectation including background expectation = self._response.expectation(self._data.data.axes) - if self._bkg is not NullBackground: + if self._bkg is not None: expectation = expectation + self._bkg.expectation(self._data.data.axes) # Get the arrays diff --git a/cosipy/threeml/COSILike.py b/cosipy/threeml/COSILike.py index e5f001ac..2bb38679 100644 --- a/cosipy/threeml/COSILike.py +++ b/cosipy/threeml/COSILike.py @@ -49,13 +49,16 @@ def __init__(self, def nuisance_parameters(self) -> Dict[str, Parameter]: # Add plugin name, required by 3ML code # See https://github.com/threeML/threeML/blob/7a16580d9d5ed57166e3b1eec3d4fccd3eeef1eb/threeML/classicMLE/joint_likelihood.py#L131 - return {self._name + "_" + l:p for l,p in self._bkg.threeml_parameters.items()} + if self._bkg is None: + return {} + else: + return {self._name + "_" + l:p for l,p in self._bkg.threeml_parameters.items()} def update_nuisance_parameters(self, new_nuisance_parameters: Dict[str, Parameter]): # Remove plugin name. Opposite of the nuisance_parameters property - new_nuisance_parameters = {l[len(self._name)+1:]:p for l,p in new_nuisance_parameters.items()} - - self._bkg.set_threeml_parameters(**new_nuisance_parameters) + if self._bkg is not None: + new_nuisance_parameters = {l[len(self._name)+1:]:p for l,p in new_nuisance_parameters.items()} + self._bkg.set_threeml_parameters(**new_nuisance_parameters) def get_number_of_data_points(self) -> int: return self._like.nobservations diff --git a/docs/api/interfaces/scratch_interfaces_signatures.py b/docs/api/interfaces/scratch_interfaces_signatures.py index 7cda036a..9988cff6 100644 --- a/docs/api/interfaces/scratch_interfaces_signatures.py +++ b/docs/api/interfaces/scratch_interfaces_signatures.py @@ -3,7 +3,7 @@ from astromodels import Model, Parameter from cosipy.threeml import COSILike -from cosipy.interfaces import NullBackground, BinnedDataInterface, ThreeMLBinnedBackgroundInterface, ThreeMLBinnedSourceResponseInterface +from cosipy.interfaces import BinnedDataInterface, ThreeMLBinnedBackgroundInterface, ThreeMLBinnedSourceResponseInterface from histpy import Axis,Axes,Histogram import numpy as np from scipy.stats import norm, uniform @@ -81,14 +81,20 @@ def expectation(self, axes:Axes)->Histogram: if self._model is None: raise RuntimeError("Set model first") - flux = self._model.sources['source'].spectrum.main.shape.k.value + sources = self._model.sources + + if len(sources) == 0: + flux = 0. 
+ else: + flux = self._model.sources['source'].spectrum.main.shape.k.value + return self._unit_expectation*flux data = ToyData() bkg = ToyBkg() bkg.set_threeml_parameters(norm = Parameter('norm', 1)) -bkg = NullBackground +#bkg = None # Uncomment for not bkg fit response = ToySourceResponse() @@ -103,6 +109,8 @@ def expectation(self, axes:Axes)->Histogram: model = Model(source) +model = Model() # Uncomment for bkg-only hypothesis + cosi = COSILike('cosi', data, response, bkg) plugins = DataList(cosi) @@ -114,7 +122,7 @@ def expectation(self, axes:Axes)->Histogram: fig,ax = plt.subplots() data.data.plot(ax) expectation = response.expectation(data.data.axes) -if bkg is not NullBackground: - expectation + expectation + bkg.expectation(data.data.axes) +if bkg is not None: + expectation = expectation + bkg.expectation(data.data.axes) expectation.plot(ax) plt.show() \ No newline at end of file From 18418815cc06c610f5813fd3a983f6fd67685437 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Wed, 16 Apr 2025 23:11:36 -0400 Subject: [PATCH 013/133] Make all interfaces explicit protocols Signed-off-by: Israel Martinez --- cosipy/interfaces/background_interface.py | 15 ++++++++++----- cosipy/interfaces/data_interface.py | 6 ++++-- cosipy/interfaces/likelihood_interface.py | 6 ++++-- cosipy/interfaces/source_response_interface.py | 10 ++++++---- .../interfaces/scratch_interfaces_signatures.py | 9 ++++----- 5 files changed, 28 insertions(+), 18 deletions(-) diff --git a/cosipy/interfaces/background_interface.py b/cosipy/interfaces/background_interface.py index 44d17c60..86d0b42a 100644 --- a/cosipy/interfaces/background_interface.py +++ b/cosipy/interfaces/background_interface.py @@ -25,7 +25,8 @@ def set_parameters(self, **params:Dict[str, Any]) -> None:... @property def parameters(self) -> Dict[str, Any]:... -class ThreeMLBackgroundInterface(BackgroundInterface): +@runtime_checkable +class ThreeMLBackgroundInterface(BackgroundInterface, Protocol): """ This must translate to/from regular parameters with arbitrary type from/to 3ML parameters @@ -44,22 +45,26 @@ def threeml_parameters(self)->Dict[str, Parameter]: """ return {} # Silence warning -class BinnedBackgroundInterface(BackgroundInterface, BinnedExpectationInterface): +@runtime_checkable +class BinnedBackgroundInterface(BackgroundInterface, BinnedExpectationInterface, Protocol): """ No new methods, just the inherited one """ -class ThreeMLBinnedBackgroundInterface(BinnedBackgroundInterface, ThreeMLBackgroundInterface): +@runtime_checkable +class ThreeMLBinnedBackgroundInterface(BinnedBackgroundInterface, ThreeMLBackgroundInterface, Protocol): """ No new methods, just the inherited one """ -class UnbinnedBackgroundInterface(BackgroundInterface, UnbinnedExpectationInterface): +@runtime_checkable +class UnbinnedBackgroundInterface(BackgroundInterface, UnbinnedExpectationInterface, Protocol): """ No new methods, just the inherited one """ -class ThreeMLUnbinnedBackgroundInterface(BinnedBackgroundInterface, ThreeMLBackgroundInterface): +@runtime_checkable +class ThreeMLUnbinnedBackgroundInterface(BinnedBackgroundInterface, ThreeMLBackgroundInterface, Protocol): """ No new methods, just the inherited one """ diff --git a/cosipy/interfaces/data_interface.py b/cosipy/interfaces/data_interface.py index 6249458c..5c8b6704 100644 --- a/cosipy/interfaces/data_interface.py +++ b/cosipy/interfaces/data_interface.py @@ -14,11 +14,13 @@ class DataInterface(Protocol): Not much... 
""" -class UnbinnedDataInterface(DataInterface): +@runtime_checkable +class UnbinnedDataInterface(DataInterface, Protocol): @property def measurements(self) -> Measurements:... -class BinnedDataInterface(DataInterface): +@runtime_checkable +class BinnedDataInterface(DataInterface, Protocol): @property def data(self) -> histpy.Histogram:... diff --git a/cosipy/interfaces/likelihood_interface.py b/cosipy/interfaces/likelihood_interface.py index a2af3e26..84a6760f 100644 --- a/cosipy/interfaces/likelihood_interface.py +++ b/cosipy/interfaces/likelihood_interface.py @@ -15,7 +15,8 @@ def get_log_like(self) -> float:... def nobservations(self) -> int: """For BIC and other statistics""" -class BinnedLikelihoodInterface(LikelihoodInterface): +@runtime_checkable +class BinnedLikelihoodInterface(LikelihoodInterface, Protocol): """ Needs to check that data, response and bkg are compatible """ @@ -25,7 +26,8 @@ def __init__(self, bkg: BinnedBackgroundInterface, *args, **kwargs):... -class UnbinnedLikelihoodInterface(LikelihoodInterface): +@runtime_checkable +class UnbinnedLikelihoodInterface(LikelihoodInterface, Protocol): """ Needs to check that data, response and bkg are compatible """ diff --git a/cosipy/interfaces/source_response_interface.py b/cosipy/interfaces/source_response_interface.py index 3dd52dae..2a07e429 100644 --- a/cosipy/interfaces/source_response_interface.py +++ b/cosipy/interfaces/source_response_interface.py @@ -12,7 +12,8 @@ class SourceResponseInterface(Protocol): ... -class ThreeMLSourceResponseInterface(SourceResponseInterface): +@runtime_checkable +class ThreeMLSourceResponseInterface(SourceResponseInterface, Protocol): def set_model(self, model: Model): """ The model is passed as a reference and it's parameters @@ -20,13 +21,14 @@ def set_model(self, model: Model): last time the user called expectation. """ - -class ThreeMLUnbinnedSourceResponseInterface(UnbinnedExpectationInterface, ThreeMLSourceResponseInterface): +@runtime_checkable +class ThreeMLUnbinnedSourceResponseInterface(UnbinnedExpectationInterface, ThreeMLSourceResponseInterface, Protocol): """ No new methods. Just the inherited ones. """ -class ThreeMLBinnedSourceResponseInterface(ThreeMLSourceResponseInterface, BinnedExpectationInterface): +@runtime_checkable +class ThreeMLBinnedSourceResponseInterface(ThreeMLSourceResponseInterface, BinnedExpectationInterface, Protocol): """ No new methods. Just the inherited ones. 
""" diff --git a/docs/api/interfaces/scratch_interfaces_signatures.py b/docs/api/interfaces/scratch_interfaces_signatures.py index 9988cff6..4802cc04 100644 --- a/docs/api/interfaces/scratch_interfaces_signatures.py +++ b/docs/api/interfaces/scratch_interfaces_signatures.py @@ -29,7 +29,7 @@ def data(self) -> Histogram: return self._data -class ToyBkg(ThreeMLBinnedBackgroundInterface): +class ToyBkg:#(ThreeMLBinnedBackgroundInterface): def __init__(self): self._unit_expectation = Histogram(toy_axis) self._unit_expectation[:] = 1/self._unit_expectation.nbins @@ -64,7 +64,7 @@ def set_threeml_parameters(self, norm: Parameter, **kwargs): self._threeml_parameters['norm'] = norm self.set_parameters(norm.value) -class ToySourceResponse(ThreeMLBinnedSourceResponseInterface): +class ToySourceResponse:#(ThreeMLBinnedSourceResponseInterface): def __init__(self): self._model = None @@ -94,8 +94,6 @@ def expectation(self, axes:Axes)->Histogram: bkg = ToyBkg() bkg.set_threeml_parameters(norm = Parameter('norm', 1)) -#bkg = None # Uncomment for not bkg fit - response = ToySourceResponse() ## 3Ml model @@ -109,7 +107,8 @@ def expectation(self, axes:Axes)->Histogram: model = Model(source) -model = Model() # Uncomment for bkg-only hypothesis +#bkg = None # Uncomment for not bkg fit +#model = Model() # Uncomment for bkg-only hypothesis cosi = COSILike('cosi', data, response, bkg) From 0f55653ce22d042081b329f88e6c7b8687bf954f Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Thu, 17 Apr 2025 06:28:40 -0400 Subject: [PATCH 014/133] Example separate base bkg from 3ml interface Signed-off-by: Israel Martinez --- .../scratch_interfaces_signatures.py | 70 ++++++++++++------- 1 file changed, 44 insertions(+), 26 deletions(-) diff --git a/docs/api/interfaces/scratch_interfaces_signatures.py b/docs/api/interfaces/scratch_interfaces_signatures.py index 4802cc04..493d1319 100644 --- a/docs/api/interfaces/scratch_interfaces_signatures.py +++ b/docs/api/interfaces/scratch_interfaces_signatures.py @@ -3,8 +3,12 @@ from astromodels import Model, Parameter from cosipy.threeml import COSILike -from cosipy.interfaces import BinnedDataInterface, ThreeMLBinnedBackgroundInterface, ThreeMLBinnedSourceResponseInterface -from histpy import Axis,Axes,Histogram +from cosipy.interfaces import (BinnedDataInterface, + BinnedBackgroundInterface, + ThreeMLBinnedBackgroundInterface, + ThreeMLBackgroundInterface, + ThreeMLBinnedSourceResponseInterface) +from histpy import Axis, Axes, Histogram import numpy as np from scipy.stats import norm, uniform @@ -12,6 +16,7 @@ toy_axis = Axis(np.linspace(-5, 5)) + class ToyData(BinnedDataInterface): # Random data. 
Normal signal on opt of uniform bkg @@ -19,31 +24,31 @@ def __init__(self): self._data = Histogram(toy_axis) # Signal - self._data.fill(norm.rvs(size = 1000)) + self._data.fill(norm.rvs(size=1000)) # Bkg - self._data.fill(uniform.rvs(-5,10, size=1000)) + self._data.fill(uniform.rvs(-5, 10, size=1000)) @property def data(self) -> Histogram: return self._data -class ToyBkg:#(ThreeMLBinnedBackgroundInterface): +class ToyBkg:#(BinnedBackgroundInterface): + def __init__(self): self._unit_expectation = Histogram(toy_axis) - self._unit_expectation[:] = 1/self._unit_expectation.nbins + self._unit_expectation[:] = 1 / self._unit_expectation.nbins self._norm = None - self._threeml_parameters = {} - def set_parameters(self, norm) -> None: - self._norm = norm + def set_parameters(self, **params: Dict[str, Any]) -> None: + self._norm = params['norm'] @property def parameters(self) -> Dict[str, Any]: - return {'norm':self._norm} + return {'norm': self._norm} - def expectation(self, axes:Axes)->Histogram: + def expectation(self, axes: Axes) -> Histogram: if axes != self._unit_expectation.axes: raise ValueError("Wrong axes. I have fixed axes.") @@ -51,10 +56,20 @@ def expectation(self, axes:Axes)->Histogram: if self._norm is None: raise RuntimeError("Set norm parameter first") + return self._unit_expectation * self._norm + + +class ToyThreeMLBkg(ToyBkg):#(ToyBkg, ThreeMLBackgroundInterface): + def __init__(self): + super().__init__() + self._threeml_parameters = {} + + def expectation(self, axes: Axes) -> Histogram: + # In case it changed - self.set_parameters(norm= self._threeml_parameters['norm'].value) + self.set_parameters(norm=self._threeml_parameters['norm'].value) - return self._unit_expectation*self._norm + return super().expectation(axes) @property def threeml_parameters(self) -> Dict[str, Parameter]: @@ -62,19 +77,20 @@ def threeml_parameters(self) -> Dict[str, Parameter]: def set_threeml_parameters(self, norm: Parameter, **kwargs): self._threeml_parameters['norm'] = norm - self.set_parameters(norm.value) + self.set_parameters(norm = norm.value) -class ToySourceResponse:#(ThreeMLBinnedSourceResponseInterface): + +class ToySourceResponse(ThreeMLBinnedSourceResponseInterface): def __init__(self): self._model = None self._unit_expectation = Histogram(toy_axis, - contents = np.diff(norm.cdf(toy_axis.edges))) + contents=np.diff(norm.cdf(toy_axis.edges))) def set_model(self, model: Model): self._model = model - def expectation(self, axes:Axes)->Histogram: + def expectation(self, axes: Axes) -> Histogram: if axes != self._unit_expectation.axes: raise ValueError("Wrong axes. 
I have fixed axes.") @@ -88,22 +104,24 @@ def expectation(self, axes:Axes)->Histogram: else: flux = self._model.sources['source'].spectrum.main.shape.k.value - return self._unit_expectation*flux + return self._unit_expectation * flux + data = ToyData() -bkg = ToyBkg() -bkg.set_threeml_parameters(norm = Parameter('norm', 1)) +bkg = ToyThreeMLBkg() +bkg.set_threeml_parameters(norm=Parameter('norm', 1)) response = ToySourceResponse() ## 3Ml model ## We'll just use the K value in u.cm / u.cm / u.s / u.keV from threeML import Constant, PointSource, Model, JointLikelihood, DataList + spectrum = Constant() spectrum.k.value = 1 -source = PointSource("source", # arbitrary, but needs to be unique - l = 0, b = 0, # Doesn't matter - spectral_shape = spectrum) +source = PointSource("source", # arbitrary, but needs to be unique + l=0, b=0, # Doesn't matter + spectral_shape=spectrum) model = Model(source) @@ -114,14 +132,14 @@ def expectation(self, axes:Axes)->Histogram: plugins = DataList(cosi) -like = JointLikelihood(model, plugins, verbose = True) +like = JointLikelihood(model, plugins, verbose=True) like.fit() -fig,ax = plt.subplots() +fig, ax = plt.subplots() data.data.plot(ax) expectation = response.expectation(data.data.axes) if bkg is not None: expectation = expectation + bkg.expectation(data.data.axes) expectation.plot(ax) -plt.show() \ No newline at end of file +plt.show() From 0ba0ebf975b7a1000f6769b0fe3d101e902aba0a Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Thu, 17 Apr 2025 08:56:35 -0400 Subject: [PATCH 015/133] Fix indent Signed-off-by: Israel Martinez --- cosipy/interfaces/likelihood_interface.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cosipy/interfaces/likelihood_interface.py b/cosipy/interfaces/likelihood_interface.py index 84a6760f..b54d9bb4 100644 --- a/cosipy/interfaces/likelihood_interface.py +++ b/cosipy/interfaces/likelihood_interface.py @@ -30,7 +30,7 @@ def __init__(self, class UnbinnedLikelihoodInterface(LikelihoodInterface, Protocol): """ Needs to check that data, response and bkg are compatible - """ + """ def __init__(self, data: UnbinnedDataInterface, response: UnbinnedExpectationInterface, From a4ec73111000bef55502ffc510a378bcc2b33839 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Thu, 17 Apr 2025 08:57:28 -0400 Subject: [PATCH 016/133] Add explanations to toy example Signed-off-by: Israel Martinez --- .../scratch_interfaces_signatures.py | 95 ++++++++++++++----- 1 file changed, 69 insertions(+), 26 deletions(-) diff --git a/docs/api/interfaces/scratch_interfaces_signatures.py b/docs/api/interfaces/scratch_interfaces_signatures.py index 493d1319..ebdaeaf8 100644 --- a/docs/api/interfaces/scratch_interfaces_signatures.py +++ b/docs/api/interfaces/scratch_interfaces_signatures.py @@ -12,34 +12,61 @@ import numpy as np from scipy.stats import norm, uniform +from threeML import Constant, PointSource, Model, JointLikelihood, DataList + + from matplotlib import pyplot as plt -toy_axis = Axis(np.linspace(-5, 5)) +""" +This is an example on how to use the new interfaces. + +To keep things simple, example itself is a toy model. +It a 1D model, with a Gaussian signal on top of a flat +uniform background. You can execute it until the end +to see a plot on how it looks like. + +It looks nothing like COSI data, but +shows how generic the interfaces can be. I'm still working +on refactoring our current code to this format. +""" + +# ======== Create toy interfaces for this model =========== + +# Simple 1D axes. Hardcoded. 
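# (Sketch note: np.linspace(-5, 5) gives 50 edges, i.e. 49 bins. The same axis
# is shared by the toy data, background and source response defined below, which
# is what keeps their Histograms compatible when expectation() checks the axes.)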
+toy_axis = Axis(np.linspace(-5, 5)) +nevents_signal = 1000 +nevents_bkg = 1000 class ToyData(BinnedDataInterface): - # Random data. Normal signal on opt of uniform bkg + # Random data. Normal signal on top of uniform bkg + # Since the interfaces are Protocols, they don't *have* + # to derive from the base class, but doing some helps + # code readability, especially if you use an IDE. def __init__(self): self._data = Histogram(toy_axis) # Signal - self._data.fill(norm.rvs(size=1000)) + self._data.fill(norm.rvs(size=nevents_signal)) # Bkg - self._data.fill(uniform.rvs(-5, 10, size=1000)) + self._data.fill(uniform.rvs(toy_axis.lo_lim, toy_axis.hi_lim-toy_axis.lo_lim, size=nevents_bkg)) @property def data(self) -> Histogram: return self._data -class ToyBkg:#(BinnedBackgroundInterface): +class ToyBkg(BinnedBackgroundInterface): + """ + Models a uniform background + """ def __init__(self): self._unit_expectation = Histogram(toy_axis) self._unit_expectation[:] = 1 / self._unit_expectation.nbins - self._norm = None + self._norm = 1 def set_parameters(self, **params: Dict[str, Any]) -> None: self._norm = params['norm'] @@ -53,20 +80,26 @@ def expectation(self, axes: Axes) -> Histogram: if axes != self._unit_expectation.axes: raise ValueError("Wrong axes. I have fixed axes.") - if self._norm is None: - raise RuntimeError("Set norm parameter first") - return self._unit_expectation * self._norm -class ToyThreeMLBkg(ToyBkg):#(ToyBkg, ThreeMLBackgroundInterface): +class ToyThreeMLBkg(ToyBkg, ThreeMLBinnedBackgroundInterface): + """ + This class extends the core ToyBkg class by providing the extra + "translation" methods needed to interface with 3ML. + """ + def __init__(self): + super().__init__() - self._threeml_parameters = {} - def expectation(self, axes: Axes) -> Histogram: + # 3ML "Parameter" keeps track of a few more things than + # a "bare" parameter. + self._threeml_parameters = {'norm':Parameter('norm', self._norm)} - # In case it changed + def expectation(self, axes: Axes) -> Histogram: + # Overrides ToyBkg expectation + # Update, inn case it changed externally self.set_parameters(norm=self._threeml_parameters['norm'].value) return super().expectation(axes) @@ -81,6 +114,10 @@ def set_threeml_parameters(self, norm: Parameter, **kwargs): class ToySourceResponse(ThreeMLBinnedSourceResponseInterface): + """ + This models a Gaussian signal in 1D, centered at 0 and with std = 1. + The normalization --the "flux"-- is the only free parameters + """ def __init__(self): self._model = None @@ -97,6 +134,8 @@ def expectation(self, axes: Axes) -> Histogram: if self._model is None: raise RuntimeError("Set model first") + # Get the latest values of the flux + # Remember that _model can be modified externally between calls. sources = self._model.sources if len(sources) == 0: @@ -107,35 +146,39 @@ def expectation(self, axes: Axes) -> Histogram: return self._unit_expectation * flux -data = ToyData() -bkg = ToyThreeMLBkg() -bkg.set_threeml_parameters(norm=Parameter('norm', 1)) +# ======= Actual code. This is how the "tutorial" will look like ================ +# Set the inputs. These will eventually open file or set specific parameters, +# but since we are generating the data and models on the fly, and most parameter +# are hardcoded above withing the classes, then it's not necessary here. 
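
# (Optional, illustrative sketch only: because the interfaces are declared as
# @runtime_checkable Protocols, an implementation can also be checked
# structurally at runtime, whether or not it inherits from them. Keep in mind
# that isinstance() against a runtime_checkable Protocol only verifies that the
# required attributes/methods exist, not their signatures or return types.
# The throwaway name _bkg_check exists only for this aside.)
_bkg_check = ToyThreeMLBkg()
assert isinstance(_bkg_check, BinnedBackgroundInterface)
assert isinstance(_bkg_check, ThreeMLBinnedBackgroundInterface)

# Set the inputs (as described above):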
+data = ToyData() response = ToySourceResponse() +bkg = ToyThreeMLBkg() -## 3Ml model +## Source model ## We'll just use the K value in u.cm / u.cm / u.s / u.keV -from threeML import Constant, PointSource, Model, JointLikelihood, DataList - spectrum = Constant() -spectrum.k.value = 1 source = PointSource("source", # arbitrary, but needs to be unique l=0, b=0, # Doesn't matter spectral_shape=spectrum) - model = Model(source) -#bkg = None # Uncomment for not bkg fit +# Here you can set the parameters initial values, bounds, etc. +# This is passed to the minimizer +bkg.threeml_parameters['norm'].value = 1 +spectrum.k.value = 1 + +# Optional: Perform a background-only or a null-background fit +#bkg = None # Uncomment for no bkg #model = Model() # Uncomment for bkg-only hypothesis +# Fit cosi = COSILike('cosi', data, response, bkg) - plugins = DataList(cosi) - -like = JointLikelihood(model, plugins, verbose=True) - +like = JointLikelihood(model, plugins) like.fit() +# Plot results fig, ax = plt.subplots() data.data.plot(ax) expectation = response.expectation(data.data.axes) From 99e84e78b0ee541928beee587a48928f7aa57d8e Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Thu, 17 Apr 2025 08:58:12 -0400 Subject: [PATCH 017/133] Rename toy example interfaces Signed-off-by: Israel Martinez --- ...scratch_interfaces_signatures.py => toy_interfaces_example.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename docs/api/interfaces/{scratch_interfaces_signatures.py => toy_interfaces_example.py} (100%) diff --git a/docs/api/interfaces/scratch_interfaces_signatures.py b/docs/api/interfaces/toy_interfaces_example.py similarity index 100% rename from docs/api/interfaces/scratch_interfaces_signatures.py rename to docs/api/interfaces/toy_interfaces_example.py From f24c19db85c58309c2dd6cc378395de0de9601be Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Thu, 17 Apr 2025 09:00:01 -0400 Subject: [PATCH 018/133] Fix import warnings Signed-off-by: Israel Martinez --- docs/api/interfaces/toy_interfaces_example.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/docs/api/interfaces/toy_interfaces_example.py b/docs/api/interfaces/toy_interfaces_example.py index ebdaeaf8..128c4f70 100644 --- a/docs/api/interfaces/toy_interfaces_example.py +++ b/docs/api/interfaces/toy_interfaces_example.py @@ -1,19 +1,15 @@ from typing import Dict, Any -from astromodels import Model, Parameter - from cosipy.threeml import COSILike from cosipy.interfaces import (BinnedDataInterface, BinnedBackgroundInterface, ThreeMLBinnedBackgroundInterface, - ThreeMLBackgroundInterface, ThreeMLBinnedSourceResponseInterface) from histpy import Axis, Axes, Histogram import numpy as np from scipy.stats import norm, uniform -from threeML import Constant, PointSource, Model, JointLikelihood, DataList - +from threeML import Constant, PointSource, Model, JointLikelihood, DataList, Parameter from matplotlib import pyplot as plt @@ -100,7 +96,7 @@ def __init__(self): def expectation(self, axes: Axes) -> Histogram: # Overrides ToyBkg expectation # Update, inn case it changed externally - self.set_parameters(norm=self._threeml_parameters['norm'].value) + self.set_parameters(norm = self._threeml_parameters['norm'].value) return super().expectation(axes) From d358b73832b1962e6d7127dc45b81607e8630b36 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Thu, 17 Apr 2025 09:01:01 -0400 Subject: [PATCH 019/133] Null background no longer exists Signed-off-by: Israel Martinez --- tests/interfaces/test_background_interface.py | 18 
+----------------- 1 file changed, 1 insertion(+), 17 deletions(-) diff --git a/tests/interfaces/test_background_interface.py b/tests/interfaces/test_background_interface.py index 6a0831c1..cfc8227c 100644 --- a/tests/interfaces/test_background_interface.py +++ b/tests/interfaces/test_background_interface.py @@ -1,21 +1,5 @@ -from cosipy.interfaces import (NullBackground, - BackgroundInterface, +from cosipy.interfaces import (BackgroundInterface, BinnedBackgroundInterface, UnbinnedBackgroundInterface ) -def test_null_background(): - null_1 = NullBackground() - null_2 = NullBackground - null_3 = NullBackground - - assert null_1 is null_2 - assert null_2 is null_3 - assert null_3 is null_1 - assert isinstance(null_1, BackgroundInterface) - assert isinstance(null_2, BinnedBackgroundInterface) - assert isinstance(null_3, UnbinnedBackgroundInterface) - - class RandomClass: pass - - assert not isinstance(null_1, RandomClass) From 6fc062ecbbfe6c26ce053c9379daa3011955460e Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Thu, 17 Apr 2025 09:35:47 -0400 Subject: [PATCH 020/133] Fix type hints for kwargs Signed-off-by: Israel Martinez --- cosipy/interfaces/background_interface.py | 4 ++-- docs/api/interfaces/toy_interfaces_example.py | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/cosipy/interfaces/background_interface.py b/cosipy/interfaces/background_interface.py index 86d0b42a..9d20dd7c 100644 --- a/cosipy/interfaces/background_interface.py +++ b/cosipy/interfaces/background_interface.py @@ -21,7 +21,7 @@ @runtime_checkable class BackgroundInterface(Protocol): - def set_parameters(self, **params:Dict[str, Any]) -> None:... + def set_parameters(self, **parameters:Any) -> None:... @property def parameters(self) -> Dict[str, Any]:... @@ -32,7 +32,7 @@ class ThreeMLBackgroundInterface(BackgroundInterface, Protocol): with arbitrary type from/to 3ML parameters """ - def set_threeml_parameters(self, **parameters: Dict[str, Parameter]): + def set_threeml_parameters(self, **parameters: Parameter): """ The Parameter objects are passed "as reference", and can change. Remember to call set_parameters() before computing the expetation diff --git a/docs/api/interfaces/toy_interfaces_example.py b/docs/api/interfaces/toy_interfaces_example.py index 128c4f70..7c371d7d 100644 --- a/docs/api/interfaces/toy_interfaces_example.py +++ b/docs/api/interfaces/toy_interfaces_example.py @@ -64,8 +64,8 @@ def __init__(self): self._unit_expectation[:] = 1 / self._unit_expectation.nbins self._norm = 1 - def set_parameters(self, **params: Dict[str, Any]) -> None: - self._norm = params['norm'] + def set_parameters(self, **parameters:Any) -> None: + self._norm = parameters['norm'] @property def parameters(self) -> Dict[str, Any]: @@ -104,9 +104,9 @@ def expectation(self, axes: Axes) -> Histogram: def threeml_parameters(self) -> Dict[str, Parameter]: return self._threeml_parameters - def set_threeml_parameters(self, norm: Parameter, **kwargs): - self._threeml_parameters['norm'] = norm - self.set_parameters(norm = norm.value) + def set_threeml_parameters(self, **parameters: Parameter): + self._threeml_parameters = parameters + self.set_parameters(norm = parameters['norm'].value) class ToySourceResponse(ThreeMLBinnedSourceResponseInterface): From 35dc2f3b58baa431c2190bf8ecc6213c756469d5 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Thu, 17 Apr 2025 15:46:01 -0400 Subject: [PATCH 021/133] Divide 3ML model response from 3ML source response. 
The former can use multiple instances of the latter, once per source Signed-off-by: Israel Martinez --- .../interfaces/source_response_interface.py | 51 +++++++++++++-- cosipy/threeml/COSILike.py | 4 +- docs/api/interfaces/toy_interfaces_example.py | 62 ++++++++++++++----- 3 files changed, 94 insertions(+), 23 deletions(-) diff --git a/cosipy/interfaces/source_response_interface.py b/cosipy/interfaces/source_response_interface.py index 2a07e429..89f1fa32 100644 --- a/cosipy/interfaces/source_response_interface.py +++ b/cosipy/interfaces/source_response_interface.py @@ -1,19 +1,23 @@ from typing import Protocol, runtime_checkable from astromodels import Model +from astromodels.sources import Source from .expectation_interface import BinnedExpectationInterface, UnbinnedExpectationInterface __all__ = ["SourceResponseInterface", + "ThreeMLModelResponseInterface", + "UnbinnedThreeMLModelResponseInterface", + "BinnedThreeMLModelResponseInterface", "ThreeMLSourceResponseInterface", - "ThreeMLUnbinnedSourceResponseInterface", - "ThreeMLBinnedSourceResponseInterface"] + "UnbinnedThreeMLSourceResponseInterface", + "BinnedThreeMLSourceResponseInterface"] @runtime_checkable class SourceResponseInterface(Protocol): ... @runtime_checkable -class ThreeMLSourceResponseInterface(SourceResponseInterface, Protocol): +class ThreeMLModelResponseInterface(SourceResponseInterface, Protocol): def set_model(self, model: Model): """ The model is passed as a reference and it's parameters @@ -22,13 +26,50 @@ def set_model(self, model: Model): """ @runtime_checkable -class ThreeMLUnbinnedSourceResponseInterface(UnbinnedExpectationInterface, ThreeMLSourceResponseInterface, Protocol): +class UnbinnedThreeMLModelResponseInterface(UnbinnedExpectationInterface, ThreeMLModelResponseInterface, Protocol): """ No new methods. Just the inherited ones. """ @runtime_checkable -class ThreeMLBinnedSourceResponseInterface(ThreeMLSourceResponseInterface, BinnedExpectationInterface, Protocol): +class BinnedThreeMLModelResponseInterface(ThreeMLModelResponseInterface, BinnedExpectationInterface, Protocol): """ No new methods. Just the inherited ones. """ + +@runtime_checkable +class ThreeMLSourceResponseInterface(SourceResponseInterface, Protocol): + def set_source(self, source: Source): + """ + The source is passed as a reference and it's parameters + can change. Remember to check if it changed since the + last time the user called expectation. + """ + def copy(self) -> "ThreeMLSourceResponseInterface": + """ + This method is used to re-use the same object for multiple + sources. + It is expected to return a copy of itself, but deepcopy it's + and any other necessary information such that when + a new source is set, the expectation calculation + are independent. + + psr1 = ThreeMLSourceResponse() + psr2 = psr.copy() + psr1.set_source(source1) + psr2.set_source(source2) + """ + +@runtime_checkable +class UnbinnedThreeMLSourceResponseInterface(UnbinnedExpectationInterface, ThreeMLSourceResponseInterface, Protocol): + """ + No new methods. Just the inherited ones. + """ + +@runtime_checkable +class BinnedThreeMLSourceResponseInterface(ThreeMLSourceResponseInterface, BinnedExpectationInterface, Protocol): + """ + No new methods. Just the inherited ones. 
+ """ + + diff --git a/cosipy/threeml/COSILike.py b/cosipy/threeml/COSILike.py index 2bb38679..2498017f 100644 --- a/cosipy/threeml/COSILike.py +++ b/cosipy/threeml/COSILike.py @@ -2,7 +2,7 @@ from threeML import PluginPrototype, Parameter from cosipy.statistics import UnbinnedLikelihood, PoissonLikelihood from cosipy.interfaces import (DataInterface, - ThreeMLSourceResponseInterface, + ThreeMLModelResponseInterface, ThreeMLBackgroundInterface, LikelihoodInterface, ThreeMLBinnedBackgroundInterface) @@ -11,7 +11,7 @@ class COSILike(PluginPrototype): def __init__(self, name, data: DataInterface, - response: ThreeMLSourceResponseInterface, + response: ThreeMLModelResponseInterface, bkg: ThreeMLBackgroundInterface, likelihood = 'poisson'): """ diff --git a/docs/api/interfaces/toy_interfaces_example.py b/docs/api/interfaces/toy_interfaces_example.py index 7c371d7d..460f404a 100644 --- a/docs/api/interfaces/toy_interfaces_example.py +++ b/docs/api/interfaces/toy_interfaces_example.py @@ -1,10 +1,14 @@ from typing import Dict, Any +from astromodels.sources import Source + from cosipy.threeml import COSILike from cosipy.interfaces import (BinnedDataInterface, BinnedBackgroundInterface, ThreeMLBinnedBackgroundInterface, - ThreeMLBinnedSourceResponseInterface) + BinnedThreeMLModelResponseInterface, + BinnedThreeMLSourceResponseInterface, + ThreeMLSourceResponseInterface) from histpy import Axis, Axes, Histogram import numpy as np from scipy.stats import norm, uniform @@ -13,6 +17,8 @@ from matplotlib import pyplot as plt +import copy + """ This is an example on how to use the new interfaces. @@ -26,7 +32,6 @@ on refactoring our current code to this format. """ - # ======== Create toy interfaces for this model =========== # Simple 1D axes. Hardcoded. @@ -109,38 +114,62 @@ def set_threeml_parameters(self, **parameters: Parameter): self.set_parameters(norm = parameters['norm'].value) -class ToySourceResponse(ThreeMLBinnedSourceResponseInterface): +class ToyPointSourceResponse(BinnedThreeMLSourceResponseInterface): """ This models a Gaussian signal in 1D, centered at 0 and with std = 1. The normalization --the "flux"-- is the only free parameters """ def __init__(self): - self._model = None + self._source = None self._unit_expectation = Histogram(toy_axis, contents=np.diff(norm.cdf(toy_axis.edges))) - def set_model(self, model: Model): - self._model = model + def set_source(self, source: Source): + + if not isinstance(source, PointSource): + raise TypeError("I only know how to handle point sources!") + + self._source = source def expectation(self, axes: Axes) -> Histogram: if axes != self._unit_expectation.axes: raise ValueError("Wrong axes. I have fixed axes.") - if self._model is None: - raise RuntimeError("Set model first") + if self._source is None: + raise RuntimeError("Set a source first") # Get the latest values of the flux # Remember that _model can be modified externally between calls. - sources = self._model.sources - - if len(sources) == 0: - flux = 0. 
- else: - flux = self._model.sources['source'].spectrum.main.shape.k.value + flux = self._source.spectrum.main.shape.k.value return self._unit_expectation * flux + def copy(self) -> "ToyPointSourceResponse": + return copy.copy(self) + +class ToyModelResponse(BinnedThreeMLModelResponseInterface): + + def __init__(self, psr: BinnedThreeMLSourceResponseInterface): + self._psr = psr + self._psr_copies = {} + + def set_model(self, model: Model): + + self._psr_copies = {} + for name,source in model.sources.items(): + + psr_copy = self._psr.copy() + psr_copy.set_source(source) + self._psr_copies[name] = psr_copy + + def expectation(self, axes: Axes) -> Histogram: + expectation = Histogram(axes) + + for source_name,psr in self._psr_copies.items(): + expectation = expectation + psr.expectation(axes) + + return expectation # ======= Actual code. This is how the "tutorial" will look like ================ @@ -148,7 +177,8 @@ def expectation(self, axes: Axes) -> Histogram: # but since we are generating the data and models on the fly, and most parameter # are hardcoded above withing the classes, then it's not necessary here. data = ToyData() -response = ToySourceResponse() +psr = ToyPointSourceResponse() +response = ToyModelResponse(psr) bkg = ToyThreeMLBkg() ## Source model @@ -165,7 +195,7 @@ def expectation(self, axes: Axes) -> Histogram: spectrum.k.value = 1 # Optional: Perform a background-only or a null-background fit -#bkg = None # Uncomment for no bkg +bkg = None # Uncomment for no bkg #model = Model() # Uncomment for bkg-only hypothesis # Fit From d5d1decb81e333ef517a5dc3df672676b625bdf6 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Wed, 23 Apr 2025 09:14:19 -0400 Subject: [PATCH 022/133] Save progress. Working on: Moving PSR calculation from COSILIKE > get_scatt_map > SC file refactoring Signed-off-by: Israel Martinez --- cosipy/response/__init__.py | 1 + .../response/threeml_point_source_response.py | 180 ++++++++++ cosipy/response/threeml_response.py | 114 ++++++ cosipy/spacecraftfile/SpacecraftFile.py | 337 +++++++++--------- docs/api/interfaces/toy_interfaces_example.py | 4 +- .../using_COSILike_with_interfaces.py | 54 +++ 6 files changed, 514 insertions(+), 176 deletions(-) create mode 100644 cosipy/response/threeml_point_source_response.py create mode 100644 cosipy/response/threeml_response.py create mode 100644 docs/api/interfaces/using_COSILike_with_interfaces.py diff --git a/cosipy/response/__init__.py b/cosipy/response/__init__.py index 5b8a99f2..3c178596 100644 --- a/cosipy/response/__init__.py +++ b/cosipy/response/__init__.py @@ -2,3 +2,4 @@ from .DetectorResponse import DetectorResponse from .FullDetectorResponse import FullDetectorResponse from .ExtendedSourceResponse import ExtendedSourceResponse +from .threeml_response import * diff --git a/cosipy/response/threeml_point_source_response.py b/cosipy/response/threeml_point_source_response.py new file mode 100644 index 00000000..619a3a63 --- /dev/null +++ b/cosipy/response/threeml_point_source_response.py @@ -0,0 +1,180 @@ +import copy + +from astromodels.sources import Source, PointSource +from astropy.coordinates import SkyCoord +from histpy import Axes, Histogram +from cosipy.interfaces import BinnedThreeMLSourceResponseInterface + +from cosipy.response import FullDetectorResponse +from cosipy.spacecraftfile import SpacecraftFile, SpacecraftAttitudeMap + +__name__ = [] + +from mhealpy import HealpixMap + + +class BinnedThreeMlPointSourceResponse(BinnedThreeMLSourceResponseInterface): + """ + COSI 3ML plugin. 
+ + Parameters + ---------- + dr: + Full detector response handle (**not** the file path) + sc_orientation: + Contains the information of the orientation: timestamps (astropy.Time) and attitudes (scoord.Attitude) that describe + the spacecraft for the duration of the data included in the analysis + """ + + def __init__(self, + dr: FullDetectorResponse, + sc_orientation: SpacecraftFile, + ): + + # TODO: FullDetectorResponse -> BinnedInstrumentResponseInterface + + self._dr = dr + self._sc_orientation = sc_orientation + + self._init_cache() + + def _init_cache(self): + + # Prevent unnecessary calculations and new memory allocations + self._expectation = None + self._scatt_map = None + + self._source = None + + # TODO: currently Model.__eq__ seems broken. It returns True even + # if the internal parameters changed. Currently, caching only work + # for the source position, but everything related to spectral and + # polarization is recalculated even if it's still the same + self._last_convolved_source = None + + def copy(self) -> "BinnedThreeMlPointSourceResponse": + """ + Safe copy to use for multiple sources + Returns + ------- + A copy than can be used safely to convolve another source + """ + new = copy.copy(self) + new._init_cache() + return new + + def set_source(self, source: Source): + """ + The source is passed as a reference and it's parameters + can change. Remember to check if it changed since the + last time the user called expectation. + """ + if not isinstance(source, PointSource): + raise TypeError("I only know how to handle point sources!") + + self._source = source + + def expectation(self, axes:Axes)-> Histogram: + # TODO: check coordsys from axis + # TODO: Earth occ always true in this case + + # Check if we can use these axes + if 'PsiChi' not in axes.labels: + raise ValueError("PsiChi axes not present") + + if axes["PsiChi"].coordsys is None: + raise ValueError("PsiChi axes doesn't have a coordinate system") + + # Check what we can use from the cache + if self._expectation is None or self._expectation.axes != axes: + # Needs new memory allocation, and recompute everything + self._expectation = Histogram(axes) + else: + # If nothing has changed in the source, we can use the cached expectation + # as is. + # If the source has changed but the axes haven't, we can at least reuse + # it and prevent new memory allocation, we just need to zero it out + + # TODO: currently Source.__eq__ seems broken. It returns True even + # if some of the internal parameters changed. Caching the expected + # value is not implemented. 
Remove the "False and" when fixed + # Getting the source position explicitly does seem to work though + if False and (self._last_convolved_source == self._source): + return self._expectation + else: + self._expectation.clear() + + # Expectation calculation + + # Check if the source position change, since these operations + # are expensive + coord = self._source.position.sky_coord + if coord != self._last_convolved_source.position.sky_coord: + + coordsys = axes["PsiChi"].coordsys + + if coordsys == 'spacecraftframe': + dwell_time_map = self._get_dwell_time_map(coord) + self._psr[name] = self._dr.get_point_source_response(exposure_map=dwell_time_map) + elif self._coordsys == 'galactic': + scatt_map = self._get_scatt_map(coord) + self._psr[name] = self._dr.get_point_source_response(coord=coord, scatt_map=scatt_map) + else: + raise RuntimeError("Unknown coordinate system") + + return self._expectation + + coord = self._source.position.sky_coord + + + if self._last_convolved_source.position != : + + + self._last_convolved_source = copy.deepcopy(self._source) + + def _get_scatt_map(self, coord:SkyCoord)->SpacecraftAttitudeMap: + """ + Get the spacecraft attitude map of the source. + + Since we're accounting for Earth occultation, this is specific + to this coordinate + + Parameters + ---------- + coord : astropy.coordinates.SkyCoord + The coordinates of the target object. + + Returns + ------- + scatt_map : SpacecraftAttitudeMap + """ + + scatt_map = self._sc_orientation.get_scatt_map(nside=self._dr.nside * 2, target_coord=coord, + coordsys='galactic', earth_occ = True) + + return scatt_map + + def _get_dwell_time_map(self, coord: SkyCoord) -> HealpixMap: + """ + Get the dwell time map of the source. + + This is always specific to a coordinate. + + Parameters + ---------- + coord : astropy.coordinates.SkyCoord + Coordinates of the target source + + Returns + ------- + dwell_time_map : mhealpy.containers.healpix_map.HealpixMap + Dwell time map + """ + + self._sc_orientation.get_target_in_sc_frame(target_name=self._name, target_coord=coord) + dwell_time_map = self._sc_orientation.get_dwell_map(response=self._rsp_path) + + return dwell_time_map + + + diff --git a/cosipy/response/threeml_response.py b/cosipy/response/threeml_response.py new file mode 100644 index 00000000..e3cd6c2f --- /dev/null +++ b/cosipy/response/threeml_response.py @@ -0,0 +1,114 @@ +import copy + +from cosipy.interfaces import BinnedThreeMLModelResponseInterface, BinnedThreeMLSourceResponseInterface + +from astromodels import Model +from astromodels.sources import PointSource, ExtendedSource + +from histpy import Axes, Histogram + +__all__ = ["BinnedThreeMLResponse"] + +class BinnedThreeMLResponse(BinnedThreeMLModelResponseInterface): + + def __init__(self, + point_source_response:BinnedThreeMLSourceResponseInterface = None, + extended_source_response: BinnedThreeMLSourceResponseInterface = None): + """ + + Parameters + ---------- + point_source_response: + Response for :class:`astromodels.sources.PointSource`s. + It can be None is you don't plan to use it for point sources. + extended_source_response + Response for :class:`astromodels.sources.ExtendedSource`s + It can be None is you don't plan to use it for extended sources. + """ + self._psr = point_source_response + self._esr = extended_source_response + self._source_responses = {} + + # Cache + # Prevent unnecessary calculations and new memory allocations + self._expectation = None + self._model = None + + # TODO: currently Model.__eq__ seems broken. It returns. 
True even + # if the internal parameters changed. Caching the expected value + # is not implemented. + self._last_convolved_model = None + + def set_model(self, model: Model): + """ + + + Parameters + ---------- + model + + Returns + ------- + + """ + + if model is self._model: + # No need to do anything here + return + + self._source_responses = {} + + for name,source in model.sources.items(): + + if isinstance(source, PointSource): + psr_copy = self._psr.copy() + psr_copy.set_source(source) + self._source_responses[name] = psr_copy + elif isinstance(source, ExtendedSource): + esr_copy = self._esr.copy() + esr_copy.set_source(source) + self._source_responses[name] = esr_copy + else: + raise RuntimeError(f"The model contains the source {name} " + f"of type {type(source)}. I don't know " + "how to handle it!") + + self._model = model + + def expectation(self, axes:Axes)->Histogram: + """ + + Parameters + ---------- + axes + + Returns + ------- + + """ + if self._expectation is None or self._expectation.axes != axes: + # Needs new memory allocation, and recompute everything + self._expectation = Histogram(axes) + else: + # If nothing has changed in the model, we can use the cached expectation + # as is. + # If the model has changed but the axes haven't, we can at least reuse + # is and prevent new memory allocation, we just need to zero it out + + # TODO: currently Model.__eq__ seems broken. It returns. True even + # if the internal parameters changed. Caching the expected value + # is not implemented. Remove the "False and" when fixed + if False and (self._last_convolved_model == self._model): + return self._expectation + else: + self._expectation.clear() + + # Convolve all sources with the response + for source_name,psr in self._source_responses.items(): + self._expectation += psr.expectation(axes) + + # Get a copy with at model parameter values at the current time, + # not just a reference to the model object + self._last_convolved_model = copy.deepcopy(self._model) + + return self._expectation \ No newline at end of file diff --git a/cosipy/spacecraftfile/SpacecraftFile.py b/cosipy/spacecraftfile/SpacecraftFile.py index 1c779482..413d4a9b 100644 --- a/cosipy/spacecraftfile/SpacecraftFile.py +++ b/cosipy/spacecraftfile/SpacecraftFile.py @@ -3,13 +3,15 @@ import astropy.units as u from astropy.io import fits from astropy.time import Time, TimeDelta -from astropy.coordinates import SkyCoord, cartesian_to_spherical, Galactic +from astropy.coordinates import SkyCoord, cartesian_to_spherical, Galactic, EarthLocation, GCRS from mhealpy import HealpixMap import matplotlib.pyplot as plt from matplotlib.colors import LogNorm from matplotlib import cm, colors from scipy import interpolate +from histpy import TimeAxis + from scoords import Attitude, SpacecraftFrame from cosipy.response import FullDetectorResponse @@ -20,11 +22,11 @@ class SpacecraftFile(): - def __init__(self, time, x_pointings = None, y_pointings = None, \ - z_pointings = None, earth_zenith = None, altitude = None,\ - attitude = None, livetime = None, instrument = "COSI", \ - frame = "galactic"): - + def __init__(self, + time: Time, + attitude: Attitude, + location: GRCS, + livetime: u.Quantity): """ Handles the spacecraft orientation. Calculates the dwell time map and point source response over a certain orientation period. @@ -32,97 +34,31 @@ def __init__(self, time, x_pointings = None, y_pointings = None, \ Parameters ---------- - Time : astropy.time.Time + time: The time stamps for each pointings. 
Note this is NOT the time duration. - x_pointings : astropy.coordinates.SkyCoord, optional - The pointings (galactic system) of the x axis of the local - coordinate system attached to the spacecraft (the default - is `None`, which implies no input for the x pointings). - y_pointings : astropy.coordinates.SkyCoord, optional - The pointings (galactic system) of the y axis of the local - coordinate system attached to the spacecraft (the default - is `None`, which implies no input for the y pointings). - z_pointings : astropy.coordinates.SkyCoord, optional - The pointings (galactic system) of the z axis of the local - coordinate system attached to the spacecraft (the default - is `None`, which implies no input for the z pointings). - earth_zenith : astropy.coordinates.SkyCoord, optional - The pointings (galactic system) of the Earth zenith (the - default is `None`, which implies no input for the earth pointings). - altitude : array, optional - Altitude of the spacecraft in km. - livetime : array, optional + attitude: + Spacecraft orientation with respect to an inertial system. + location: + Location of the Spacecraft in inertial coordinates relative to + the Earth's center. + livetime: Time in seconds the instrument is live for the corresponding - energy bin (using left endpoints so that the last entry in - the ori file is 0). - attitude : numpy.ndarray, optional - The attitude of the spacecraft (the default is `None`, - which implies no input for the attitude of the spacecraft). - instrument : str, optional - The instrument name (the default is "COSI"). - frame : str, optional - The frame on which the analysis will be based (the default is "galactic"). + energy bin. Should have one less element than the number of + timestamps. """ - # check if the inputs are valid - # Time - if isinstance(time, Time): - self._time = time - else: - raise TypeError("The time should be a astropy.time.Time object") + # Note: livetime has one element less than the timestamps + self.npoints = np.broadcast_shapes(time.size, len(attitude), location.size, livetime.size + 1) - # Altitude - if not isinstance(altitude, (type(None))): - self._altitude = np.array(altitude) + self.livetime = livetime + self.location = location + self.attitude = attitude + self.time = TimeAxis(time) - # livetime - if not isinstance(livetime, (type(None))): - self.livetime = np.array(livetime) - - # x pointings - if isinstance(x_pointings, (SkyCoord, type(None))): - self.x_pointings = x_pointings - else: - raise TypeError("The x_pointing should be a NoneType or SkyCoord object!") - - # y pointings - if isinstance(y_pointings, (SkyCoord, type(None))): - self.y_pointings = y_pointings - else: - raise TypeError("The y_pointing should be a NoneType or SkyCoord object!") - - # z pointings - if isinstance(z_pointings, (SkyCoord, type(None))): - self.z_pointings = z_pointings - else: - raise TypeError("The z_pointing should be a NoneType or SkyCoord object!") - - # earth pointings - if isinstance(earth_zenith, (SkyCoord, type(None))): - self.earth_zenith = earth_zenith - else: - raise TypeError("The earth_zenith should be a NoneType or SkyCoord object!") - - # check if the x, y and z pointings are all None (no inputs). 
If all None, tt will try to read from attitude parameter - if self.x_pointings is None and self.y_pointings is None and self.z_pointings is None: - if attitude != None: - if type(attitude) is Attitude: - self.attitude = attitude - else: - raise TypeError("The attitude must be `scoords.attitude.Attitude` object") - else: - raise ValueError("Please input the pointings of as least two axes or attitude!") - - else: - self.attitude = None # if you have the inputs of x, y and z pointings, the attitude will be overwritten by a None value regardless of the input for the attitude variable. + @property + def time(self): + return self._time.edges - self._load_time = self._time.to_value(format = "unix") # this is not necessary, but just to make sure evething works fine... - self._x_direction = np.array([x_pointings.l.deg, x_pointings.b.deg]).T # this is not necessary, but just to make sure evething works fine... - self._z_direction = np.array([z_pointings.l.deg, z_pointings.b.deg]).T # this is not necessary, but just to make sure evething works fine... - self._earth_direction = np.array([earth_zenith.l.deg, earth_zenith.b.deg]).T # this is not necessary, but just to make sure evething works fine... - - self.frame = frame - @classmethod def parse_from_file(cls, file): @@ -140,113 +76,153 @@ def parse_from_file(cls, file): The SpacecraftFile object. """ - orientation_file = np.loadtxt(file, usecols=(1, 2, 3, 4, 5, 6, 7, 8, 9),delimiter=' ', skiprows=1, comments=("#", "EN")) - time_stamps = orientation_file[:, 0] - axis_1 = orientation_file[:, [2, 1]] - axis_2 = orientation_file[:, [4, 3]] - axis_3 = orientation_file[:, [7, 6]] - altitude = np.array(orientation_file[:, 5]) - livetime = np.array(orientation_file[:, 8]) - livetime = livetime[:-1] # left end points, so remove last bin. - - time = Time(time_stamps, format = "unix") - xpointings = SkyCoord(l = axis_1[:,0]*u.deg, b = axis_1[:,1]*u.deg, frame = "galactic") - zpointings = SkyCoord(l = axis_2[:,0]*u.deg, b = axis_2[:,1]*u.deg, frame = "galactic") - earthpointings = SkyCoord(l = axis_3[:,0]*u.deg, b = axis_3[:,1]*u.deg, frame = "galactic") - - return cls(time, x_pointings = xpointings, z_pointings = zpointings, earth_zenith = earthpointings, altitude = altitude, livetime=livetime) - - def get_time(self, time_array = None): - + # Current SC format: + # 0: Always "OG" (for orbital geometry?) + # 1: time: timestamp in unix seconds + # 2: lat_x: galactic latitude of SC x-axis (deg) + # 3: lon_x: galactic longitude of SC x-axis (deg) + # 4: lat_z galactic latitude of SC z-axis (deg) + # 5: lon_z: galactic longitude of SC y-axis (deg) + # 6: altitude: altitude above from Earth's ellipsoid (km) + # 7: Earth_lat: galactic latitude of the direction the Earth's zenith is pointing to at the SC location (deg) + # 8: Earth_lon: galactic longitude of the direction the Earth's zenith is pointing to at the SC location (deg) + # 9: livetime (previously called SAA): accumulated uptime up to the following entry (seconds) + + time,lat_x,lon_x,lat_z,lon_z,altitude,earth_lat,earth_lon,livetime = orientation_file = np.loadtxt(file, usecols=(1, 2, 3, 4, 5, 6, 7, 8, 9), unpack = true, + delimiter=' ', skiprows=1, comments=("#", "EN")) + time = Time(time, format="unix") + + xpointings = SkyCoord(l=lon_x * u.deg, b=lat_x * u.deg, frame="galactic") + zpointings = SkyCoord(l=lon_z * u.deg, b=lat_z * u.deg, frame="galactic") + + attitude = Attitude.from_axes(x=xpointings, z=zpointings) + + livetime = livetime[:-1]*u.s # The last element is 0. 
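        # Illustrative sanity check (sketch only, not required for parsing):
        # with this convention livetime[i] is the live time accumulated between
        # time[i] and time[i+1], so there is one livetime entry fewer than there
        # are timestamps, and livetime.sum() gives the total live time covered
        # by the file.
        assert len(livetime) == len(time) - 1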
+ + # Currently, the orbit information is in a weird format. + # The altitude it's with respect to the Earth's source, like + # you would specified it in a geodetic format, while + # the lon/lat is specified in J2000, like you would in ECI. + # Eventually everything should be in ECI (GCRS in astropy + # for all purposes), but for now let's do the conversion. + # 1. Get the direction in galactic + # 2. Transform to GCRS, which uses RA/Dec (ICRS-like). + # This is represented in the unit sphere + # 3. Add the altitude by transforming to EarthLocation. + # Should take care of the non-spherical Earth + # 4. Go back GCRS, now with the correct distance + # (from the Earth's center) + zenith_gal = SkyCoord(l=earth_lon * u.deg, b=earth_lat * u.deg, frame="galactic") + gcrs = zenith_gal.transform_to('gcrs') + earth_loc = EarthLocation.from_geodetic(lon=zenith_grcs.ra, lat=zenith_grcs.dec, height=altitude*u.km) + location = earth_loc.get_gcrs(grcs.obstime) + + return cls(time, attitude, location, livetime) + + def _interp_attitude(self, bins, weights): """ - Return the array pf pointing times as a astropy.Time object. Parameters ---------- - time_array : numpy.ndarray, optional - The time array (the default is `None`, which implies the time array will be taken from the instance). + bins + weights Returns ------- - astropy.time.Time - The time stamps of the orientation. + """ - if time_array == None: - self._time = Time(self._load_time, format = "unix") - else: - self._time = Time(time_array, format = "unix") + def interp_attitude(self): + """ - return self._time + Returns + ------- - def get_altitude(self): + """ + def _interp_location(self, bins, weights): """ - Return the array of Earth altitude. - + Parameters + ---------- + bins + weights + + Returns + ------- + + """ + + def interp_location(self): + """ Returns ------- - numpy array - the Earth altitude. - """ - return self._altitude + """ + + def _cummulative_livetime(bins, weights): + + if times.size == 1: + delta = TimeDelta([] * u.s) + else: + delta = times[1:] - times[:-1] + + + within_bin = self._time.widths[bins] * weights[0] - def get_time_delta(self, time_array = None): + return (self._time.lower_bounds.gps - self._time.lo_lim.gps)*u.s + within_bin + def cummulative_livetime(self, time: Time) -> u.Quantity: """ - Return an array of the time period between neighbouring time points. + Get the cummulative live time up to this time. + + The live time in between the internal timestamp is + assumed constant. Parameters ---------- - time_array : numpy.ndarray, optional - The time delta array (the default is `None`, which implies the time array will be taken from the instance). + time: + Timestamps Returns ------- - time_delta : astropy.time.Time - The time difference between the neighbouring time stamps. + Cummulative live time, with units. """ - if time_array == None: - self._time_delta = np.diff(self._load_time) - else: - self._time_delta = np.diff(time_array) + bins, weights = self._time.interp_weights(times) - time_delta = TimeDelta(self._time_delta * u.second) + return self._cummulative_livetime(bins, weights) - return time_delta - - def interpolate_direction(self, trigger, idx, direction): + def interp(self, times: Time) -> Tuple[Attitude, GCRS, u.Quantity]: """ - Linearly interpolates position at a given time between two timestamps. + Linearly interpolates attitude and position at a given time. Parameters ---------- - trigger : astropy.time.Time - The time of the event. 
- idx : int - The closest index in the pointing to the trigger time. - direction : numpy.ndarray - The pointing axis (x,z). + times: + Timestamps to interpolate Returns ------- - numpy.ndarray - The interpolated positions. + (attitude, location, livetime) + Shape (time.size, time.size, time.size - 1) """ - new_direction_lat = np.interp(trigger.value, self._load_time[idx : idx + 2], direction[idx : idx + 2, 1]) - if (direction[idx, 0] > direction[idx + 1, 0]): - new_direction_long = np.interp(trigger.value, self._load_time[idx : idx + 2], [direction[idx, 0], 360 + direction[idx + 1, 0]]) - new_direction_long = new_direction_long - 360 - else: - new_direction_long = np.interp(trigger.value, self._load_time[idx : idx + 2], direction[idx : idx + 2, 0]) + if times.size < 2: + raise ValueError("We need at least two time stamps. See also interp_attitude and inter_location") + + bins, weights = self._time.interp_weights(times) + + interp_attitude = self._interp_attitude(bins, weights) + interp_location = self._interp_location(bins, weights) + + interp_livetime = self._cummulative_livetime(bins, weights) - return np.array([new_direction_long, new_direction_lat]) + # Fix liv + + return self.__class__(times, interp_attitude, interp_location, interp_livetime) def source_interval(self, start, stop): @@ -286,9 +262,9 @@ def source_interval(self, start, stop): else: start_idx = self._load_time.searchsorted(start.value) - 1 - x_direction_start = self.interpolate_direction(start, start_idx, self._x_direction) - z_direction_start = self.interpolate_direction(start, start_idx, self._z_direction) - earth_direction_start = self.interpolate_direction(start, start_idx, self._earth_direction) + x_direction_start = self.interp(start, start_idx, self._x_direction) + z_direction_start = self.interp(start, start_idx, self._z_direction) + earth_direction_start = self.interp(start, start_idx, self._earth_direction) new_times = self._load_time[start_idx + 1 : stop_idx + 1] new_times = np.insert(new_times, 0, start.value) @@ -320,9 +296,9 @@ def source_interval(self, start, stop): if (stop.value % 1 != 0): stop_idx = self._load_time.searchsorted(stop.value) - 1 - x_direction_stop = self.interpolate_direction(stop, stop_idx, self._x_direction) - z_direction_stop = self.interpolate_direction(stop, stop_idx, self._z_direction) - earth_direction_stop = self.interpolate_direction(stop, stop_idx, self._earth_direction) + x_direction_stop = self.interp(stop, stop_idx, self._x_direction) + z_direction_stop = self.interp(stop, stop_idx, self._z_direction) + earth_direction_stop = self.interp(stop, stop_idx, self._earth_direction) new_times = np.delete(new_times, -1) new_times = np.append(new_times, stop.value) @@ -405,7 +381,9 @@ def get_attitude(self, x_pointings = None, y_pointings = None, z_pointings = Non return self.attitude - def get_target_in_sc_frame(self, target_name, target_coord, attitude = None, quiet = False, save = False): + + + def get_target_in_sc_frame(self, target_coord, attitude = None, quiet = False, save = False): """ Convert the x, y and z pointings of the spacescraft axes to the path of the source in the spacecraft frame. @@ -413,8 +391,6 @@ def get_target_in_sc_frame(self, target_name, target_coord, attitude = None, qui Parameters ---------- - target_name : str - The name of the target object. target_coord : astropy.coordinates.SkyCoord The coordinates of the target object. 
attitude: scoords.Attitude, optional @@ -435,8 +411,7 @@ def get_target_in_sc_frame(self, target_name, target_coord, attitude = None, qui else: self.attitude = self.get_attitude() - self.target_name = target_name - if quiet == False: + if not quite: logger.info("Now converting to the Spacecraft frame...") self.src_path_cartesian = SkyCoord(np.dot(self.attitude.rot.inv().as_matrix(), target_coord.cartesian.xyz.value), representation_type = 'cartesian', @@ -447,22 +422,36 @@ def get_target_in_sc_frame(self, target_name, target_coord, attitude = None, qui self.src_path_spherical = cartesian_to_spherical(self.src_path_cartesian.x, self.src_path_cartesian.y, self.src_path_cartesian.z) - if quiet == False: + if not quiet: logger.info(f"Conversion completed!") - # generate the numpy array of l and b to save to a npy file - l = np.array(self.src_path_spherical[2].deg) # note that 0 is Quanty, 1 is latitude and 2 is longitude and they are in rad not deg - b = np.array(self.src_path_spherical[1].deg) - self.src_path_lb = np.stack((l,b), axis=-1) - - if save == True: - np.save(self.target_name+"_source_path_in_SC_frame", self.src_path_lb) # convert to SkyCoord objects to get the output object of this method self.src_path_skycoord = SkyCoord(self.src_path_lb[:,0], self.src_path_lb[:,1], unit = "deg", frame = SpacecraftFrame()) return self.src_path_skycoord + def write(self, filename): + """ + + Parameters + ---------- + filename + + Returns + ------- + + """ + + # generate the numpy array of l and b to save to a npy file + l = np.array(self.src_path_spherical[ + 2].deg) # note that 0 is Quanty, 1 is latitude and 2 is longitude and they are in rad not deg + b = np.array(self.src_path_spherical[1].deg) + + src_path_lb = np.stack((l, b), axis=-1) + + + np.save(filename, self.src_path_lb) def get_dwell_map(self, response, src_path = None, save = False, pa_convention=None): diff --git a/docs/api/interfaces/toy_interfaces_example.py b/docs/api/interfaces/toy_interfaces_example.py index 460f404a..9ef16729 100644 --- a/docs/api/interfaces/toy_interfaces_example.py +++ b/docs/api/interfaces/toy_interfaces_example.py @@ -167,7 +167,7 @@ def expectation(self, axes: Axes) -> Histogram: expectation = Histogram(axes) for source_name,psr in self._psr_copies.items(): - expectation = expectation + psr.expectation(axes) + expectation += psr.expectation(axes) return expectation @@ -195,7 +195,7 @@ def expectation(self, axes: Axes) -> Histogram: spectrum.k.value = 1 # Optional: Perform a background-only or a null-background fit -bkg = None # Uncomment for no bkg +#bkg = None # Uncomment for no bkg #model = Model() # Uncomment for bkg-only hypothesis # Fit diff --git a/docs/api/interfaces/using_COSILike_with_interfaces.py b/docs/api/interfaces/using_COSILike_with_interfaces.py new file mode 100644 index 00000000..590a26d1 --- /dev/null +++ b/docs/api/interfaces/using_COSILike_with_interfaces.py @@ -0,0 +1,54 @@ +from cosipy.threeml import COSILike +from cosipy.response import BinnedThreeMLResponse +from threeML import Band, PointSource, Model, JointLikelihood, DataList, Parameter + +from astropy import units as u + +# Options for point sources +psr = BinnedThreeMlPointSourceResponse() + +psr = BinnedThreeMlPointSourceResponse + +# Option for extended sources +esr = BinnedThreeMLExtendedSourceResponse() + +response = BinnedThreeMLResponse(point_source_response = psr, + extended_source_response = esr) + +# Set model +l = 93. +b = -53. + +alpha = -1 +beta = -3 +xp = 450. * u.keV +piv = 500. 
* u.keV +K = 1 / u.cm / u.cm / u.s / u.keV + +spectrum = Band() + +spectrum.beta.min_value = -15.0 + +spectrum.alpha.value = alpha +spectrum.beta.value = beta +spectrum.xp.value = xp.value +spectrum.K.value = K.value +spectrum.piv.value = piv.value + +spectrum.xp.unit = xp.unit +spectrum.K.unit = K.unit +spectrum.piv.unit = piv.unit + +source = PointSource("source", # Name of source (arbitrary, but needs to be unique) + l = l, # Longitude (deg) + b = b, # Latitude (deg) + spectral_shape = spectrum) # Spectral model + +model = Model(source) # Model with single source. If we had multiple sources, we would do Model(source1, source2, ...) + +# Optional: if you want to call get_log_like manually, then you also need to set the model manually +# 3ML does this internally during the fit though +cosi = COSILike('cosi', data, response, bkg) +plugins = DataList(cosi) +like = JointLikelihood(model, plugins) +like.fit() From b4a962a8e6b840229535e6950fc0cfefbcfbbe70 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Wed, 30 Apr 2025 22:26:35 -0400 Subject: [PATCH 023/133] Refactor SpacecraftFile. Unit tests WiP Signed-off-by: Israel Martinez --- cosipy/__init__.py | 2 +- cosipy/spacecraftfile/SpacecraftFile.py | 1143 ------------------- cosipy/spacecraftfile/__init__.py | 2 +- cosipy/spacecraftfile/rsp_to_arf_rmf.py | 543 +++++++++ cosipy/spacecraftfile/spacecraft_file.py | 544 +++++++++ tests/spacecraftfile/test_spacecraftfile.py | 28 +- 6 files changed, 1102 insertions(+), 1160 deletions(-) delete mode 100644 cosipy/spacecraftfile/SpacecraftFile.py create mode 100644 cosipy/spacecraftfile/rsp_to_arf_rmf.py create mode 100644 cosipy/spacecraftfile/spacecraft_file.py diff --git a/cosipy/__init__.py b/cosipy/__init__.py index f3fdbb05..c6d47094 100644 --- a/cosipy/__init__.py +++ b/cosipy/__init__.py @@ -10,7 +10,7 @@ from .threeml import COSILike from .threeml import Band_Eflux -from .spacecraftfile import SpacecraftFile +from .spacecraftfile import * from .ts_map import FastTSMap diff --git a/cosipy/spacecraftfile/SpacecraftFile.py b/cosipy/spacecraftfile/SpacecraftFile.py deleted file mode 100644 index 413d4a9b..00000000 --- a/cosipy/spacecraftfile/SpacecraftFile.py +++ /dev/null @@ -1,1143 +0,0 @@ -import numpy as np -import matplotlib.pyplot as plt -import astropy.units as u -from astropy.io import fits -from astropy.time import Time, TimeDelta -from astropy.coordinates import SkyCoord, cartesian_to_spherical, Galactic, EarthLocation, GCRS -from mhealpy import HealpixMap -import matplotlib.pyplot as plt -from matplotlib.colors import LogNorm -from matplotlib import cm, colors -from scipy import interpolate - -from histpy import TimeAxis - -from scoords import Attitude, SpacecraftFrame -from cosipy.response import FullDetectorResponse - -from .scatt_map import SpacecraftAttitudeMap - -import logging -logger = logging.getLogger(__name__) - -class SpacecraftFile(): - - def __init__(self, - time: Time, - attitude: Attitude, - location: GRCS, - livetime: u.Quantity): - """ - Handles the spacecraft orientation. Calculates the dwell time - map and point source response over a certain orientation period. - Exports the point source response as RMF and ARF files that can be read by XSPEC. - - Parameters - ---------- - time: - The time stamps for each pointings. Note this is NOT the time duration. - attitude: - Spacecraft orientation with respect to an inertial system. - location: - Location of the Spacecraft in inertial coordinates relative to - the Earth's center. 
- livetime: - Time in seconds the instrument is live for the corresponding - energy bin. Should have one less element than the number of - timestamps. - """ - - # Note: livetime has one element less than the timestamps - self.npoints = np.broadcast_shapes(time.size, len(attitude), location.size, livetime.size + 1) - - self.livetime = livetime - self.location = location - self.attitude = attitude - self.time = TimeAxis(time) - - @property - def time(self): - return self._time.edges - - @classmethod - def parse_from_file(cls, file): - - """ - Parses timestamps, axis positions from file and returns to __init__. - - Parameters - ---------- - file : str - The file path of the pointings. - - Returns - ------- - cosipy.spacecraftfile.SpacecraftFile - The SpacecraftFile object. - """ - - # Current SC format: - # 0: Always "OG" (for orbital geometry?) - # 1: time: timestamp in unix seconds - # 2: lat_x: galactic latitude of SC x-axis (deg) - # 3: lon_x: galactic longitude of SC x-axis (deg) - # 4: lat_z galactic latitude of SC z-axis (deg) - # 5: lon_z: galactic longitude of SC y-axis (deg) - # 6: altitude: altitude above from Earth's ellipsoid (km) - # 7: Earth_lat: galactic latitude of the direction the Earth's zenith is pointing to at the SC location (deg) - # 8: Earth_lon: galactic longitude of the direction the Earth's zenith is pointing to at the SC location (deg) - # 9: livetime (previously called SAA): accumulated uptime up to the following entry (seconds) - - time,lat_x,lon_x,lat_z,lon_z,altitude,earth_lat,earth_lon,livetime = orientation_file = np.loadtxt(file, usecols=(1, 2, 3, 4, 5, 6, 7, 8, 9), unpack = true, - delimiter=' ', skiprows=1, comments=("#", "EN")) - time = Time(time, format="unix") - - xpointings = SkyCoord(l=lon_x * u.deg, b=lat_x * u.deg, frame="galactic") - zpointings = SkyCoord(l=lon_z * u.deg, b=lat_z * u.deg, frame="galactic") - - attitude = Attitude.from_axes(x=xpointings, z=zpointings) - - livetime = livetime[:-1]*u.s # The last element is 0. - - # Currently, the orbit information is in a weird format. - # The altitude it's with respect to the Earth's source, like - # you would specified it in a geodetic format, while - # the lon/lat is specified in J2000, like you would in ECI. - # Eventually everything should be in ECI (GCRS in astropy - # for all purposes), but for now let's do the conversion. - # 1. Get the direction in galactic - # 2. Transform to GCRS, which uses RA/Dec (ICRS-like). - # This is represented in the unit sphere - # 3. Add the altitude by transforming to EarthLocation. - # Should take care of the non-spherical Earth - # 4. 
Go back GCRS, now with the correct distance - # (from the Earth's center) - zenith_gal = SkyCoord(l=earth_lon * u.deg, b=earth_lat * u.deg, frame="galactic") - gcrs = zenith_gal.transform_to('gcrs') - earth_loc = EarthLocation.from_geodetic(lon=zenith_grcs.ra, lat=zenith_grcs.dec, height=altitude*u.km) - location = earth_loc.get_gcrs(grcs.obstime) - - return cls(time, attitude, location, livetime) - - def _interp_attitude(self, bins, weights): - """ - - Parameters - ---------- - bins - weights - - Returns - ------- - - """ - - def interp_attitude(self): - """ - - Returns - ------- - - """ - - def _interp_location(self, bins, weights): - """ - - Parameters - ---------- - bins - weights - - Returns - ------- - - """ - - def interp_location(self): - """ - - Returns - ------- - - """ - - def _cummulative_livetime(bins, weights): - - if times.size == 1: - delta = TimeDelta([] * u.s) - else: - delta = times[1:] - times[:-1] - - - within_bin = self._time.widths[bins] * weights[0] - - return (self._time.lower_bounds.gps - self._time.lo_lim.gps)*u.s + within_bin - - def cummulative_livetime(self, time: Time) -> u.Quantity: - """ - Get the cummulative live time up to this time. - - The live time in between the internal timestamp is - assumed constant. - - Parameters - ---------- - time: - Timestamps - - Returns - ------- - Cummulative live time, with units. - """ - - bins, weights = self._time.interp_weights(times) - - return self._cummulative_livetime(bins, weights) - - def interp(self, times: Time) -> Tuple[Attitude, GCRS, u.Quantity]: - - """ - Linearly interpolates attitude and position at a given time. - - Parameters - ---------- - times: - Timestamps to interpolate - - Returns - ------- - (attitude, location, livetime) - Shape (time.size, time.size, time.size - 1) - """ - - if times.size < 2: - raise ValueError("We need at least two time stamps. See also interp_attitude and inter_location") - - bins, weights = self._time.interp_weights(times) - - interp_attitude = self._interp_attitude(bins, weights) - interp_location = self._interp_location(bins, weights) - - interp_livetime = self._cummulative_livetime(bins, weights) - - # Fix liv - - return self.__class__(times, interp_attitude, interp_location, interp_livetime) - - def source_interval(self, start, stop): - - """ - Returns the SpacecraftFile file class object for the source interval. - - Parameters - ---------- - start : astropy.time.Time - The start time of the orientation period. - stop : astropy.time.Time - The end time of the orientation period. 
- - Returns - ------- - cosipy.spacecraft.SpacecraftFile - """ - - if(start.format != 'unix' or stop.format != 'unix'): - start = Time(start.unix, format='unix') - stop = Time(stop.unix, format='unix') - - if(start > stop): - raise ValueError("start time cannot be after stop time.") - - stop_idx = self._load_time.searchsorted(stop.value) - - if (start.value % 1 == 0): - start_idx = self._load_time.searchsorted(start.value) - new_times = self._load_time[start_idx : stop_idx + 1] - new_x_direction = self._x_direction[start_idx : stop_idx + 1] - new_z_direction = self._z_direction[start_idx : stop_idx + 1] - new_earth_direction = self._earth_direction[start_idx : stop_idx + 1] - new_earth_altitude = self._altitude[start_idx : stop_idx + 1] - new_livetime = self.livetime[start_idx : stop_idx] - - else: - start_idx = self._load_time.searchsorted(start.value) - 1 - - x_direction_start = self.interp(start, start_idx, self._x_direction) - z_direction_start = self.interp(start, start_idx, self._z_direction) - earth_direction_start = self.interp(start, start_idx, self._earth_direction) - - new_times = self._load_time[start_idx + 1 : stop_idx + 1] - new_times = np.insert(new_times, 0, start.value) - - new_x_direction = self._x_direction[start_idx + 1 : stop_idx + 1] - new_x_direction = np.insert(new_x_direction, 0, x_direction_start, axis = 0) - - new_z_direction = self._z_direction[start_idx + 1 : stop_idx + 1] - new_z_direction = np.insert(new_z_direction, 0, z_direction_start, axis = 0) - - new_earth_direction = self._earth_direction[start_idx + 1 : stop_idx + 1] - new_earth_direction = np.insert(new_earth_direction, 0, earth_direction_start, axis = 0) - - # Use linear interpolation to get starting altitude at desired time. - f = interpolate.interp1d(self._time.value, self._altitude, kind="linear") - starting_alt = f(start.value) - new_earth_altitude = self._altitude[start_idx + 1 : stop_idx + 1] - new_earth_altitude = np.insert(new_earth_altitude, 0, starting_alt) - - # SAA livetime: - if self.livetime[start_idx] == 0: - udpated_livetime = 0 - else: - updated_livetime = new_times[1] - new_times[0] - - new_livetime = self.livetime[start_idx + 1 : stop_idx] - new_livetime = np.insert(new_livetime, 0, updated_livetime) - - if (stop.value % 1 != 0): - stop_idx = self._load_time.searchsorted(stop.value) - 1 - - x_direction_stop = self.interp(stop, stop_idx, self._x_direction) - z_direction_stop = self.interp(stop, stop_idx, self._z_direction) - earth_direction_stop = self.interp(stop, stop_idx, self._earth_direction) - - new_times = np.delete(new_times, -1) - new_times = np.append(new_times, stop.value) - - new_x_direction = new_x_direction[:-1] - new_x_direction = np.append(new_x_direction, [x_direction_stop], axis = 0) - - new_z_direction = new_z_direction[:-1] - new_z_direction = np.append(new_z_direction, [z_direction_stop], axis = 0) - - new_earth_direction = new_earth_direction[:-1] - new_earth_direction = np.append(new_earth_direction, [earth_direction_stop], axis = 0) - - # Use linear interpolation to get starting altitude at desired time. 
- f = interpolate.interp1d(self._time.value, self._altitude, kind="linear") - stop_alt = f(stop.value) - new_earth_altitude = new_earth_altitude[:-1] - new_earth_altitude = np.append(new_earth_altitude, [stop_alt]) - - # SAA livetime: - if new_livetime[-1] == 0: - udpated_livetime = 0 - else: - updated_livetime = new_times[-1] - new_times[-2] - new_livetime = new_livetime[:-1] - new_livetime = np.append(new_livetime, updated_livetime) - - time = Time(new_times, format = "unix") - xpointings = SkyCoord(l = new_x_direction[:,0]*u.deg, b = new_x_direction[:,1]*u.deg, frame = "galactic") - zpointings = SkyCoord(l = new_z_direction[:,0]*u.deg, b = new_z_direction[:,1]*u.deg, frame = "galactic") - earthpointings = SkyCoord(l = new_earth_direction[:,0]*u.deg, b = new_earth_direction[:,1]*u.deg, frame = "galactic") - altitude = new_earth_altitude - - return self.__class__(time, x_pointings = xpointings, z_pointings = zpointings, earth_zenith = earthpointings, altitude = altitude, livetime = new_livetime) - - def get_attitude(self, x_pointings = None, y_pointings = None, z_pointings = None): - - """ - Converts the x, y and z pointings to the attitude of the telescope. - - Parameters - ---------- - x_pointings : astropy.coordinates.SkyCoord, optional - The pointings (galactic system) of the x axis of the local coordinate system attached to the spacecraft (the default is `None`, which implies that the x pointings will be taken from the instance). - y_pointings : astropy.coordinates.SkyCoord, optional - The pointings (galactic system) of the y axis of the local coordinate system attached to the spacecraft (the default is `None`, which implies that the y pointings will be taken from the instance). - z_pointings : astropy.coordinates.SkyCoord, optional - The pointings (galactic system) of the z axis of the local coordinate system attached to the spacecraft (the default is `None`, which implies that the z pointings will be taken from the instance). - - Returns - ------- - scoords.attitude.Attitude - The attitude of the spacecraft. - """ - if self.attitude is None: - # the attitude is None, we will calculate from the x, y and z pointings - if x_pointings is not None: - self.x_pointings = x_pointings - if y_pointings is not None: - self.y_pointings = y_pointings - if z_pointings is not None: - self.z_pointings = z_pointings - - list_ = [self.x_pointings, self.y_pointings, self.z_pointings] - coord_list_of_path = [x for x in list_ if x!=None] # check how many pointings the user input - - # Check if the user input pointings from at least two axes - if len(coord_list_of_path) <= 1: - raise ValueError("You must input pointings of at least two axes") - - # Check if the inputs are SkyCoord objects - for i in coord_list_of_path: - if type(i) != SkyCoord: - raise ValueError("The coordiates must be a SkyCoord object") - - self.attitude = Attitude.from_axes(x=self.x_pointings, - y=self.y_pointings, - z=self.z_pointings, - frame = self.frame) - - return self.attitude - - - - def get_target_in_sc_frame(self, target_coord, attitude = None, quiet = False, save = False): - - """ - Convert the x, y and z pointings of the spacescraft axes to the path of the source in the spacecraft frame. - Specify the pointings of at least two axes. - - Parameters - ---------- - target_coord : astropy.coordinates.SkyCoord - The coordinates of the target object. - attitude: scoords.Attitude, optional - The attitude of the spacecraft (the default is `None`, which implies the attitude will be taken from the instance). 
- quiet : bool, default=False - Setting `True` to stop printing the messages. - save : bool, default=False - Setting `True` to save the target coordinates in the spacecraft frame. - - Returns - ------- - astropy.coordinates.SkyCoord - The target coordinates in the spacecraft frame. - """ - - if attitude != None: - self.attitude = attitude - else: - self.attitude = self.get_attitude() - - if not quite: - logger.info("Now converting to the Spacecraft frame...") - self.src_path_cartesian = SkyCoord(np.dot(self.attitude.rot.inv().as_matrix(), target_coord.cartesian.xyz.value), - representation_type = 'cartesian', - frame = SpacecraftFrame()) - - # The conversion above is in Cartesian frame, so we have to convert them to the spherical one. - - self.src_path_spherical = cartesian_to_spherical(self.src_path_cartesian.x, - self.src_path_cartesian.y, - self.src_path_cartesian.z) - if not quiet: - logger.info(f"Conversion completed!") - - - # convert to SkyCoord objects to get the output object of this method - self.src_path_skycoord = SkyCoord(self.src_path_lb[:,0], self.src_path_lb[:,1], unit = "deg", frame = SpacecraftFrame()) - - return self.src_path_skycoord - - def write(self, filename): - """ - - Parameters - ---------- - filename - - Returns - ------- - - """ - - # generate the numpy array of l and b to save to a npy file - l = np.array(self.src_path_spherical[ - 2].deg) # note that 0 is Quanty, 1 is latitude and 2 is longitude and they are in rad not deg - b = np.array(self.src_path_spherical[1].deg) - - src_path_lb = np.stack((l, b), axis=-1) - - - np.save(filename, self.src_path_lb) - - def get_dwell_map(self, response, src_path = None, save = False, pa_convention=None): - - """ - Generates the dwell time map for the source. - - Parameters - ---------- - response : str or pathlib.Path - The path to the response file. - src_path : astropy.coordinates.SkyCoord, optional - The movement of source in the detector frame (the default is `None`, which implies that the `src_path` will be read from the instance). - save : bool, default=False - Set True to save the dwell time map. - pa_convention : str, optional - Polarization convention of response ('RelativeX', 'RelativeY', or 'RelativeZ') - - Returns - ------- - mhealpy.containers.healpix_map.HealpixMap - The dwell time map. - """ - - # Define the response - self.response_file = response - - # Define the dts - self.dts = self.get_time_delta() - - # define the target source path in the SC frame - if src_path is None: - path = self.src_path_skycoord - else: - path = src_path - # check if the target source path is astropy.Skycoord object - if type(path) != SkyCoord: - raise TypeError("The coordinates of the source movement in the Spacecraft frame must be a SkyCoord object") - - if path.shape[0]-1 != self.dts.shape[0]: - raise ValueError("The dimensions of the dts or source coordinates are not correct. Please check your inputs.") - - with FullDetectorResponse.open(self.response_file, pa_convention=pa_convention) as response: - self.dwell_map = HealpixMap(base = response, - coordsys = SpacecraftFrame()) - - # Get the unique pixels to weight, and sum all the correspondint weights first, so - # each pixels needs to be called only once. - # Based on https://stackoverflow.com/questions/23268605/grouping-indices-of-unique-elements-in-numpy - - # remove the last value. 
Effectively a 0th order interpolations - pixels, weights = self.dwell_map.get_interp_weights(theta = self.src_path_skycoord[:-1]) - - weighted_duration = weights * self.dts.to_value(u.second)[None] - - pixels = pixels.flatten() - weighted_duration = weighted_duration.flatten() - - pixels_argsort = np.argsort(pixels) - - pixels = pixels[pixels_argsort] - weighted_duration = weighted_duration[pixels_argsort] - - first_unique = np.concatenate(([True], pixels[1:] != pixels[:-1])) - - pixel_unique = pixels[first_unique] - - splits = np.nonzero(first_unique)[0][1:] - pixel_durations = [np.sum(weighted_duration[start:stop]) for start,stop in zip(np.append(0,splits), np.append(splits, pixels.size))] - - for pix, dur in zip(pixel_unique, pixel_durations): - self.dwell_map[pix] += dur - - self.dwell_map.to(u.second, update = False, copy = False) - - if save == True: - self.dwell_map.write_map(self.target_name + "_DwellMap.fits", overwrite = True) - - return self.dwell_map - - def get_scatt_map(self, - nside, - target_coord=None, - scheme = 'ring', - coordsys = 'galactic', - r_earth = 6378.0, - earth_occ = True - ): - - """ - Bin the spacecraft attitude history into a 4D histogram that - contains the accumulated time the axes of the spacecraft where - looking at a given direction. - - Parameters - ---------- - target_coord : astropy.coordinates.SkyCoord, optional - The coordinates of the target object. - nside : int - The nside of the scatt map. - scheme : str, optional - The scheme of the scatt map (the default is "ring") - coordsys : str, optional - The coordinate system used in the scatt map (the default is "galactic). - r_earth : float, optional - Earth radius in km (default is 6378 km). - earth_occ : bool, optional - Option to include Earth occultation in scatt map calculation. - Default is True. - - Returns - ------- - h_ori : cosipy.spacecraftfile.scatt_map.SpacecraftAttitudeMap - The spacecraft attitude map. - """ - - # Check if target_coord is needed - if earth_occ and target_coord is None: - raise ValueError("target_coord is needed when earth_occ = True") - - # Get orientations - timestamps = self.get_time() - attitudes = self.get_attitude() - - # Altitude at each point in the orbit: - altitude = self._altitude - - # Earth zenith at each point in the orbit: - earth_zenith = self.earth_zenith - - # Fill (only 2 axes needed to fully define the orientation) - h_ori = SpacecraftAttitudeMap(nside = nside, - scheme = scheme, - coordsys = coordsys) - - x,y,z = attitudes[:-1].as_axes() - - # Get max angle based on altitude: - max_angle = np.pi - np.arcsin(r_earth/(r_earth + altitude)) - max_angle *= (180/np.pi) # angles in degree - - # Define weights and set to 0 if blocked by Earth: - weight = self.livetime*u.s - - if earth_occ: - # Calculate angle between source direction and Earth zenith - # for each time stamp: - src_angle = target_coord.separation(earth_zenith) - - # Get pointings that are occulted by Earth: - earth_occ_index = src_angle.value >= max_angle - - # Mask - weight[earth_occ_index[:-1]] = 0 - - # Fill histogram: - h_ori.fill(x, y, weight = weight) - - return h_ori - - - def get_psr_rsp(self, response = None, dwell_map = None, dts = None, pa_convention=None): - - """ - Generates the point source response based on the response file and dwell time map. - dts is used to find the exposure time for this observation. 
- - Parameters - ---------- - :response : str or pathlib.Path, optional - The response for the observation (the defaul is `None`, which implies that the `response` will be read from the instance). - dwell_map : str, optional - The time dwell map for the source, you can load saved dwell time map using this parameter if you've saved it before (the defaul is `None`, which implies that the `dwell_map` will be read from the instance). - dts : numpy.ndarray or str, optional - The elapsed time for each pointing. It must has the same size as the pointings. If you have saved this array, you can load it using this parameter (the defaul is `None`, which implies that the `dts` will be read from the instance). - - Returns - ------- - Ei_edges : numpy.ndarray - The edges of the incident energy. - Ei_lo : numpy.ndarray - The lower edges of the incident energy. - Ei_hi : numpy.ndarray - The upper edges of the incident energy. - Em_edges : numpy.ndarray - The edges of the measured energy. - Em_lo : numpy.ndarray - The lower edges of the measured energy. - Em_hi : numpy.ndarray - The upper edges of the measured energy. - areas : numpy.ndarray - The effective area of each energy bin. - matrix : numpy.ndarray - The energy dispersion matrix. - pa_convention : str, optional - Polarization convention of response ('RelativeX', 'RelativeY', or 'RelativeZ') - """ - - if response == None: - pass # will use the response defined in the previous steps - else: - self.response_file = response - - if dwell_map is None: # must use is None, or it throws error! - pass # will use the dwelltime map calculated in the previous steps - else: - self.dwell_map = HealpixMap.read_map(dwell_map) - - if dts == None: - self.dts = self.get_time_delta() - else: - self.dts = TimeDelta(dts*u.second) - - with FullDetectorResponse.open(self.response_file, pa_convention=pa_convention) as response: - - # get point source response - self.psr = response.get_point_source_response(self.dwell_map) - - self.Ei_edges = np.array(response.axes['Ei'].edges) - self.Ei_lo = np.float32(self.Ei_edges[:-1]) # use float32 to match the requirement of the data type - self.Ei_hi = np.float32(self.Ei_edges[1:]) - - self.Em_edges = np.array(response.axes['Em'].edges) - self.Em_lo = np.float32(self.Em_edges[:-1]) - self.Em_hi = np.float32(self.Em_edges[1:]) - - # get the effective area and matrix - logger.info("Getting the effective area ...") - self.areas = np.float32(np.array(self.psr.project('Ei').to_dense().contents))/self.dts.to_value(u.second).sum() - spectral_response = np.float32(np.array(self.psr.project(['Ei','Em']).to_dense().contents)) - self.matrix = np.float32(np.zeros((self.Ei_lo.size,self.Em_lo.size))) # initate the matrix - - logger.info("Getting the energy redistribution matrix ...") - for i in np.arange(self.Ei_lo.size): - new_raw = spectral_response[i,:]/spectral_response[i,:].sum() - self.matrix[i,:] = new_raw - self.matrix = self.matrix.T - - return self.Ei_edges, self.Ei_lo, self.Ei_hi, self.Em_edges, self.Em_lo, self.Em_hi, self.areas, self.matrix - - - def get_arf(self, out_name = None): - - """ - Converts the point source response to an arf file that can be read by XSPEC. - - Parameters - ---------- - out_name: str, optional - The name of the arf file to save. (the default is `None`, which implies that the saving name will be the target name of the instance). 
- """ - - if out_name == None: - self.out_name = self.target_name - else: - self.out_name = out_name - - # blow write the arf file - copyright_string=" FITS (Flexible Image Transport System) format is defined in 'Astronomy and Astrophysics', volume 376, page 359; bibcode: 2001A&A...376..359H " - - ## Create PrimaryHDU - primaryhdu = fits.PrimaryHDU() # create an empty primary HDU - primaryhdu.header["BITPIX"] = -32 # since it's an empty HDU, I can just change the data type by resetting the BIPTIX value - primaryhdu.header["COMMENT"] = copyright_string # add comments - primaryhdu.header # print headers and their values - - col1_energ_lo = fits.Column(name="ENERG_LO", format="E",unit = "keV", array=self.Em_lo) - col2_energ_hi = fits.Column(name="ENERG_HI", format="E",unit = "keV", array=self.Em_hi) - col3_specresp = fits.Column(name="SPECRESP", format="E",unit = "cm**2", array=self.areas) - cols = fits.ColDefs([col1_energ_lo, col2_energ_hi, col3_specresp]) # create a ColDefs (column-definitions) object for all columns - specresp_bintablehdu = fits.BinTableHDU.from_columns(cols) # create a binary table HDU object - - specresp_bintablehdu.header.comments["TTYPE1"] = "label for field 1" - specresp_bintablehdu.header.comments["TFORM1"] = "data format of field: 4-byte REAL" - specresp_bintablehdu.header.comments["TUNIT1"] = "physical unit of field" - specresp_bintablehdu.header.comments["TTYPE2"] = "label for field 2" - specresp_bintablehdu.header.comments["TFORM2"] = "data format of field: 4-byte REAL" - specresp_bintablehdu.header.comments["TUNIT2"] = "physical unit of field" - specresp_bintablehdu.header.comments["TTYPE3"] = "label for field 3" - specresp_bintablehdu.header.comments["TFORM3"] = "data format of field: 4-byte REAL" - specresp_bintablehdu.header.comments["TUNIT3"] = "physical unit of field" - - specresp_bintablehdu.header["EXTNAME"] = ("SPECRESP","name of this binary table extension") - specresp_bintablehdu.header["TELESCOP"] = ("COSI","mission/satellite name") - specresp_bintablehdu.header["INSTRUME"] = ("COSI","instrument/detector name") - specresp_bintablehdu.header["FILTER"] = ("NONE","filter in use") - specresp_bintablehdu.header["HDUCLAS1"] = ("RESPONSE","dataset relates to spectral response") - specresp_bintablehdu.header["HDUCLAS2"] = ("SPECRESP","extension contains an ARF") - specresp_bintablehdu.header["HDUVERS"] = ("1.1.0","version of format") - - new_arfhdus = fits.HDUList([primaryhdu, specresp_bintablehdu]) - new_arfhdus.writeto(f'{self.out_name}.arf', overwrite=True) - - return - - def get_rmf(self, out_name = None): - - """ - Converts the point source response to an rmf file that can be read by XSPEC. - - Parameters - ---------- - out_name: str, optional - The name of the arf file to save. (the default is None, which implies that the saving name will be the target name of the instance). 
- """ - - if out_name == None: - self.out_name = self.target_name - else: - self.out_name = out_name - - # blow write the arf file - copyright_string=" FITS (Flexible Image Transport System) format is defined in 'Astronomy and Astrophysics', volume 376, page 359; bibcode: 2001A&A...376..359H " - - ## Create PrimaryHDU - primaryhdu = fits.PrimaryHDU() # create an empty primary HDU - primaryhdu.header["BITPIX"] = -32 # since it's an empty HDU, I can just change the data type by resetting the BIPTIX value - primaryhdu.header["COMMENT"] = copyright_string # add comments - primaryhdu.header # print headers and their values - - ## Create binary table HDU for MATRIX - ### prepare colums - energ_lo = [] - energ_hi = [] - n_grp = [] - f_chan = [] - n_chan = [] - matrix = [] - for i in np.arange(len(self.Ei_lo)): - energ_lo_temp = np.float32(self.Em_lo[i]) - energ_hi_temp = np.float32(self.Ei_hi[i]) - - if self.matrix[:,i].sum() != 0: - nz_matrix_idx = np.nonzero(self.matrix[:,i])[0] # non-zero index for the matrix - subsets = np.split(nz_matrix_idx, np.where(np.diff(nz_matrix_idx) != 1)[0]+1) - n_grp_temp = np.int16(len(subsets)) - f_chan_temp = [] - n_chan_temp = [] - matrix_temp = [] - for m in np.arange(n_grp_temp): - f_chan_temp += [subsets[m][0]] - n_chan_temp += [len(subsets[m])] - for m in nz_matrix_idx: - matrix_temp += [self.matrix[:,i][m]] - f_chan_temp = np.int16(np.array(f_chan_temp)) - n_chan_temp = np.int16(np.array(n_chan_temp)) - matrix_temp = np.float32(np.array(matrix_temp)) - else: - n_grp_temp = np.int16(0) - f_chan_temp = np.int16(np.array([0])) - n_chan_temp = np.int16(np.array([0])) - matrix_temp = np.float32(np.array([0])) - - energ_lo.append(energ_lo_temp) - energ_hi.append(energ_hi_temp) - n_grp.append(n_grp_temp) - f_chan.append(f_chan_temp) - n_chan.append(n_chan_temp) - matrix.append(matrix_temp) - - col1_energ_lo = fits.Column(name="ENERG_LO", format="E",unit = "keV", array=energ_lo) - col2_energ_hi = fits.Column(name="ENERG_HI", format="E",unit = "keV", array=energ_hi) - col3_n_grp = fits.Column(name="N_GRP", format="I", array=n_grp) - col4_f_chan = fits.Column(name="F_CHAN", format="PI(54)", array=f_chan) - col5_n_chan = fits.Column(name="N_CHAN", format="PI(54)", array=n_chan) - col6_n_chan = fits.Column(name="MATRIX", format="PE(161)", array=matrix) - cols = fits.ColDefs([col1_energ_lo, col2_energ_hi, col3_n_grp, col4_f_chan, col5_n_chan, col6_n_chan]) # create a ColDefs (column-definitions) object for all columns - matrix_bintablehdu = fits.BinTableHDU.from_columns(cols) # create a binary table HDU object - - matrix_bintablehdu.header.comments["TTYPE1"] = "label for field 1 " - matrix_bintablehdu.header.comments["TFORM1"] = "data format of field: 4-byte REAL" - matrix_bintablehdu.header.comments["TUNIT1"] = "physical unit of field" - matrix_bintablehdu.header.comments["TTYPE2"] = "label for field 2" - matrix_bintablehdu.header.comments["TFORM2"] = "data format of field: 4-byte REAL" - matrix_bintablehdu.header.comments["TUNIT2"] = "physical unit of field" - matrix_bintablehdu.header.comments["TTYPE3"] = "label for field 3 " - matrix_bintablehdu.header.comments["TFORM3"] = "data format of field: 2-byte INTEGER" - matrix_bintablehdu.header.comments["TTYPE4"] = "label for field 4" - matrix_bintablehdu.header.comments["TFORM4"] = "data format of field: variable length array" - matrix_bintablehdu.header.comments["TTYPE5"] = "label for field 5" - matrix_bintablehdu.header.comments["TFORM5"] = "data format of field: variable length array" - 
matrix_bintablehdu.header.comments["TTYPE6"] = "label for field 6" - matrix_bintablehdu.header.comments["TFORM6"] = "data format of field: variable length array" - - matrix_bintablehdu.header["EXTNAME"] = ("MATRIX","name of this binary table extension") - matrix_bintablehdu.header["TELESCOP"] = ("COSI","mission/satellite name") - matrix_bintablehdu.header["INSTRUME"] = ("COSI","instrument/detector name") - matrix_bintablehdu.header["FILTER"] = ("NONE","filter in use") - matrix_bintablehdu.header["CHANTYPE"] = ("PI","total number of detector channels") - matrix_bintablehdu.header["DETCHANS"] = (len(self.Em_lo),"total number of detector channels") - matrix_bintablehdu.header["HDUCLASS"] = ("OGIP","format conforms to OGIP standard") - matrix_bintablehdu.header["HDUCLAS1"] = ("RESPONSE","dataset relates to spectral response") - matrix_bintablehdu.header["HDUCLAS2"] = ("RSP_MATRIX","dataset is a spectral response matrix") - matrix_bintablehdu.header["HDUVERS"] = ("1.3.0","version of format") - matrix_bintablehdu.header["TLMIN4"] = (0,"minimum value legally allowed in column 4") - - ## Create binary table HDU for EBOUNDS - channels = np.int16(np.arange(len(self.Em_lo))) - e_min = np.float32(self.Em_lo) - e_max = np.float32(self.Em_hi) - - col1_channels = fits.Column(name="CHANNEL", format="I", array=channels) - col2_e_min = fits.Column(name="E_MIN", format="E",unit="keV", array=e_min) - col3_e_max = fits.Column(name="E_MAX", format="E",unit="keV", array=e_max) - cols = fits.ColDefs([col1_channels, col2_e_min, col3_e_max]) - ebounds_bintablehdu = fits.BinTableHDU.from_columns(cols) - - ebounds_bintablehdu.header.comments["TTYPE1"] = "label for field 1" - ebounds_bintablehdu.header.comments["TFORM1"] = "data format of field: 2-byte INTEGER" - ebounds_bintablehdu.header.comments["TTYPE2"] = "label for field 2" - ebounds_bintablehdu.header.comments["TFORM2"] = "data format of field: 4-byte REAL" - ebounds_bintablehdu.header.comments["TUNIT2"] = "physical unit of field" - ebounds_bintablehdu.header.comments["TTYPE3"] = "label for field 3" - ebounds_bintablehdu.header.comments["TFORM3"] = "data format of field: 4-byte REAL" - ebounds_bintablehdu.header.comments["TUNIT3"] = "physical unit of field" - - ebounds_bintablehdu.header["EXTNAME"] = ("EBOUNDS","name of this binary table extension") - ebounds_bintablehdu.header["TELESCOP"] = ("COSI","mission/satellite") - ebounds_bintablehdu.header["INSTRUME"] = ("COSI","nstrument/detector name") - ebounds_bintablehdu.header["FILTER"] = ("NONE","filter in use") - ebounds_bintablehdu.header["CHANTYPE"] = ("PI","channel type (PHA or PI)") - ebounds_bintablehdu.header["DETCHANS"] = (len(self.Em_lo),"total number of detector channels") - ebounds_bintablehdu.header["HDUCLASS"] = ("OGIP","format conforms to OGIP standard") - ebounds_bintablehdu.header["HDUCLAS1"] = ("RESPONSE","dataset relates to spectral response") - ebounds_bintablehdu.header["HDUCLAS2"] = ("EBOUNDS","dataset is a spectral response matrix") - ebounds_bintablehdu.header["HDUVERS"] = ("1.2.0","version of format") - - new_rmfhdus = fits.HDUList([primaryhdu, matrix_bintablehdu,ebounds_bintablehdu]) - new_rmfhdus.writeto(f'{self.out_name}.rmf', overwrite=True) - - return - - def get_pha(self, src_counts, errors, rmf_file = None, arf_file = None, bkg_file = None, exposure_time = None, dts = None, telescope="COSI", instrument="COSI"): - - """ - Generate the pha file that can be read by XSPEC. This file stores the counts info of the source. 
- - Parameters - ---------- - src_counts : numpy.ndarray - The counts in each energy band. If you have src_counts with unit counts/kev/s, you must convert it to counts by multiplying it with exposure time and the energy band width. - errors : numpy.ndarray - The error for counts. It has the same unit requirement as src_counts. - rmf_file : str, optional - The rmf file name to be written into the pha file (the default is `None`, which implies that it uses the rmf file generate by function `get_rmf`) - arf_file : str, optional - The arf file name to be written into the pha file (the default is `None`, which implies that it uses the arf file generate by function `get_arf`) - bkg_file : str, optional - The background file name (the default is `None`, which implied the `src_counts` is source counts only). - exposure_time : float, optional - The exposure time for this source observation (the default is `None`, which implied that the exposure time will be calculated by `dts`). - dts : numpy.ndarray, optional - It's used to calculate the exposure time. It has the same effect as `exposure_time`. If both `exposure_time` and `dts` are given, `dts` will write over the exposure_time (the default is `None`, which implies that the `dts` will be read from the instance). - telescope : str, optional - The name of the telecope (the default is "COSI"). - instrument : str, optional - The instrument name (the default is "COSI"). - """ - - self.src_counts = src_counts - self.errors = errors - - if bkg_file != None: - self.bkg_file = bkg_file - else: - self.bkg_file = "None" - - self.bkg_file = bkg_file - - if rmf_file != None: - self.rmf_file = rmf_file - else: - self.rmf_file = f'{self.out_name}.rmf' - - if arf_file != None: - self.arf_file = arf_file - else: - self.arf_file = f'{self.out_name}.arf' - - if exposure_time != None: - self.exposure_time = exposure_time - if dts != None: - self.dts = self.__str_or_array(dts) - self.exposure_time = self.dts.sum() - self.telescope = telescope - self.instrument = instrument - self.channel_number = len(self.src_counts) - - # define other hardcoded inputs - copyright_string=" FITS (Flexible Image Transport System) format is defined in 'Astronomy and Astrophysics', volume 376, page 359; bibcode: 2001A&A...376..359H " - channels = np.arange(self.channel_number) - - # Create PrimaryHDU - primaryhdu = fits.PrimaryHDU() # create an empty primary HDU - primaryhdu.header["BITPIX"] = -32 # since it's an empty HDU, I can just change the data type by resetting the BIPTIX value - primaryhdu.header["COMMENT"] = copyright_string # add comments - primaryhdu.header["TELESCOP"] = telescope # add telescope keyword valie - primaryhdu.header["INSTRUME"] = instrument # add instrument keyword valie - primaryhdu.header # print headers and their values - - # Create binary table HDU - a1 = np.array(channels,dtype="int32") # I guess I need to convert the dtype to match the format J - a2 = np.array(self.src_counts,dtype="int64") # int32 is not enough for counts - a3 = np.array(self.errors,dtype="int64") # int32 is not enough for errors - col1 = fits.Column(name="CHANNEL", format="J", array=a1) - col2 = fits.Column(name="COUNTS", format="K", array=a2,unit="count") - col3 = fits.Column(name="STAT_ERR", format="K", array=a3,unit="count") - cols = fits.ColDefs([col1, col2, col3]) # create a ColDefs (column-definitions) object for all columns - bintablehdu = fits.BinTableHDU.from_columns(cols) # create a binary table HDU object - - #add other BinTableHDU hear keywords,their values, and comments - 
bintablehdu.header.comments["TTYPE1"] = "label for field 1" - bintablehdu.header.comments["TFORM1"] = "data format of field: 32-bit integer" - bintablehdu.header.comments["TTYPE2"] = "label for field 2" - bintablehdu.header.comments["TFORM2"] = "data format of field: 32-bit integer" - bintablehdu.header.comments["TUNIT2"] = "physical unit of field 2" - - - bintablehdu.header["EXTNAME"] = ("SPECTRUM","name of this binary table extension") - bintablehdu.header["TELESCOP"] = (self.telescope,"telescope/mission name") - bintablehdu.header["INSTRUME"] = (self.instrument,"instrument/detector name") - bintablehdu.header["FILTER"] = ("NONE","filter type if any") - bintablehdu.header["EXPOSURE"] = (self.exposure_time,"integration time in seconds") - bintablehdu.header["BACKFILE"] = (self.bkg_file,"background filename") - bintablehdu.header["BACKSCAL"] = (1,"background scaling factor") - bintablehdu.header["CORRFILE"] = ("NONE","associated correction filename") - bintablehdu.header["CORRSCAL"] = (1,"correction file scaling factor") - bintablehdu.header["CORRSCAL"] = (1,"correction file scaling factor") - bintablehdu.header["RESPFILE"] = (self.rmf_file,"associated rmf filename") - bintablehdu.header["ANCRFILE"] = (self.arf_file,"associated arf filename") - bintablehdu.header["AREASCAL"] = (1,"area scaling factor") - bintablehdu.header["STAT_ERR"] = (0,"statistical error specified if any") - bintablehdu.header["SYS_ERR"] = (0,"systematic error specified if any") - bintablehdu.header["GROUPING"] = (0,"grouping of the data has been defined if any") - bintablehdu.header["QUALITY"] = (0,"data quality information specified") - bintablehdu.header["HDUCLASS"] = ("OGIP","format conforms to OGIP standard") - bintablehdu.header["HDUCLAS1"] = ("SPECTRUM","PHA dataset") - bintablehdu.header["HDUVERS"] = ("1.2.1","version of format") - bintablehdu.header["POISSERR"] = (False,"Poissonian errors to be assumed, T as True") - bintablehdu.header["CHANTYPE"] = ("PI","channel type (PHA or PI)") - bintablehdu.header["DETCHANS"] = (self.channel_number,"total number of detector channels") - - new_phahdus = fits.HDUList([primaryhdu, bintablehdu]) - new_phahdus.writeto(f'{self.out_name}.pha', overwrite=True) - - return - - - def plot_arf(self, file_name = None, save_name = None, dpi = 300): - - """ - Read the arf fits file, plot and save it. - - Parameters - ---------- - file_name: str, optional - The directory if the arf fits file (the default is `None`, which implies the file name will be read from the instance). - save_name: str, optional - The name of the saved image of effective area (the default is `None`, which implies the file name will be read from the instance). - dpi: int, optional - The dpi of the saved image (the default is 300). 
- """ - - if file_name != None: - self.file_name = file_name - else: - self.file_name = f'{self.out_name}.arf' - - if save_name != None: - self.save_name = save_name - else: - self.save_name = self.out_name - - self.dpi = dpi - - self.arf = fits.open(self.file_name) # read file - - # SPECRESP HDU - self.specresp_hdu = self.arf["SPECRESP"] - - self.areas = np.array(self.specresp_hdu.data["SPECRESP"]) - self.Em_lo = np.array(self.specresp_hdu.data["ENERG_LO"]) - self.Em_hi = np.array(self.specresp_hdu.data["ENERG_HI"]) - - E_center = (self.Em_lo+self.Em_hi)/2 - E_edges = np.append(self.Em_lo,self.Em_hi[-1]) - - fig, ax = plt.subplots() - ax.hist(E_center,E_edges,weights=self.areas,histtype='step') - - ax.set_title("Effective area") - ax.set_xlabel("Energy[$keV$]") - ax.set_ylabel(r"Effective area [$cm^2$]") - ax.set_xscale("log") - fig.savefig(f"Effective_area_for_{self.save_name}.png", bbox_inches = "tight", pad_inches=0.1, dpi=self.dpi) - #fig.show() - - return - - - def plot_rmf(self, file_name = None, save_name = None, dpi = 300): - - """ - Read the rmf fits file, plot and save it. - - Parameters - ---------- - file_name: str, optional - The directory if the arf fits file (the default is `None`, which implies the file name will be read from the instance). - save_name: str, optional - The name of the saved image of effective area (the default is `None`, which implies the file name will be read from the instance). - dpi: int, optional - The dpi of the saved image (the default is 300). - """ - - if file_name != None: - self.file_name = file_name - else: - self.file_name = f'{self.out_name}.rmf' - - if save_name != None: - self.save_name = save_name - else: - self.save_name = self.out_name - - self.dpi = dpi - - # Read rmf file - self.rmf = fits.open(self.file_name) # read file - - # Read the ENOUNDS information - ebounds_ext = self.rmf["EBOUNDS"] - channel_low = ebounds_ext.data["E_MIN"] # energy bin lower edges for channels (channels are just incident energy bins) - channel_high = ebounds_ext.data["E_MAX"] # energy bin higher edges for channels (channels are just incident energy bins) - - # Read the MATRIX extension - matrix_ext = self.rmf['MATRIX'] - #logger.info(repr(matrix_hdu.header[:60])) - energy_low = matrix_ext.data["ENERG_LO"] # energy bin lower edges for measured energies - energy_high = matrix_ext.data["ENERG_HI"] # energy bin higher edges for measured energies - data = matrix_ext.data - - # Create a 2-d numpy array and store probability data into the redistribution matrix - rmf_matrix = np.zeros((len(energy_low),len(channel_low))) # create an empty matrix - for i in np.arange(data.shape[0]): # i is the measured energy index, examine the matrix_ext.data rows by rows - if data[i][5].sum() == 0: # if the sum of probabilities is zero, then skip since there is no data at all - pass - else: - #measured_energy_index = np.argwhere(energy_low == data[157][0])[0][0] - f_chan = data[i][3] # get the starting channel of each subsets - n_chann = data[i][4] # get the number of channels in each subsets - matrix = data[i][5] # get the probabilities of this row (incident energy) - indices = [] - for k in f_chan: - channels = 0 - channels = np.arange(k,k + n_chann[np.argwhere(f_chan == k)]).tolist() # generate the cha - indices += channels # fappend the channels togeter - indices = np.array(indices) - for m in indices: - rmf_matrix[i][m] = matrix[np.argwhere(indices == m)[0][0]] # write the probabilities into the empty matrix - - - # plot the redistribution matrix - xcenter = 
np.divide(energy_low+energy_high,2) - x_center_coords = np.repeat(xcenter, 10) - y_center_coords = np.tile(xcenter, 10) - energy_all_edges = np.append(energy_low,energy_high[-1]) - #bin_edges = np.array([incident_energy_bins,incident_energy_bins]) # doesn't work - bin_edges = np.vstack((energy_all_edges, energy_all_edges)) - #logger.info(bin_edges) - - self.probability = [] - for i in np.arange(10): - for j in np.arange(10): - self.probability.append(rmf_matrix[i][j]) - #logger.info(type(probability)) - - plt.hist2d(x=x_center_coords,y=y_center_coords,weights=self.probability,bins=bin_edges, norm=LogNorm()) - plt.xscale('log') - plt.yscale('log') - plt.xlabel("Incident energy [$keV$]") - plt.ylabel("Measured energy [$keV$]") - plt.title("Redistribution matrix") - #plt.xlim([70,10000]) - #plt.ylim([70,10000]) - plt.colorbar(norm=LogNorm()) - plt.savefig(f"Redistribution_matrix_for_{self.save_name}.png", bbox_inches = "tight", pad_inches=0.1, dpi=300) - #plt.show() - - return diff --git a/cosipy/spacecraftfile/__init__.py b/cosipy/spacecraftfile/__init__.py index fd9d6898..9eb8444d 100644 --- a/cosipy/spacecraftfile/__init__.py +++ b/cosipy/spacecraftfile/__init__.py @@ -1,2 +1,2 @@ -from .SpacecraftFile import SpacecraftFile +from .spacecraft_file import * from .scatt_map import SpacecraftAttitudeMap diff --git a/cosipy/spacecraftfile/rsp_to_arf_rmf.py b/cosipy/spacecraftfile/rsp_to_arf_rmf.py new file mode 100644 index 00000000..7d5ae516 --- /dev/null +++ b/cosipy/spacecraftfile/rsp_to_arf_rmf.py @@ -0,0 +1,543 @@ +import logging +logger = logging.getLogger(__name__) + +from cosipy import SpacecraftFile + +import numpy as np +import astropy.units as u +from astropy.io import fits +from astropy.coordinates import SkyCoord + +import matplotlib.pyplot as plt +from matplotlib.colors import LogNorm + +from cosipy.response import FullDetectorResponse + +class RspArfRmfConverter: + + def __init__(self, response:FullDetectorResponse, ori:SpacecraftFile, target_coord:SkyCoord): + + self.response = response + self.ori = ori + + self.dwell_map = self.ori.get_dwell_map(target_coord, nside = response.nside, scheme = response.scheme) + + def get_psr_rsp(self): + + """ + Generates the point source response based on the response file and dwell obstime map. + livetime is used to find the exposure obstime for this observation. + + Parameters + ---------- + :response : str or pathlib.Path, optional + The response for the observation (the defaul is `None`, which implies that the `response` will be read from the instance). + dwell_map : str, optional + The obstime dwell map for the source, you can load saved dwell obstime map using this parameter if you've saved it before (the defaul is `None`, which implies that the `dwell_map` will be read from the instance). + dts : numpy.ndarray or str, optional + The elapsed obstime for each pointing. It must has the same size as the pointings. If you have saved this array, you can load it using this parameter (the defaul is `None`, which implies that the `livetime` will be read from the instance). + + Returns + ------- + Ei_edges : numpy.ndarray + The edges of the incident energy. + Ei_lo : numpy.ndarray + The lower edges of the incident energy. + Ei_hi : numpy.ndarray + The upper edges of the incident energy. + Em_edges : numpy.ndarray + The edges of the measured energy. + Em_lo : numpy.ndarray + The lower edges of the measured energy. + Em_hi : numpy.ndarray + The upper edges of the measured energy. + areas : numpy.ndarray + The effective area of each energy bin. 
+ matrix : numpy.ndarray + The energy dispersion matrix. + pa_convention : str, optional + Polarization convention of response ('RelativeX', 'RelativeY', or 'RelativeZ') + """ + + with self.response as response: + + # get point source response + self.psr = response.get_point_source_response(self.dwell_map) + + self.Ei_edges = np.array(response.axes['Ei'].edges) + self.Ei_lo = np.float32(self.Ei_edges[:-1]) # use float32 to match the requirement of the data type + self.Ei_hi = np.float32(self.Ei_edges[1:]) + + self.Em_edges = np.array(response.axes['Em'].edges) + self.Em_lo = np.float32(self.Em_edges[:-1]) + self.Em_hi = np.float32(self.Em_edges[1:]) + + # get the effective area and matrix + logger.info("Getting the effective area ...") + self.areas = np.float32(np.array(self.psr.project('Ei').to_dense().contents)) / self.ori.livetime.to_value( + u.second).sum() + spectral_response = np.float32(np.array(self.psr.project(['Ei', 'Em']).to_dense().contents)) + self.matrix = np.float32(np.zeros((self.Ei_lo.size, self.Em_lo.size))) # initate the matrix + + logger.info("Getting the energy redistribution matrix ...") + for i in np.arange(self.Ei_lo.size): + new_raw = spectral_response[i, :] / spectral_response[i, :].sum() + self.matrix[i, :] = new_raw + self.matrix = self.matrix.T + + return self.Ei_edges, self.Ei_lo, self.Ei_hi, self.Em_edges, self.Em_lo, self.Em_hi, self.areas, self.matrix + + def get_arf(self, out_name=None): + + """ + Converts the point source response to an arf file that can be read by XSPEC. + + Parameters + ---------- + out_name: str, optional + The name of the arf file to save. (the default is `None`, which implies that the saving name will be the target name of the instance). + """ + + if out_name is None: + self.out_name = self.target_name + else: + self.out_name = out_name + + # blow write the arf file + copyright_string = " FITS (Flexible Image Transport System) format is defined in 'Astronomy and Astrophysics', volume 376, page 359; bibcode: 2001A&A...376..359H " + + ## Create PrimaryHDU + primaryhdu = fits.PrimaryHDU() # create an empty primary HDU + primaryhdu.header[ + "BITPIX"] = -32 # since it's an empty HDU, I can just change the data type by resetting the BIPTIX value + primaryhdu.header["COMMENT"] = copyright_string # add comments + primaryhdu.header # print headers and their values + + col1_energ_lo = fits.Column(name="ENERG_LO", format="E", unit="keV", array=self.Em_lo) + col2_energ_hi = fits.Column(name="ENERG_HI", format="E", unit="keV", array=self.Em_hi) + col3_specresp = fits.Column(name="SPECRESP", format="E", unit="cm**2", array=self.areas) + cols = fits.ColDefs([col1_energ_lo, col2_energ_hi, + col3_specresp]) # create a ColDefs (column-definitions) object for all columns + specresp_bintablehdu = fits.BinTableHDU.from_columns(cols) # create a binary table HDU object + + specresp_bintablehdu.header.comments["TTYPE1"] = "label for field 1" + specresp_bintablehdu.header.comments["TFORM1"] = "data format of field: 4-byte REAL" + specresp_bintablehdu.header.comments["TUNIT1"] = "physical unit of field" + specresp_bintablehdu.header.comments["TTYPE2"] = "label for field 2" + specresp_bintablehdu.header.comments["TFORM2"] = "data format of field: 4-byte REAL" + specresp_bintablehdu.header.comments["TUNIT2"] = "physical unit of field" + specresp_bintablehdu.header.comments["TTYPE3"] = "label for field 3" + specresp_bintablehdu.header.comments["TFORM3"] = "data format of field: 4-byte REAL" + specresp_bintablehdu.header.comments["TUNIT3"] = "physical unit of 
field" + + specresp_bintablehdu.header["EXTNAME"] = ("SPECRESP", "name of this binary table extension") + specresp_bintablehdu.header["TELESCOP"] = ("COSI", "mission/satellite name") + specresp_bintablehdu.header["INSTRUME"] = ("COSI", "instrument/detector name") + specresp_bintablehdu.header["FILTER"] = ("NONE", "filter in use") + specresp_bintablehdu.header["HDUCLAS1"] = ("RESPONSE", "dataset relates to spectral response") + specresp_bintablehdu.header["HDUCLAS2"] = ("SPECRESP", "extension contains an ARF") + specresp_bintablehdu.header["HDUVERS"] = ("1.1.0", "version of format") + + new_arfhdus = fits.HDUList([primaryhdu, specresp_bintablehdu]) + new_arfhdus.writeto(f'{self.out_name}.arf', overwrite=True) + + return + + def get_rmf(self, out_name=None): + + """ + Converts the point source response to an rmf file that can be read by XSPEC. + + Parameters + ---------- + out_name: str, optional + The name of the arf file to save. (the default is None, which implies that the saving name will be the target name of the instance). + """ + + if out_name is None: + self.out_name = self.target_name + else: + self.out_name = out_name + + # blow write the arf file + copyright_string = " FITS (Flexible Image Transport System) format is defined in 'Astronomy and Astrophysics', volume 376, page 359; bibcode: 2001A&A...376..359H " + + ## Create PrimaryHDU + primaryhdu = fits.PrimaryHDU() # create an empty primary HDU + primaryhdu.header[ + "BITPIX"] = -32 # since it's an empty HDU, I can just change the data type by resetting the BIPTIX value + primaryhdu.header["COMMENT"] = copyright_string # add comments + primaryhdu.header # print headers and their values + + ## Create binary table HDU for MATRIX + ### prepare colums + energ_lo = [] + energ_hi = [] + n_grp = [] + f_chan = [] + n_chan = [] + matrix = [] + for i in np.arange(len(self.Ei_lo)): + energ_lo_temp = np.float32(self.Em_lo[i]) + energ_hi_temp = np.float32(self.Ei_hi[i]) + + if self.matrix[:, i].sum() != 0: + nz_matrix_idx = np.nonzero(self.matrix[:, i])[0] # non-zero index for the matrix + subsets = np.split(nz_matrix_idx, np.where(np.diff(nz_matrix_idx) != 1)[0] + 1) + n_grp_temp = np.int16(len(subsets)) + f_chan_temp = [] + n_chan_temp = [] + matrix_temp = [] + for m in np.arange(n_grp_temp): + f_chan_temp += [subsets[m][0]] + n_chan_temp += [len(subsets[m])] + for m in nz_matrix_idx: + matrix_temp += [self.matrix[:, i][m]] + f_chan_temp = np.int16(np.array(f_chan_temp)) + n_chan_temp = np.int16(np.array(n_chan_temp)) + matrix_temp = np.float32(np.array(matrix_temp)) + else: + n_grp_temp = np.int16(0) + f_chan_temp = np.int16(np.array([0])) + n_chan_temp = np.int16(np.array([0])) + matrix_temp = np.float32(np.array([0])) + + energ_lo.append(energ_lo_temp) + energ_hi.append(energ_hi_temp) + n_grp.append(n_grp_temp) + f_chan.append(f_chan_temp) + n_chan.append(n_chan_temp) + matrix.append(matrix_temp) + + col1_energ_lo = fits.Column(name="ENERG_LO", format="E", unit="keV", array=energ_lo) + col2_energ_hi = fits.Column(name="ENERG_HI", format="E", unit="keV", array=energ_hi) + col3_n_grp = fits.Column(name="N_GRP", format="I", array=n_grp) + col4_f_chan = fits.Column(name="F_CHAN", format="PI(54)", array=f_chan) + col5_n_chan = fits.Column(name="N_CHAN", format="PI(54)", array=n_chan) + col6_n_chan = fits.Column(name="MATRIX", format="PE(161)", array=matrix) + cols = fits.ColDefs([col1_energ_lo, col2_energ_hi, col3_n_grp, col4_f_chan, col5_n_chan, + col6_n_chan]) # create a ColDefs (column-definitions) object for all columns + 
matrix_bintablehdu = fits.BinTableHDU.from_columns(cols) # create a binary table HDU object + + matrix_bintablehdu.header.comments["TTYPE1"] = "label for field 1 " + matrix_bintablehdu.header.comments["TFORM1"] = "data format of field: 4-byte REAL" + matrix_bintablehdu.header.comments["TUNIT1"] = "physical unit of field" + matrix_bintablehdu.header.comments["TTYPE2"] = "label for field 2" + matrix_bintablehdu.header.comments["TFORM2"] = "data format of field: 4-byte REAL" + matrix_bintablehdu.header.comments["TUNIT2"] = "physical unit of field" + matrix_bintablehdu.header.comments["TTYPE3"] = "label for field 3 " + matrix_bintablehdu.header.comments["TFORM3"] = "data format of field: 2-byte INTEGER" + matrix_bintablehdu.header.comments["TTYPE4"] = "label for field 4" + matrix_bintablehdu.header.comments["TFORM4"] = "data format of field: variable length array" + matrix_bintablehdu.header.comments["TTYPE5"] = "label for field 5" + matrix_bintablehdu.header.comments["TFORM5"] = "data format of field: variable length array" + matrix_bintablehdu.header.comments["TTYPE6"] = "label for field 6" + matrix_bintablehdu.header.comments["TFORM6"] = "data format of field: variable length array" + + matrix_bintablehdu.header["EXTNAME"] = ("MATRIX", "name of this binary table extension") + matrix_bintablehdu.header["TELESCOP"] = ("COSI", "mission/satellite name") + matrix_bintablehdu.header["INSTRUME"] = ("COSI", "instrument/detector name") + matrix_bintablehdu.header["FILTER"] = ("NONE", "filter in use") + matrix_bintablehdu.header["CHANTYPE"] = ("PI", "total number of detector channels") + matrix_bintablehdu.header["DETCHANS"] = (len(self.Em_lo), "total number of detector channels") + matrix_bintablehdu.header["HDUCLASS"] = ("OGIP", "format conforms to OGIP standard") + matrix_bintablehdu.header["HDUCLAS1"] = ("RESPONSE", "dataset relates to spectral response") + matrix_bintablehdu.header["HDUCLAS2"] = ("RSP_MATRIX", "dataset is a spectral response matrix") + matrix_bintablehdu.header["HDUVERS"] = ("1.3.0", "version of format") + matrix_bintablehdu.header["TLMIN4"] = (0, "minimum value legally allowed in column 4") + + ## Create binary table HDU for EBOUNDS + channels = np.int16(np.arange(len(self.Em_lo))) + e_min = np.float32(self.Em_lo) + e_max = np.float32(self.Em_hi) + + col1_channels = fits.Column(name="CHANNEL", format="I", array=channels) + col2_e_min = fits.Column(name="E_MIN", format="E", unit="keV", array=e_min) + col3_e_max = fits.Column(name="E_MAX", format="E", unit="keV", array=e_max) + cols = fits.ColDefs([col1_channels, col2_e_min, col3_e_max]) + ebounds_bintablehdu = fits.BinTableHDU.from_columns(cols) + + ebounds_bintablehdu.header.comments["TTYPE1"] = "label for field 1" + ebounds_bintablehdu.header.comments["TFORM1"] = "data format of field: 2-byte INTEGER" + ebounds_bintablehdu.header.comments["TTYPE2"] = "label for field 2" + ebounds_bintablehdu.header.comments["TFORM2"] = "data format of field: 4-byte REAL" + ebounds_bintablehdu.header.comments["TUNIT2"] = "physical unit of field" + ebounds_bintablehdu.header.comments["TTYPE3"] = "label for field 3" + ebounds_bintablehdu.header.comments["TFORM3"] = "data format of field: 4-byte REAL" + ebounds_bintablehdu.header.comments["TUNIT3"] = "physical unit of field" + + ebounds_bintablehdu.header["EXTNAME"] = ("EBOUNDS", "name of this binary table extension") + ebounds_bintablehdu.header["TELESCOP"] = ("COSI", "mission/satellite") + ebounds_bintablehdu.header["INSTRUME"] = ("COSI", "nstrument/detector name") + 
ebounds_bintablehdu.header["FILTER"] = ("NONE", "filter in use")
+        ebounds_bintablehdu.header["CHANTYPE"] = ("PI", "channel type (PHA or PI)")
+        ebounds_bintablehdu.header["DETCHANS"] = (len(self.Em_lo), "total number of detector channels")
+        ebounds_bintablehdu.header["HDUCLASS"] = ("OGIP", "format conforms to OGIP standard")
+        ebounds_bintablehdu.header["HDUCLAS1"] = ("RESPONSE", "dataset relates to spectral response")
+        ebounds_bintablehdu.header["HDUCLAS2"] = ("EBOUNDS", "dataset is a spectral response matrix")
+        ebounds_bintablehdu.header["HDUVERS"] = ("1.2.0", "version of format")
+
+        new_rmfhdus = fits.HDUList([primaryhdu, matrix_bintablehdu, ebounds_bintablehdu])
+        new_rmfhdus.writeto(f'{self.out_name}.rmf', overwrite=True)
+
+        return
+
+    def get_pha(self, src_counts, errors, rmf_file=None, arf_file=None, bkg_file=None, exposure_time=None, dts=None,
+                telescope="COSI", instrument="COSI"):
+
+        """
+        Generate the pha file that can be read by XSPEC. This file stores the counts information of the source.
+
+        Parameters
+        ----------
+        src_counts : numpy.ndarray
+            The counts in each energy band. If you have src_counts with unit counts/keV/s, you must convert it to counts by multiplying it by the exposure time and the energy band width.
+        errors : numpy.ndarray
+            The error for counts. It has the same unit requirement as src_counts.
+        rmf_file : str, optional
+            The rmf file name to be written into the pha file (the default is `None`, which implies that it uses the rmf file generated by the function `get_rmf`).
+        arf_file : str, optional
+            The arf file name to be written into the pha file (the default is `None`, which implies that it uses the arf file generated by the function `get_arf`).
+        bkg_file : str, optional
+            The background file name (the default is `None`, which implies that `src_counts` contains source counts only).
+        exposure_time : float, optional
+            The exposure time for this source observation (the default is `None`, which implies that the exposure time will be calculated from `dts`).
+        dts : numpy.ndarray, optional
+            Used to calculate the exposure time. It has the same effect as `exposure_time`. If both `exposure_time` and `dts` are given, `dts` will write over `exposure_time` (the default is `None`, which implies that the `livetime` will be read from the instance).
+        telescope : str, optional
+            The name of the telescope (the default is "COSI").
+        instrument : str, optional
+            The instrument name (the default is "COSI").
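+
+        Examples
+        --------
+        A minimal illustrative call, assuming `converter` is a
+        `RspArfRmfConverter` on which `get_psr_rsp`, `get_arf` and `get_rmf`
+        have already been run; the counts below are placeholder values, not
+        real data::
+
+            import numpy as np
+
+            # placeholder counts and errors, one entry per measured-energy channel
+            counts = np.array([10., 47., 67., 13., 54., 31., 18., 7., 1., 0.])
+            errors = np.sqrt(counts)
+            converter.get_pha(src_counts=counts, errors=errors, exposure_time=10)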
+        """
+
+        self.src_counts = src_counts
+        self.errors = errors
+
+        if bkg_file is not None:
+            self.bkg_file = bkg_file
+        else:
+            self.bkg_file = "None"
+
+        if rmf_file is not None:
+            self.rmf_file = rmf_file
+        else:
+            self.rmf_file = f'{self.out_name}.rmf'
+
+        if arf_file is not None:
+            self.arf_file = arf_file
+        else:
+            self.arf_file = f'{self.out_name}.arf'
+
+        if exposure_time is None:
+            self.exposure_time = exposure_time
+            if dts is not None:
+                livetime = self.__str_or_array(dts)
+                self.exposure_time = livetime.sum()
+        self.telescope = telescope
+        self.instrument = instrument
+        self.channel_number = len(self.src_counts)
+
+        # define other hardcoded inputs
+        copyright_string = " FITS (Flexible Image Transport System) format is defined in 'Astronomy and Astrophysics', volume 376, page 359; bibcode: 2001A&A...376..359H "
+        channels = np.arange(self.channel_number)
+
+        # Create PrimaryHDU
+        primaryhdu = fits.PrimaryHDU()  # create an empty primary HDU
+        primaryhdu.header[
+            "BITPIX"] = -32  # since it's an empty HDU, I can just change the data type by resetting the BITPIX value
+        primaryhdu.header["COMMENT"] = copyright_string  # add comments
+        primaryhdu.header["TELESCOP"] = telescope  # add telescope keyword value
+        primaryhdu.header["INSTRUME"] = instrument  # add instrument keyword value
+        primaryhdu.header  # print headers and their values
+
+        # Create binary table HDU
+        a1 = np.array(channels, dtype="int32")  # I guess I need to convert the dtype to match the format J
+        a2 = np.array(self.src_counts, dtype="int64")  # int32 is not enough for counts
+        a3 = np.array(self.errors, dtype="int64")  # int32 is not enough for errors
+        col1 = fits.Column(name="CHANNEL", format="J", array=a1)
+        col2 = fits.Column(name="COUNTS", format="K", array=a2, unit="count")
+        col3 = fits.Column(name="STAT_ERR", format="K", array=a3, unit="count")
+        cols = fits.ColDefs([col1, col2, col3])  # create a ColDefs (column-definitions) object for all columns
+        bintablehdu = fits.BinTableHDU.from_columns(cols)  # create a binary table HDU object
+
+        # add other BinTableHDU header keywords, their values, and comments
+        bintablehdu.header.comments["TTYPE1"] = "label for field 1"
+        bintablehdu.header.comments["TFORM1"] = "data format of field: 32-bit integer"
+        bintablehdu.header.comments["TTYPE2"] = "label for field 2"
+        bintablehdu.header.comments["TFORM2"] = "data format of field: 32-bit integer"
+        bintablehdu.header.comments["TUNIT2"] = "physical unit of field 2"
+
+        bintablehdu.header["EXTNAME"] = ("SPECTRUM", "name of this binary table extension")
+        bintablehdu.header["TELESCOP"] = (self.telescope, "telescope/mission name")
+        bintablehdu.header["INSTRUME"] = (self.instrument, "instrument/detector name")
+        bintablehdu.header["FILTER"] = ("NONE", "filter type if any")
+        bintablehdu.header["EXPOSURE"] = (self.exposure_time, "integration time in seconds")
+        bintablehdu.header["BACKFILE"] = (self.bkg_file, "background filename")
+        bintablehdu.header["BACKSCAL"] = (1, "background scaling factor")
+        bintablehdu.header["CORRFILE"] = ("NONE", "associated correction filename")
+        bintablehdu.header["CORRSCAL"] = (1, "correction file scaling factor")
+        bintablehdu.header["RESPFILE"] = (self.rmf_file, "associated rmf filename")
+        bintablehdu.header["ANCRFILE"] = (self.arf_file, "associated arf filename")
+        bintablehdu.header["AREASCAL"] = (1, "area scaling factor")
+        bintablehdu.header["STAT_ERR"] = (0, "statistical error specified if any")
+        
bintablehdu.header["SYS_ERR"] = (0, "systematic error specified if any") + bintablehdu.header["GROUPING"] = (0, "grouping of the data has been defined if any") + bintablehdu.header["QUALITY"] = (0, "data quality information specified") + bintablehdu.header["HDUCLASS"] = ("OGIP", "format conforms to OGIP standard") + bintablehdu.header["HDUCLAS1"] = ("SPECTRUM", "PHA dataset") + bintablehdu.header["HDUVERS"] = ("1.2.1", "version of format") + bintablehdu.header["POISSERR"] = (False, "Poissonian errors to be assumed, T as True") + bintablehdu.header["CHANTYPE"] = ("PI", "channel type (PHA or PI)") + bintablehdu.header["DETCHANS"] = (self.channel_number, "total number of detector channels") + + new_phahdus = fits.HDUList([primaryhdu, bintablehdu]) + new_phahdus.writeto(f'{self.out_name}.pha', overwrite=True) + + return + + def plot_arf(self, file_name=None, save_name=None, dpi=300): + + """ + Read the arf fits file, plot and save it. + + Parameters + ---------- + file_name: str, optional + The directory if the arf fits file (the default is `None`, which implies the file name will be read from the instance). + save_name: str, optional + The name of the saved image of effective area (the default is `None`, which implies the file name will be read from the instance). + dpi: int, optional + The dpi of the saved image (the default is 300). + """ + + if file_name != None: + self.file_name = file_name + else: + self.file_name = f'{self.out_name}.arf' + + if save_name != None: + self.save_name = save_name + else: + self.save_name = self.out_name + + self.dpi = dpi + + self.arf = fits.open(self.file_name) # read file + + # SPECRESP HDU + self.specresp_hdu = self.arf["SPECRESP"] + + self.areas = np.array(self.specresp_hdu.data["SPECRESP"]) + self.Em_lo = np.array(self.specresp_hdu.data["ENERG_LO"]) + self.Em_hi = np.array(self.specresp_hdu.data["ENERG_HI"]) + + E_center = (self.Em_lo + self.Em_hi) / 2 + E_edges = np.append(self.Em_lo, self.Em_hi[-1]) + + fig, ax = plt.subplots() + ax.hist(E_center, E_edges, weights=self.areas, histtype='step') + + ax.set_title("Effective area") + ax.set_xlabel("Energy[$keV$]") + ax.set_ylabel(r"Effective area [$cm^2$]") + ax.set_xscale("log") + fig.savefig(f"Effective_area_for_{self.save_name}.png", bbox_inches="tight", pad_inches=0.1, dpi=self.dpi) + # fig.show() + + return + + def plot_rmf(self, file_name=None, save_name=None, dpi=300): + + """ + Read the rmf fits file, plot and save it. + + Parameters + ---------- + file_name: str, optional + The directory if the arf fits file (the default is `None`, which implies the file name will be read from the instance). + save_name: str, optional + The name of the saved image of effective area (the default is `None`, which implies the file name will be read from the instance). + dpi: int, optional + The dpi of the saved image (the default is 300). 
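+
+        Examples
+        --------
+        Illustrative only, assuming `converter` is a `RspArfRmfConverter` and an
+        rmf file has already been written with `get_rmf`; the file and image
+        names are placeholders::
+
+            # "test.rmf" is a placeholder file written beforehand by get_rmf(out_name="test")
+            converter.plot_rmf(file_name="test.rmf", save_name="test")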
+ """ + + if file_name != None: + self.file_name = file_name + else: + self.file_name = f'{self.out_name}.rmf' + + if save_name != None: + self.save_name = save_name + else: + self.save_name = self.out_name + + self.dpi = dpi + + # Read rmf file + self.rmf = fits.open(self.file_name) # read file + + # Read the ENOUNDS information + ebounds_ext = self.rmf["EBOUNDS"] + channel_low = ebounds_ext.data[ + "E_MIN"] # energy bin lower edges for channels (channels are just incident energy bins) + channel_high = ebounds_ext.data[ + "E_MAX"] # energy bin higher edges for channels (channels are just incident energy bins) + + # Read the MATRIX extension + matrix_ext = self.rmf['MATRIX'] + # logger.info(repr(matrix_hdu.header[:60])) + energy_low = matrix_ext.data["ENERG_LO"] # energy bin lower edges for measured energies + energy_high = matrix_ext.data["ENERG_HI"] # energy bin higher edges for measured energies + data = matrix_ext.data + + # Create a 2-d numpy array and store probability data into the redistribution matrix + rmf_matrix = np.zeros((len(energy_low), len(channel_low))) # create an empty matrix + for i in np.arange(data.shape[0]): # i is the measured energy index, examine the matrix_ext.data rows by rows + if data[i][5].sum() == 0: # if the sum of probabilities is zero, then skip since there is no data at all + pass + else: + # measured_energy_index = np.argwhere(energy_low == data[157][0])[0][0] + f_chan = data[i][3] # get the starting channel of each subsets + n_chann = data[i][4] # get the number of channels in each subsets + matrix = data[i][5] # get the probabilities of this row (incident energy) + indices = [] + for k in f_chan: + channels = 0 + channels = np.arange(k, k + n_chann[np.argwhere(f_chan == k)]).tolist() # generate the cha + indices += channels # fappend the channels togeter + indices = np.array(indices) + for m in indices: + rmf_matrix[i][m] = matrix[ + np.argwhere(indices == m)[0][0]] # write the probabilities into the empty matrix + + # plot the redistribution matrix + xcenter = np.divide(energy_low + energy_high, 2) + x_center_coords = np.repeat(xcenter, 10) + y_center_coords = np.tile(xcenter, 10) + energy_all_edges = np.append(energy_low, energy_high[-1]) + # bin_edges = np.array([incident_energy_bins,incident_energy_bins]) # doesn't work + bin_edges = np.vstack((energy_all_edges, energy_all_edges)) + # logger.info(bin_edges) + + self.probability = [] + for i in np.arange(10): + for j in np.arange(10): + self.probability.append(rmf_matrix[i][j]) + # logger.info(type(probability)) + + plt.hist2d(x=x_center_coords, y=y_center_coords, weights=self.probability, bins=bin_edges, norm=LogNorm()) + plt.xscale('log') + plt.yscale('log') + plt.xlabel("Incident energy [$keV$]") + plt.ylabel("Measured energy [$keV$]") + plt.title("Redistribution matrix") + # plt.xlim([70,10000]) + # plt.ylim([70,10000]) + plt.colorbar(norm=LogNorm()) + plt.savefig(f"Redistribution_matrix_for_{self.save_name}.png", bbox_inches="tight", pad_inches=0.1, dpi=300) + # plt.show() + + return \ No newline at end of file diff --git a/cosipy/spacecraftfile/spacecraft_file.py b/cosipy/spacecraftfile/spacecraft_file.py new file mode 100644 index 00000000..f9803229 --- /dev/null +++ b/cosipy/spacecraftfile/spacecraft_file.py @@ -0,0 +1,544 @@ +import numpy as np + +import astropy.units as u + +from astropy.time import Time +from astropy.coordinates import SkyCoord, EarthLocation, GCRS, \ + concatenate_representations, ITRS +from histpy import Histogram, HealpixAxis, TimeAxis +from mhealpy import HealpixMap 
+ +from scoords import Attitude, SpacecraftFrame + +from .scatt_map import SpacecraftAttitudeMap + +from typing import Union + +import logging +logger = logging.getLogger(__name__) + +__all__ = ["SpacecraftFile"] + +class SpacecraftFile: + + def __init__(self, + obstime: Time, + attitude: Attitude, + location: Union[EarthLocation, GCRS, ITRS], + livetime: u.Quantity = None): + """ + Handles the spacecraft orientation. Calculates the dwell obstime + map and point source response over a certain orientation period. + Exports the point source response as RMF and ARF files that can be read by XSPEC. + + Parameters + ---------- + obstime: + The obstime stamps for each pointings. Note this is NOT the obstime duration, see "livetime". + attitude: + Spacecraft orientation with respect to an inertial system. + location: + Location of the spacecraft at each timestamp. + livetime: + Time the instrument was live for the corresponding + obstime bin. Should have one less element than the number of + timestamps. If not provided, it will assume that the instrument + was fully on without interrruptions. + """ + + time_axis = TimeAxis(obstime, copy = False, label= 'obstime') + + if livetime is None: + livetime = time_axis.widths.to(u.s) + + self._hist = Histogram(time_axis, livetime, copy_contents = False) + + if not (location.shape == () or location.shape == obstime.shape): + raise ValueError(f"'location' must be a scalar or have the same length as the timestamps ({obstime.shape}), but it has shape ({location.shape})") + + if not (attitude.shape == () or attitude.shape == obstime.shape): + raise ValueError(f"'attitude' must be a scalar or have the same length as the timestamps ({obstime.shape}), but it has shape ({attitude.shape})") + + self._attitude = attitude + + self._location = self._standardize_location(location) + + def _standardize_location(self, location: Union[EarthLocation, GCRS, ITRS]): + + if isinstance(location, EarthLocation): + # Already the standard format + return location + + elif isinstance(location, GCRS): + # GCRS -> ITRS and call again + return self._standardize_location(location.transform_to(ITRS(self.obstime))) + + elif isinstance(location, ITRS): + # ITRS -> EarthLocation + return location.earth_location + + else: + raise TypeError(f"Location type {type(location)} not supported.") + + @property + def nintervals(self): + return self._hist.nbins + + @property + def intervals_duration(self): + return self._hist.axis.widths.to(self._hist.unit) + + @property + def intervals_tstart(self): + return self._hist.axis.lower_bounds + + @property + def intervals_tstop(self): + return self._hist.axis.upper_bounds + + @property + def tstart(self): + return self._hist.axis.lo_lim + + @property + def tstop(self): + return self._hist.axis.hi_lim + + @property + def npoints(self): + return self._hist.nbins + 1 + + @property + def obstime(self): + return self._hist.axis.edges + + @property + def livetime(self): + return self._hist.contents + + @property + def attitude(self): + return self._attitude + + @property + def location(self)->EarthLocation: + return self._location + + @classmethod + def parse_from_file(cls, file) -> "SpacecraftFile": + + """ + Parses timestamps, axis positions from file and returns to __init__. + + Parameters + ---------- + file : str + The file path of the pointings. + + Returns + ------- + cosipy.spacecraftfile.spacecraft_file + The SpacecraftFile object. + """ + + # Current SC format: + # 0: Always "OG" (for orbital geometry?) 
+        # 1: obstime: timestamp in unix seconds
+        # 2: lat_x: galactic latitude of SC x-axis (deg)
+        # 3: lon_x: galactic longitude of SC x-axis (deg)
+        # 4: lat_z: galactic latitude of SC z-axis (deg)
+        # 5: lon_z: galactic longitude of SC z-axis (deg)
+        # 6: altitude: altitude above the Earth's ellipsoid (km)
+        # 7: Earth_lat: galactic latitude of the direction the Earth's zenith is pointing to at the SC location (deg)
+        # 8: Earth_lon: galactic longitude of the direction the Earth's zenith is pointing to at the SC location (deg)
+        # 9: livetime (previously called SAA): accumulated uptime up to the following entry (seconds)
+
+        time,lat_x,lon_x,lat_z,lon_z,altitude,earth_lat,earth_lon,livetime = np.loadtxt(file, usecols=(1, 2, 3, 4, 5, 6, 7, 8, 9), unpack = True,
+                           delimiter=' ', skiprows=1, comments=("#", "EN"))
+        time = Time(time, format="unix")
+
+        xpointings = SkyCoord(l=lon_x * u.deg, b=lat_x * u.deg, frame="galactic")
+        zpointings = SkyCoord(l=lon_z * u.deg, b=lat_z * u.deg, frame="galactic")
+
+        attitude = Attitude.from_axes(x=xpointings, z=zpointings, frame = 'galactic')
+
+        livetime = livetime[:-1]*u.s # The last element is 0.
+
+        # Currently, the orbit information is in a weird format.
+        # The altitude is with respect to the Earth's surface, like
+        # you would specify it in a geodetic format, while
+        # the lon/lat is specified in J2000, like you would in ECI.
+        # Eventually everything should be in ECI (GCRS in astropy
+        # for all purposes), but for now let's do the conversion:
+        # 1. Get the direction in galactic coordinates.
+        # 2. Transform to GCRS, which uses RA/Dec (ICRS-like).
+        #    This is represented on the unit sphere.
+        # 3. Add the altitude by transforming to EarthLocation.
+        #    This should take care of the non-spherical Earth.
+        # 4. Go back to GCRS, now with the correct distance
+        #    (from the Earth's center).
+        zenith_gal = SkyCoord(l=earth_lon * u.deg, b=earth_lat * u.deg, frame="galactic")
+        gcrs = zenith_gal.transform_to('gcrs')
+        earth_loc = EarthLocation.from_geodetic(lon=gcrs.ra, lat=gcrs.dec, height=altitude*u.km)
+
+        return cls(time, attitude, earth_loc, livetime)
+
+    def _interp_attitude(self, points, weights) -> Attitude:
+        """
+        Interpolate the attitude at the given interpolation points and weights.
+
+        Parameters
+        ----------
+        points
+        weights
+
+        Returns
+        -------
+        scoords.Attitude
+        """
+
+        # TODO: we could do a better interpolation using more points, or
+        # additional ACS data e.g.
the rotation speed + + x,y,z = self._attitude.as_axes() + + x_interp = x[points[0]]*weights[0] + x[points[1]]*weights[1] + y_interp = y[points[0]] * weights[0] + y[points[1]] * weights[1] + + interp_attitude = Attitude.from_axes(x_interp,y_interp) + + return interp_attitude + + def interp_attitude(self, time) -> Attitude: + """ + + Returns + ------- + + """ + + points, weights = self.interp_weights(time) + + return self._interp_attitude(points, weights) + + def _interp_location(self, points, weights) -> GCRS: + """ + + Parameters + ---------- + points + weights + + Returns + ------- + + """ + + # TODO: we could do a better interpolation using more points and orbital dynamics + + x,y,z = self._location.itrs.represent_as('cartesian').xyz + + x_interp = x[points[0]] * weights[0] + x[points[1]] * weights[1] + y_interp = y[points[0]] * weights[0] + y[points[1]] * weights[1] + z_interp = z[points[0]] * weights[0] + z[points[1]] * weights[1] + + interp_location = GCRS(x=x_interp, y=y_interp, z=z_interp, representation_type='cartesian') + + return interp_location + + def interp_location(self, time) -> GCRS: + """ + + Returns + ------- + """ + + points, weights = self.interp_weights(time) + + return self._interp_location(points, weights) + + def _cumulative_livetime(self, points, weights) -> u.Quantity: + + cum_livetime_discrete = np.append(0 * self._hist.unit, np.cumsum(self.livetime)) + + up_to_tstart = cum_livetime_discrete[points[0]] + + within_bin = self.livetime[points[0]] * weights[1] + + cum_livetime = up_to_tstart + within_bin + + return cum_livetime + + def cumulative_livetime(self, time: Time) -> u.Quantity: + """ + Get the cumulative live obstime up to this obstime. + + The live obstime in between the internal timestamp is + assumed constant. + + Parameters + ---------- + time: + Timestamps + + Returns + ------- + Cummulative live obstime, with units. + """ + + points, weights = self.interp_weights(time) + + return self._cumulative_livetime(points, weights) + + def interp_weights(self, times: Time): + return self._hist.axis.interp_weights_edges(times) + + def interp(self, times: Time) -> 'SpacecraftFile': + + """ + Linearly interpolates attitude and position at a given obstime + + Parameters + ---------- + times: + Timestamps to interpolate + + Returns + ------- + A new SpacecraftFile object interpolated at these location + """ + + if times.size < 2: + raise ValueError("We need at least two obstime stamps. See also interp_attitude and inter_location") + + points, weights = self.interp_weights(times) + + interp_attitude = self._interp_attitude(points, weights) + interp_location = self._interp_location(points, weights) + + cum_livetime = self._cumulative_livetime(points, weights) + diff_livetime = cum_livetime[1:] - cum_livetime[:-1] + + return self.__class__(times, interp_attitude, interp_location, diff_livetime) + + def select_interval(self, start:Time = None, stop:Time = None) -> "SpacecraftFile": + """ + Returns the SpacecraftFile file class object for the source interval. + + Parameters + ---------- + start : astropy.time.Time + The start obstime of the orientation period. Start of history by default. + stop : astropy.time.Time + The end obstime of the orientation period. End of history by default. 
+ + Returns + ------- + cosipy.spacecraft.SpacecraftFile + """ + + if start is None: + start = self.tstart + + if stop is None: + stop = self.tstop + + if start < self.tstart or stop > self.tstop: + raise ValueError(f"Input range ({start}-{stop}) is outside the SC history ({self.tstart}-{self.tstop})") + + start_points, start_weights = self.interp_weights(start) + stop_points, stop_weights = self.interp_weights(stop) + + start_attitude = self._interp_attitude(start_points, start_weights) + stop_attitude = self._interp_attitude(stop_points, stop_weights) + + att_rot = self._attitude.as_matrix() + new_attitude = Attitude.from_matrix(np.append(start_attitude.as_matrix(), + np.append(att_rot[start_points[1]:stop_points[1]], + stop_attitude.as_matrix())), + frame = self._attitude.frame) + + start_location = self._interp_location(start_points, start_weights) + stop_location = self._interp_location(stop_points, stop_weights) + + new_location = concatenate_representations((start_location, self._location[start_points[1]:stop_points[1]], stop_location)) + + first_livetime = self.livetime[start_points[0]]*start_weights[1] + last_livetime = self.livetime[stop_points[0]]*stop_weights[1] + + new_livetime = np.append(first_livetime, np.append(self.livetime[start_points[1]:stop_points[0]], last_livetime)) + + new_time = np.concatenate((start, self.obstime[start_points[1]:stop_points[1]], stop)) + + return self.__class__(new_time, new_attitude, new_location, new_livetime) + + + def get_target_in_sc_frame(self, target_coord: SkyCoord) -> SkyCoord: + + """ + Get the location in spacecraft coordinates for a given target + in inertial coordinates. + + Parameters + ---------- + target_coord : astropy.coordinates.SkyCoord + The coordinates of the target object. + + Returns + ------- + astropy.coordinates.SkyCoord + The target coordinates in the spacecraft frame. + """ + + logger.info("Now converting to the Spacecraft frame...") + + src_path = SkyCoord(np.dot(self.attitude.rot.inv().as_matrix(), target_coord.cartesian.xyz.value), + representation_type = 'cartesian', + frame = SpacecraftFrame()) + + src_path.representation_type = 'spherical' + + return src_path + + def get_dwell_map(self, target_coord:SkyCoord, nside:int, scheme = 'ring') -> HealpixMap: + + """ + Generates the dwell obstime map for the source. + + Parameters + ---------- + target_coord: + Source coordinate + nside: + Healpix NSIDE + scheme: + Healpix pixel ordering scheme + + Returns + ------- + mhealpy.containers.healpix_map.HealpixMap + The dwell obstime map. + """ + + # Get source path + src_path_skycoord = self.get_target_in_sc_frame(target_coord) + + # Empty map + dwell_map = HealpixMap(nside = nside, + scheme = scheme, + coordsys = SpacecraftFrame()) + + # Fill + # Get the unique pixels to weight, and sum all the correspondint weights first, so + # each pixels needs to be called only once. + # Based on https://stackoverflow.com/questions/23268605/grouping-indices-of-unique-elements-in-numpy + + # remove the last value. 
Effectively a 0th order interpolations + pixels, weights = dwell_map.get_interp_weights(theta=src_path_skycoord[:-1]) + + weighted_duration = weights * self.livetime.to_value(u.second)[None] + + pixels = pixels.flatten() + weighted_duration = weighted_duration.flatten() + + pixels_argsort = np.argsort(pixels) + + pixels = pixels[pixels_argsort] + weighted_duration = weighted_duration[pixels_argsort] + + first_unique = np.concatenate(([True], pixels[1:] != pixels[:-1])) + + pixel_unique = pixels[first_unique] + + splits = np.nonzero(first_unique)[0][1:] + pixel_durations = [np.sum(weighted_duration[start:stop]) for start, stop in + zip(np.append(0, splits), np.append(splits, pixels.size))] + + for pix, dur in zip(pixel_unique, pixel_durations): + dwell_map[pix] += dur + + dwell_map.to(u.second, update=False, copy=False) + + return dwell_map + + def get_scatt_map(self, + nside, + target_coord=None, + scheme = 'ring', + coordsys = 'galactic', + r_earth = 6378.0, + earth_occ = True + ): + + """ + Bin the spacecraft attitude history into a 4D histogram that + contains the accumulated obstime the axes of the spacecraft where + looking at a given direction. + + Parameters + ---------- + target_coord : astropy.coordinates.SkyCoord, optional + The coordinates of the target object. + nside : int + The nside of the scatt map. + scheme : str, optional + The scheme of the scatt map (the default is "ring") + coordsys : str, optional + The coordinate system used in the scatt map (the default is "galactic). + r_earth : float, optional + Earth radius in km (default is 6378 km). + earth_occ : bool, optional + Option to include Earth occultation in scatt map calculation. + Default is True. + + Returns + ------- + h_ori : cosipy.spacecraftfile.scatt_map.SpacecraftAttitudeMap + The spacecraft attitude map. 
+ """ + + # Check if target_coord is needed + if earth_occ and target_coord is None: + raise ValueError("target_coord is needed when earth_occ = True") + + # Get orientations + timestamps = self.obstime + attitudes = self.attitude + + # Altitude at each point in the orbit: + altitude = self._location.height + + # Earth zenith at each point in the orbit: + earth_zenith = self.location.itrs + + # Fill (only 2 axes needed to fully define the orientation) + h_ori = SpacecraftAttitudeMap(nside = nside, + scheme = scheme, + coordsys = coordsys) + + x,y,z = attitudes[:-1].as_axes() + + # Get max angle based on altitude: + max_angle = np.pi - np.arcsin(r_earth/(r_earth + altitude)) + max_angle *= (180/np.pi) # angles in degree + + # Define weights and set to 0 if blocked by Earth: + weight = self.livetime*u.s + + if earth_occ: + # Calculate angle between source direction and Earth zenith + # for each obstime stamp: + src_angle = target_coord.separation(earth_zenith) + + # Get pointings that are occulted by Earth: + earth_occ_index = src_angle.value >= max_angle + + # Mask + weight[earth_occ_index[:-1]] = 0 + + # Fill histogram: + h_ori.fill(x, y, weight = weight) + + return h_ori + + + diff --git a/tests/spacecraftfile/test_spacecraftfile.py b/tests/spacecraftfile/test_spacecraftfile.py index f4e78c60..b90226db 100644 --- a/tests/spacecraftfile/test_spacecraftfile.py +++ b/tests/spacecraftfile/test_spacecraftfile.py @@ -1,5 +1,5 @@ +from cosipy.response import FullDetectorResponse from cosipy import test_data -from pytest import approx from cosipy import SpacecraftFile import numpy as np import astropy.units as u @@ -15,7 +15,7 @@ def test_get_time(): ori = SpacecraftFile.parse_from_file(ori_path) - assert np.allclose(ori.get_time().value, + assert np.allclose(ori.obstime.unix, [1835478000.0, 1835478001.0, 1835478002.0, 1835478003.0, 1835478004.0, 1835478005.0, 1835478006.0, 1835478007.0, 1835478008.0, @@ -26,20 +26,22 @@ def test_get_time_delta(): ori_path = test_data.path / "20280301_first_10sec.ori" ori = SpacecraftFile.parse_from_file(ori_path) - time_delta = ori.get_time_delta() - time_delta.format = "sec" + time_delta = ori.intervals_duration.to_value(u.s) - assert np.allclose(time_delta.value, np.array([1.000000, 1.000000, 1.000000, 1.000000, 1.000000, - 1.000000, 1.000000, 1.000000, 1.000000, 1.000000])) + assert np.allclose(time_delta, np.array([1.000000, 1.000000, 1.000000, 1.000000, 1.000000, + 1.000000, 1.000000, 1.000000, 1.000000, 1.000000])) + time_delta = ori.livetime.to_value(u.s) + assert np.allclose(time_delta, np.array([1.000000, 1.000000, 1.000000, 1.000000, 1.000000, + 1.000000, 1.000000, 1.000000, 1.000000, 1.000000])) def test_get_attitude(): ori_path = test_data.path / "20280301_first_10sec.ori" ori = SpacecraftFile.parse_from_file(ori_path) - attitude = ori.get_attitude() + attitude = ori.attitude matrix = np.array([[[0.215904, -0.667290, -0.712818], [0.193436, 0.744798, -0.638638], @@ -93,10 +95,9 @@ def test_get_target_in_sc_frame(): ori_path = test_data.path / "20280301_first_10sec.ori" ori = SpacecraftFile.parse_from_file(ori_path) - target_name = "Crab" target_coord = SkyCoord(l=184.5551, b = -05.7877, unit = (u.deg, u.deg), frame = "galactic") - path_in_sc = ori.get_target_in_sc_frame(target_name, target_coord) + path_in_sc = ori.get_target_in_sc_frame(target_coord) assert np.allclose(path_in_sc.lon.deg, np.array([118.393522, 118.425255, 118.456868, 118.488362, 118.519735, @@ -109,17 +110,14 @@ def test_get_target_in_sc_frame(): def test_get_dwell_map(): - 
response_path =test_data.path / "test_full_detector_response.h5" ori_path = test_data.path / "20280301_first_10sec.ori" ori = SpacecraftFile.parse_from_file(ori_path) - target_name = "Crab" target_coord = SkyCoord(l=184.5551, b = -05.7877, unit = (u.deg, u.deg), frame = "galactic") - path_in_sc = ori.get_target_in_sc_frame(target_name, target_coord) - - dwell_map = ori.get_dwell_map(response = response_path) + + dwell_map = ori.get_dwell_map(target_coord, nside=1, scheme = 'ring') - assert np.allclose(dwell_map[:].value, + assert np.allclose(dwell_map[:].to_value(u.s), np.array([1.895057, 7.615584, 0.244679, 0.244679, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000])) From 62d45418d7dbe274dd0c3e6bdc542a221216c50f Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Thu, 1 May 2025 10:31:44 -0400 Subject: [PATCH 024/133] All SpacecraftFile tests passing Signed-off-by: Israel Martinez --- cosipy/spacecraftfile/spacecraft_file.py | 37 ++- tests/spacecraftfile/test_spacecraftfile.py | 292 +------------------- 2 files changed, 28 insertions(+), 301 deletions(-) diff --git a/cosipy/spacecraftfile/spacecraft_file.py b/cosipy/spacecraftfile/spacecraft_file.py index f9803229..6aa320c5 100644 --- a/cosipy/spacecraftfile/spacecraft_file.py +++ b/cosipy/spacecraftfile/spacecraft_file.py @@ -5,6 +5,7 @@ from astropy.time import Time from astropy.coordinates import SkyCoord, EarthLocation, GCRS, \ concatenate_representations, ITRS +from astropy.coordinates import concatenate as concatenate_skycoord from histpy import Histogram, HealpixAxis, TimeAxis from mhealpy import HealpixMap @@ -199,12 +200,9 @@ def _interp_attitude(self, points, weights) -> Attitude: # TODO: we could do a better interpolation using more points, or # additional ACS data e.g. 
the rotation speed - x,y,z = self._attitude.as_axes() + rot_matrix = self._attitude.as_matrix() - x_interp = x[points[0]]*weights[0] + x[points[1]]*weights[1] - y_interp = y[points[0]] * weights[0] + y[points[1]] * weights[1] - - interp_attitude = Attitude.from_axes(x_interp,y_interp) + interp_attitude = Attitude.from_matrix(rot_matrix[points[0]]*weights[0] + rot_matrix[points[1]]*weights[1], frame = self._attitude.frame) return interp_attitude @@ -220,7 +218,7 @@ def interp_attitude(self, time) -> Attitude: return self._interp_attitude(points, weights) - def _interp_location(self, points, weights) -> GCRS: + def _interp_location(self, points, weights) -> EarthLocation: """ Parameters @@ -235,17 +233,19 @@ def _interp_location(self, points, weights) -> GCRS: # TODO: we could do a better interpolation using more points and orbital dynamics - x,y,z = self._location.itrs.represent_as('cartesian').xyz + x = self._location.x + y = self._location.y + z = self._location.z x_interp = x[points[0]] * weights[0] + x[points[1]] * weights[1] y_interp = y[points[0]] * weights[0] + y[points[1]] * weights[1] z_interp = z[points[0]] * weights[0] + z[points[1]] * weights[1] - interp_location = GCRS(x=x_interp, y=y_interp, z=z_interp, representation_type='cartesian') + interp_location = EarthLocation.from_geocentric(x=x_interp, y=y_interp, z=z_interp) return interp_location - def interp_location(self, time) -> GCRS: + def interp_location(self, time) -> EarthLocation: """ Returns @@ -352,22 +352,29 @@ def select_interval(self, start:Time = None, stop:Time = None) -> "SpacecraftFil stop_attitude = self._interp_attitude(stop_points, stop_weights) att_rot = self._attitude.as_matrix() - new_attitude = Attitude.from_matrix(np.append(start_attitude.as_matrix(), + new_attitude = Attitude.from_matrix(np.append(start_attitude.as_matrix()[None], np.append(att_rot[start_points[1]:stop_points[1]], - stop_attitude.as_matrix())), + stop_attitude.as_matrix()[None], axis = 0), axis = 0), frame = self._attitude.frame) - start_location = self._interp_location(start_points, start_weights) - stop_location = self._interp_location(stop_points, stop_weights) + start_location = self._interp_location(start_points, start_weights)[None] + stop_location = self._interp_location(stop_points, stop_weights)[None] + center_locations = self._location[start_points[1]:stop_points[1]] - new_location = concatenate_representations((start_location, self._location[start_points[1]:stop_points[1]], stop_location)) + new_location = EarthLocation.from_geocentric(np.concatenate((start_location.x, center_locations.x, stop_location.x)), + np.concatenate((start_location.y, center_locations.y, stop_location.y)), + np.concatenate((start_location.z, center_locations.z, stop_location.z))) first_livetime = self.livetime[start_points[0]]*start_weights[1] last_livetime = self.livetime[stop_points[0]]*stop_weights[1] new_livetime = np.append(first_livetime, np.append(self.livetime[start_points[1]:stop_points[0]], last_livetime)) - new_time = np.concatenate((start, self.obstime[start_points[1]:stop_points[1]], stop)) + middle_times = self.obstime[start_points[1]:stop_points[1]] + + new_time = Time(np.concatenate(([start.jd1], middle_times.jd1, [stop.jd1])), + np.concatenate(([start.jd2], middle_times.jd2, [stop.jd2])), + format = 'jd') return self.__class__(new_time, new_attitude, new_location, new_livetime) diff --git a/tests/spacecraftfile/test_spacecraftfile.py b/tests/spacecraftfile/test_spacecraftfile.py index b90226db..61b3f870 100644 --- 
a/tests/spacecraftfile/test_spacecraftfile.py +++ b/tests/spacecraftfile/test_spacecraftfile.py @@ -121,303 +121,23 @@ def test_get_dwell_map(): np.array([1.895057, 7.615584, 0.244679, 0.244679, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000])) +def test_select_interval(): -def test_get_psr_rsp(): - - response_path = test_data.path / "test_full_detector_response.h5" - ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) - - target_name = "Crab" - target_coord = SkyCoord(l=184.5551, b = -05.7877, unit = (u.deg, u.deg), frame = "galactic") - path_in_sc = ori.get_target_in_sc_frame(target_name, target_coord) - - dwell_map = ori.get_dwell_map(response = response_path) - - Ei_edges, Ei_lo, Ei_hi, Em_edges, Em_lo, Em_hi, areas, matrix = ori.get_psr_rsp() - - assert np.allclose(Ei_edges, - np.array([150., 220., 325., 480., 520., 765., 1120., 1650., 2350., 3450., 5000.])) - - assert np.allclose(Ei_lo, - np.array([150., 220., 325., 480., 520., 765., 1120., 1650., 2350., 3450.])) - - assert np.allclose(Ei_hi, - np.array([220., 325., 480., 520., 765., 1120., 1650., 2350., 3450., 5000.])) - - assert np.allclose(Em_edges, - np.array([150., 220., 325., 480., 520., 765., 1120., 1650., 2350., 3450., 5000.])) - - assert np.allclose(Em_lo, - np.array([150., 220., 325., 480., 520., 765., 1120., 1650., 2350., 3450.])) - - assert np.allclose(Em_hi, - np.array([220., 325., 480., 520., 765., 1120., 1650., 2350., 3450., 5000.])) - - assert np.allclose(areas, - np.array([0.06089862, 0.4563752 , 1.1601573 , 1.6237522 , 2.0216975 , - 2.2039971 , 2.0773466 , 1.7005537 , 1.1626455 , 0.80194914])) - - assert np.allclose(matrix, - np.array([[9.80996430e-01, 4.68325317e-02, 1.82471890e-02, 9.86817386e-03, - 5.82037494e-03, 3.47572053e-03, 2.80415593e-03, 3.13903880e-03, - 4.89909900e-03, 6.68705115e-03], - [1.90035217e-02, 9.44634676e-01, 1.28470331e-01, 9.38407257e-02, - 4.32382338e-02, 2.23877952e-02, 1.63043533e-02, 1.73287615e-02, - 2.80312393e-02, 3.78256924e-02], - [0.00000000e+00, 8.53277557e-03, 8.48568857e-01, 2.18858123e-01, - 1.85861006e-01, 7.39495233e-02, 4.45922092e-02, 4.06639054e-02, - 6.96888119e-02, 9.27841067e-02], - [0.00000000e+00, 0.00000000e+00, 4.71363496e-03, 6.62667990e-01, - 6.19757064e-02, 2.71992888e-02, 1.51670892e-02, 1.46367634e-02, - 3.69769707e-02, 7.03022778e-02], - [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 1.47649962e-02, - 7.00923026e-01, 2.60504693e-01, 9.65307504e-02, 7.03864172e-02, - 1.15635686e-01, 1.53913230e-01], - [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, - 2.18164618e-03, 6.11085474e-01, 2.28024259e-01, 9.29291621e-02, - 1.14003479e-01, 1.54005408e-01], - [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, - 0.00000000e+00, 1.39757351e-03, 5.95472097e-01, 2.54652113e-01, - 1.32362068e-01, 1.71157718e-01], - [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, - 0.00000000e+00, 0.00000000e+00, 1.10507896e-03, 5.05610526e-01, - 2.00507417e-01, 1.41500503e-01], - [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, - 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 6.53312833e-04, - 2.97714621e-01, 1.26633704e-01], - [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, - 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, - 1.80651987e-04, 4.51902114e-02]])) - -def test_get_arf(): - - response_path = test_data.path / "test_full_detector_response.h5" - ori_path = test_data.path / "20280301_first_10sec.ori" - 
ori = SpacecraftFile.parse_from_file(ori_path) - - target_name = "Crab" - target_coord = SkyCoord(l=184.5551, b = -05.7877, unit = (u.deg, u.deg), frame = "galactic") - - path_in_sc = ori.get_target_in_sc_frame(target_name, target_coord) - - dwell_map = ori.get_dwell_map(response = response_path) - - _ = ori.get_psr_rsp() - - ori.get_arf(out_name = "test") - - fits_file = fits.open("test.arf") - - assert np.allclose(fits_file[1].data.field("ENERG_LO"), - np.array([150., 220., 325., 480., 520., 765., 1120., 1650., 2350., 3450.])) - - assert np.allclose(fits_file[1].data.field("ENERG_HI"), - np.array([220., 325., 480., 520., 765., 1120., 1650., 2350., 3450., 5000.])) - - assert np.allclose(fits_file[1].data.field("SPECRESP"), - np.array([0.06089862, 0.4563752 , 1.1601573 , 1.6237522 , 2.0216975 , - 2.2039971 , 2.0773466 , 1.7005537 , 1.1626455 , 0.80194914])) - - os.remove("test.arf") - -def test_get_rmf(): - - response_path = test_data.path / "test_full_detector_response.h5" - ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) - - target_name = "Crab" - target_coord = SkyCoord(l=184.5551, b = -05.7877, unit = (u.deg, u.deg), frame = "galactic") - - path_in_sc = ori.get_target_in_sc_frame(target_name, target_coord) - - dwell_map = ori.get_dwell_map(response = response_path) - - _ = ori.get_psr_rsp() - - ori.get_rmf(out_name = "test") - - fits_file = fits.open("test.rmf") - - assert np.allclose(fits_file[1].data.field("ENERG_LO"), - np.array([150., 220., 325., 480., 520., 765., 1120., 1650., 2350., 3450.])) - - assert np.allclose(fits_file[1].data.field("ENERG_HI"), - np.array([220., 325., 480., 520., 765., 1120., 1650., 2350., 3450., 5000.])) - - assert np.allclose(fits_file[1].data.field("N_GRP"), - np.array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1])) - - matrix_flattened = [] - for i in fits_file[1].data.field("MATRIX"): - matrix_flattened += i.tolist() - - assert np.allclose(matrix_flattened, - [0.9809964299201965, - 0.019003521651029587, - 0.046832531690597534, - 0.9446346759796143, - 0.008532775565981865, - 0.01824718900024891, - 0.12847033143043518, - 0.848568856716156, - 0.0047136349603533745, - 0.009868173860013485, - 0.09384072571992874, - 0.21885812282562256, - 0.662667989730835, - 0.014764996245503426, - 0.005820374935865402, - 0.043238233774900436, - 0.1858610063791275, - 0.06197570636868477, - 0.7009230256080627, - 0.00218164618127048, - 0.003475720528513193, - 0.02238779515028, - 0.07394952327013016, - 0.027199288830161095, - 0.26050469279289246, - 0.6110854744911194, - 0.0013975735055282712, - 0.0028041559271514416, - 0.01630435325205326, - 0.04459220916032791, - 0.01516708917915821, - 0.09653075039386749, - 0.22802425920963287, - 0.5954720973968506, - 0.001105078961700201, - 0.0031390388030558825, - 0.017328761518001556, - 0.04066390544176102, - 0.014636763371527195, - 0.07038641721010208, - 0.0929291620850563, - 0.25465211272239685, - 0.5056105256080627, - 0.000653312832582742, - 0.004899099003523588, - 0.0280312392860651, - 0.0696888118982315, - 0.03697697073221207, - 0.11563568562269211, - 0.11400347948074341, - 0.13236206769943237, - 0.20050741732120514, - 0.29771462082862854, - 0.0001806519867386669, - 0.006687051150947809, - 0.03782569244503975, - 0.0927841067314148, - 0.07030227780342102, - 0.1539132297039032, - 0.15400540828704834, - 0.17115771770477295, - 0.14150050282478333, - 0.12663370370864868, - 0.04519021138548851]) - - os.remove("test.rmf") - - -def test_get_pha(): - - response_path = test_data.path / 
"test_full_detector_response.h5" - ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) - - target_name = "Crab" - target_coord = SkyCoord(l=184.5551, b = -05.7877, unit = (u.deg, u.deg), frame = "galactic") - - path_in_sc = ori.get_target_in_sc_frame(target_name, target_coord) - dwell_map = ori.get_dwell_map(response = response_path) - _ = ori.get_psr_rsp() - ori.get_arf(out_name = "test") - ori.get_rmf(out_name = "test") - - counts = np.array([0.01094232, 0.04728866, 0.06744612, 0.01393708, 0.05420688, - 0.03141498, 0.01818584, 0.00717219, 0.00189568, 0.00010503])*1000 - - errors = np.sqrt(counts) - - ori.get_pha(src_counts=counts, errors=errors, exposure_time=10) - - os.remove("test.arf") - os.remove("test.rmf") - - fits_file = fits.open("test.pha") - os.remove("test.pha") - - assert np.allclose(fits_file[1].data.field("CHANNEL"), - np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])) - - assert np.allclose(fits_file[1].data.field("COUNTS"), - np.array([10, 47, 67, 13, 54, 31, 18, 7, 1, 0])) - - assert np.allclose(fits_file[1].data.field("STAT_ERR"), - np.array([3, 6, 8, 3, 7, 5, 4, 2, 1, 0])) - -def test_plot_arf(): - - response_path = test_data.path / "test_full_detector_response.h5" ori_path = test_data.path / "20280301_first_10sec.ori" ori = SpacecraftFile.parse_from_file(ori_path) - - target_name = "Crab" - target_coord = SkyCoord(l=184.5551, b = -05.7877, unit = (u.deg, u.deg), frame = "galactic") - - path_in_sc = ori.get_target_in_sc_frame(target_name, target_coord) - dwell_map = ori.get_dwell_map(response = response_path) - _ = ori.get_psr_rsp() - ori.get_arf(out_name = "test") - - ori.plot_arf() - - assert Path("Effective_area_for_test.png").exists() - - os.remove("test.arf") - os.remove("Effective_area_for_test.png") - -def test_plot_rmf(): - response_path = test_data.path / "test_full_detector_response.h5" - ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) - - target_name = "Crab" - target_coord = SkyCoord(l=184.5551, b = -05.7877, unit = (u.deg, u.deg), frame = "galactic") - - path_in_sc = ori.get_target_in_sc_frame(target_name, target_coord) - dwell_map = ori.get_dwell_map(response = response_path) - _ = ori.get_psr_rsp() - ori.get_rmf(out_name = "test") - - ori.plot_rmf() - - assert Path("Redistribution_matrix_for_test.png").exists() - - os.remove("test.rmf") - os.remove("Redistribution_matrix_for_test.png") - -def test_source_interval(): - - response_path = test_data.path / "test_full_detector_response.h5" - ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) + new_ori = ori.select_interval(ori.tstart+0.1*u.s, ori.tstart+2.1*u.s) - new_ori = ori.source_interval(Time(ori._load_time[0]+0.1, format = "unix"), - Time(ori._load_time[0]+2.1, format = "unix")) + x, y, z = new_ori.attitude.as_axes() - assert np.allclose(new_ori._load_time, + assert np.allclose(new_ori.obstime.unix, np.array([1.835478e+09, 1.835478e+09, 1.835478e+09, 1.835478e+09])) - assert np.allclose(new_ori._x_direction.flatten(), + assert np.allclose(np.asarray([x.transform_to('galactic').l.deg, x.transform_to('galactic').b.deg]).transpose().flatten(), np.array([41.86062093, 73.14368765, 41.88225011, 73.09517927, 41.90629597, 73.0412838 , 41.9087019 , 73.03589454])) - assert np.allclose(new_ori._z_direction.flatten(), + assert np.allclose(np.asarray([z.transform_to('galactic').l.deg, z.transform_to('galactic').b.deg]).transpose().flatten(), np.array([221.86062093, 
16.85631235, 221.88225011, 16.90482073, 221.90629597, 16.9587162 , 221.9087019 , 16.96410546])) From e70cae79685f1f905bd0d729ec6796009498e338 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Thu, 1 May 2025 11:03:46 -0400 Subject: [PATCH 025/133] All rsp to arf&rmf converter tests are passing Signed-off-by: Israel Martinez --- cosipy/__init__.py | 4 +- cosipy/spacecraftfile/__init__.py | 1 + cosipy/spacecraftfile/rsp_to_arf_rmf.py | 6 +- cosipy/spacecraftfile/spacecraft_file.py | 4 +- .../spacecraftfile/test_arf_rmf_converter.py | 272 ++++++++++++++++++ 5 files changed, 279 insertions(+), 8 deletions(-) create mode 100644 tests/spacecraftfile/test_arf_rmf_converter.py diff --git a/cosipy/__init__.py b/cosipy/__init__.py index c6d47094..48489fc5 100644 --- a/cosipy/__init__.py +++ b/cosipy/__init__.py @@ -2,6 +2,8 @@ from .response import DetectorResponse +from .spacecraftfile import * + from .data_io import DataIO from .data_io import UnBinnedData from .data_io import BinnedData @@ -10,8 +12,6 @@ from .threeml import COSILike from .threeml import Band_Eflux -from .spacecraftfile import * - from .ts_map import FastTSMap from .source_injector import SourceInjector diff --git a/cosipy/spacecraftfile/__init__.py b/cosipy/spacecraftfile/__init__.py index 9eb8444d..4f12a593 100644 --- a/cosipy/spacecraftfile/__init__.py +++ b/cosipy/spacecraftfile/__init__.py @@ -1,2 +1,3 @@ from .spacecraft_file import * +from .rsp_to_arf_rmf import RspArfRmfConverter from .scatt_map import SpacecraftAttitudeMap diff --git a/cosipy/spacecraftfile/rsp_to_arf_rmf.py b/cosipy/spacecraftfile/rsp_to_arf_rmf.py index 7d5ae516..bfbd9cba 100644 --- a/cosipy/spacecraftfile/rsp_to_arf_rmf.py +++ b/cosipy/spacecraftfile/rsp_to_arf_rmf.py @@ -1,7 +1,7 @@ import logging logger = logging.getLogger(__name__) -from cosipy import SpacecraftFile +from .spacecraft_file import SpacecraftFile import numpy as np import astropy.units as u @@ -330,9 +330,9 @@ def get_pha(self, src_counts, errors, rmf_file=None, arf_file=None, bkg_file=Non else: self.arf_file = f'{self.out_name}.arf' - if exposure_time is None: + if exposure_time is not None: self.exposure_time = exposure_time - if dts is not None: + elif dts is not None: livetime = self.__str_or_array(dts) self.exposure_time = livetime.sum() self.telescope = telescope diff --git a/cosipy/spacecraftfile/spacecraft_file.py b/cosipy/spacecraftfile/spacecraft_file.py index 6aa320c5..31d7225c 100644 --- a/cosipy/spacecraftfile/spacecraft_file.py +++ b/cosipy/spacecraftfile/spacecraft_file.py @@ -3,9 +3,7 @@ import astropy.units as u from astropy.time import Time -from astropy.coordinates import SkyCoord, EarthLocation, GCRS, \ - concatenate_representations, ITRS -from astropy.coordinates import concatenate as concatenate_skycoord +from astropy.coordinates import SkyCoord, EarthLocation, GCRS, ITRS from histpy import Histogram, HealpixAxis, TimeAxis from mhealpy import HealpixMap diff --git a/tests/spacecraftfile/test_arf_rmf_converter.py b/tests/spacecraftfile/test_arf_rmf_converter.py new file mode 100644 index 00000000..4aab68f7 --- /dev/null +++ b/tests/spacecraftfile/test_arf_rmf_converter.py @@ -0,0 +1,272 @@ +import os +from pathlib import Path + +import numpy as np +from astropy.coordinates import SkyCoord +from astropy.io import fits +from cosipy import test_data, SpacecraftFile +from cosipy.response import FullDetectorResponse +from cosipy.spacecraftfile import RspArfRmfConverter + +from astropy import units as u + +def test_get_psr_rsp(): + response_path = test_data.path / 
"test_full_detector_response.h5" + response = FullDetectorResponse.open(response_path) + ori_path = test_data.path / "20280301_first_10sec.ori" + ori = SpacecraftFile.parse_from_file(ori_path) + target_coord = SkyCoord(l=184.5551, b=-05.7877, unit=(u.deg, u.deg), frame="galactic") + converter = RspArfRmfConverter(response, ori, target_coord) + + Ei_edges, Ei_lo, Ei_hi, Em_edges, Em_lo, Em_hi, areas, matrix = converter.get_psr_rsp() + + assert np.allclose(Ei_edges, + np.array([150., 220., 325., 480., 520., 765., 1120., 1650., 2350., 3450., 5000.])) + + assert np.allclose(Ei_lo, + np.array([150., 220., 325., 480., 520., 765., 1120., 1650., 2350., 3450.])) + + assert np.allclose(Ei_hi, + np.array([220., 325., 480., 520., 765., 1120., 1650., 2350., 3450., 5000.])) + + assert np.allclose(Em_edges, + np.array([150., 220., 325., 480., 520., 765., 1120., 1650., 2350., 3450., 5000.])) + + assert np.allclose(Em_lo, + np.array([150., 220., 325., 480., 520., 765., 1120., 1650., 2350., 3450.])) + + assert np.allclose(Em_hi, + np.array([220., 325., 480., 520., 765., 1120., 1650., 2350., 3450., 5000.])) + + assert np.allclose(areas, + np.array([0.06089862, 0.4563752, 1.1601573, 1.6237522, 2.0216975, + 2.2039971, 2.0773466, 1.7005537, 1.1626455, 0.80194914])) + + assert np.allclose(matrix, + np.array([[9.80996430e-01, 4.68325317e-02, 1.82471890e-02, 9.86817386e-03, + 5.82037494e-03, 3.47572053e-03, 2.80415593e-03, 3.13903880e-03, + 4.89909900e-03, 6.68705115e-03], + [1.90035217e-02, 9.44634676e-01, 1.28470331e-01, 9.38407257e-02, + 4.32382338e-02, 2.23877952e-02, 1.63043533e-02, 1.73287615e-02, + 2.80312393e-02, 3.78256924e-02], + [0.00000000e+00, 8.53277557e-03, 8.48568857e-01, 2.18858123e-01, + 1.85861006e-01, 7.39495233e-02, 4.45922092e-02, 4.06639054e-02, + 6.96888119e-02, 9.27841067e-02], + [0.00000000e+00, 0.00000000e+00, 4.71363496e-03, 6.62667990e-01, + 6.19757064e-02, 2.71992888e-02, 1.51670892e-02, 1.46367634e-02, + 3.69769707e-02, 7.03022778e-02], + [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 1.47649962e-02, + 7.00923026e-01, 2.60504693e-01, 9.65307504e-02, 7.03864172e-02, + 1.15635686e-01, 1.53913230e-01], + [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 2.18164618e-03, 6.11085474e-01, 2.28024259e-01, 9.29291621e-02, + 1.14003479e-01, 1.54005408e-01], + [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 0.00000000e+00, 1.39757351e-03, 5.95472097e-01, 2.54652113e-01, + 1.32362068e-01, 1.71157718e-01], + [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 0.00000000e+00, 0.00000000e+00, 1.10507896e-03, 5.05610526e-01, + 2.00507417e-01, 1.41500503e-01], + [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 6.53312833e-04, + 2.97714621e-01, 1.26633704e-01], + [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 1.80651987e-04, 4.51902114e-02]])) + + +def test_get_arf(): + + response_path = test_data.path / "test_full_detector_response.h5" + response = FullDetectorResponse.open(response_path) + ori_path = test_data.path / "20280301_first_10sec.ori" + ori = SpacecraftFile.parse_from_file(ori_path) + target_coord = SkyCoord(l=184.5551, b=-05.7877, unit=(u.deg, u.deg), frame="galactic") + converter = RspArfRmfConverter(response, ori, target_coord) + + _ = converter.get_psr_rsp() + + converter.get_arf(out_name="test") + + fits_file = fits.open("test.arf") + + assert 
np.allclose(fits_file[1].data.field("ENERG_LO"), + np.array([150., 220., 325., 480., 520., 765., 1120., 1650., 2350., 3450.])) + + assert np.allclose(fits_file[1].data.field("ENERG_HI"), + np.array([220., 325., 480., 520., 765., 1120., 1650., 2350., 3450., 5000.])) + + assert np.allclose(fits_file[1].data.field("SPECRESP"), + np.array([0.06089862, 0.4563752, 1.1601573, 1.6237522, 2.0216975, + 2.2039971, 2.0773466, 1.7005537, 1.1626455, 0.80194914])) + + os.remove("test.arf") + + +def test_get_rmf(): + response_path = test_data.path / "test_full_detector_response.h5" + response = FullDetectorResponse.open(response_path) + ori_path = test_data.path / "20280301_first_10sec.ori" + ori = SpacecraftFile.parse_from_file(ori_path) + target_coord = SkyCoord(l=184.5551, b=-05.7877, unit=(u.deg, u.deg), frame="galactic") + converter = RspArfRmfConverter(response, ori, target_coord) + + _ = converter.get_psr_rsp() + + converter.get_rmf(out_name="test") + + fits_file = fits.open("test.rmf") + + assert np.allclose(fits_file[1].data.field("ENERG_LO"), + np.array([150., 220., 325., 480., 520., 765., 1120., 1650., 2350., 3450.])) + + assert np.allclose(fits_file[1].data.field("ENERG_HI"), + np.array([220., 325., 480., 520., 765., 1120., 1650., 2350., 3450., 5000.])) + + assert np.allclose(fits_file[1].data.field("N_GRP"), + np.array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1])) + + matrix_flattened = [] + for i in fits_file[1].data.field("MATRIX"): + matrix_flattened += i.tolist() + + assert np.allclose(matrix_flattened, + [0.9809964299201965, + 0.019003521651029587, + 0.046832531690597534, + 0.9446346759796143, + 0.008532775565981865, + 0.01824718900024891, + 0.12847033143043518, + 0.848568856716156, + 0.0047136349603533745, + 0.009868173860013485, + 0.09384072571992874, + 0.21885812282562256, + 0.662667989730835, + 0.014764996245503426, + 0.005820374935865402, + 0.043238233774900436, + 0.1858610063791275, + 0.06197570636868477, + 0.7009230256080627, + 0.00218164618127048, + 0.003475720528513193, + 0.02238779515028, + 0.07394952327013016, + 0.027199288830161095, + 0.26050469279289246, + 0.6110854744911194, + 0.0013975735055282712, + 0.0028041559271514416, + 0.01630435325205326, + 0.04459220916032791, + 0.01516708917915821, + 0.09653075039386749, + 0.22802425920963287, + 0.5954720973968506, + 0.001105078961700201, + 0.0031390388030558825, + 0.017328761518001556, + 0.04066390544176102, + 0.014636763371527195, + 0.07038641721010208, + 0.0929291620850563, + 0.25465211272239685, + 0.5056105256080627, + 0.000653312832582742, + 0.004899099003523588, + 0.0280312392860651, + 0.0696888118982315, + 0.03697697073221207, + 0.11563568562269211, + 0.11400347948074341, + 0.13236206769943237, + 0.20050741732120514, + 0.29771462082862854, + 0.0001806519867386669, + 0.006687051150947809, + 0.03782569244503975, + 0.0927841067314148, + 0.07030227780342102, + 0.1539132297039032, + 0.15400540828704834, + 0.17115771770477295, + 0.14150050282478333, + 0.12663370370864868, + 0.04519021138548851]) + + os.remove("test.rmf") + + +def test_get_pha(): + response_path = test_data.path / "test_full_detector_response.h5" + response = FullDetectorResponse.open(response_path) + ori_path = test_data.path / "20280301_first_10sec.ori" + ori = SpacecraftFile.parse_from_file(ori_path) + target_coord = SkyCoord(l=184.5551, b=-05.7877, unit=(u.deg, u.deg), frame="galactic") + converter = RspArfRmfConverter(response, ori, target_coord) + + _ = converter.get_psr_rsp() + converter.get_arf(out_name="test") + converter.get_rmf(out_name="test") + + counts = 
np.array([0.01094232, 0.04728866, 0.06744612, 0.01393708, 0.05420688, + 0.03141498, 0.01818584, 0.00717219, 0.00189568, 0.00010503]) * 1000 + + errors = np.sqrt(counts) + + converter.get_pha(src_counts=counts, errors=errors, exposure_time=10) + + os.remove("test.arf") + os.remove("test.rmf") + + fits_file = fits.open("test.pha") + os.remove("test.pha") + + assert np.allclose(fits_file[1].data.field("CHANNEL"), + np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])) + + assert np.allclose(fits_file[1].data.field("COUNTS"), + np.array([10, 47, 67, 13, 54, 31, 18, 7, 1, 0])) + + assert np.allclose(fits_file[1].data.field("STAT_ERR"), + np.array([3, 6, 8, 3, 7, 5, 4, 2, 1, 0])) + + +def test_plot_arf(): + response_path = test_data.path / "test_full_detector_response.h5" + response = FullDetectorResponse.open(response_path) + ori_path = test_data.path / "20280301_first_10sec.ori" + ori = SpacecraftFile.parse_from_file(ori_path) + target_coord = SkyCoord(l=184.5551, b=-05.7877, unit=(u.deg, u.deg), frame="galactic") + converter = RspArfRmfConverter(response, ori, target_coord) + + _ = converter.get_psr_rsp() + converter.get_arf(out_name="test") + + converter.plot_arf() + + assert Path("Effective_area_for_test.png").exists() + + os.remove("test.arf") + os.remove("Effective_area_for_test.png") + + +def test_plot_rmf(): + response_path = test_data.path / "test_full_detector_response.h5" + response = FullDetectorResponse.open(response_path) + ori_path = test_data.path / "20280301_first_10sec.ori" + ori = SpacecraftFile.parse_from_file(ori_path) + target_coord = SkyCoord(l=184.5551, b=-05.7877, unit=(u.deg, u.deg), frame="galactic") + converter = RspArfRmfConverter(response, ori, target_coord) + + _ = converter.get_psr_rsp() + converter.get_rmf(out_name="test") + + converter.plot_rmf() + + assert Path("Redistribution_matrix_for_test.png").exists() + + os.remove("test.rmf") + os.remove("Redistribution_matrix_for_test.png") From c276bfba04550a12d9a6aec12b2e038c15dcf4be Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Thu, 1 May 2025 11:10:49 -0400 Subject: [PATCH 026/133] Rename SC file open. Eventually this function should be able to either open the official FITS file, or parse the .ori file. It's better to keep it generic for now. 
Signed-off-by: Israel Martinez --- cosipy/data_io/UnBinnedData.py | 4 ++-- cosipy/spacecraftfile/spacecraft_file.py | 4 ++-- .../response/extended_source_response_generator.py | 2 +- ..._source_response_generator_with_multiple_nodes.py | 2 +- .../test_coordsys_conversion_matrix.py | 2 +- tests/image_deconvolution/test_exposure_table.py | 2 +- tests/polarization/test_polarization_asad.py | 2 +- tests/response/test_full_detector_response.py | 4 ++-- tests/source_injector/test_source_injector.py | 2 +- tests/spacecraftfile/test_arf_rmf_converter.py | 12 ++++++------ tests/spacecraftfile/test_spacecraftfile.py | 12 ++++++------ tests/threeml/test_spectral_fitting.py | 2 +- tests/ts_map/test_fast_ts_map.py | 6 +++--- 13 files changed, 28 insertions(+), 28 deletions(-) diff --git a/cosipy/data_io/UnBinnedData.py b/cosipy/data_io/UnBinnedData.py index ddb823bd..0f8814e6 100644 --- a/cosipy/data_io/UnBinnedData.py +++ b/cosipy/data_io/UnBinnedData.py @@ -441,7 +441,7 @@ def instrument_pointing(self): """ # Get ori info: - ori = SpacecraftFile.parse_from_file(self.ori_file) + ori = SpacecraftFile.open(self.ori_file) time_tags = ori._load_time x_pointings = ori.x_pointings z_pointings = ori.z_pointings @@ -842,7 +842,7 @@ def cut_SAA_events(self, unbinned_data=None, output_name=None): self.cosi_dataset = self.get_dict(unbinned_data) # Get ori info: - ori = SpacecraftFile.parse_from_file(self.ori_file) + ori = SpacecraftFile.open(self.ori_file) # Get bad time intervals: bti = self.find_bad_intervals(ori._time, ori.livetime) diff --git a/cosipy/spacecraftfile/spacecraft_file.py b/cosipy/spacecraftfile/spacecraft_file.py index 31d7225c..f2a60db8 100644 --- a/cosipy/spacecraftfile/spacecraft_file.py +++ b/cosipy/spacecraftfile/spacecraft_file.py @@ -4,7 +4,7 @@ from astropy.time import Time from astropy.coordinates import SkyCoord, EarthLocation, GCRS, ITRS -from histpy import Histogram, HealpixAxis, TimeAxis +from histpy import Histogram, TimeAxis from mhealpy import HealpixMap from scoords import Attitude, SpacecraftFrame @@ -124,7 +124,7 @@ def location(self)->EarthLocation: return self._location @classmethod - def parse_from_file(cls, file) -> "SpacecraftFile": + def open(cls, file) -> "SpacecraftFile": """ Parses timestamps, axis positions from file and returns to __init__. 
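For reference, a minimal usage sketch (not part of the patches; purely illustrative) of the renamed entry point, using the orientation file shipped with the cosipy test data and the attributes exercised by the tests updated below:

from cosipy import test_data, SpacecraftFile

# Parse a MEGAlib-style .ori orientation file. The generic name `open` leaves room
# for the official FITS format later, as noted in the commit message above.
ori = SpacecraftFile.open(test_data.path / "20280301_first_10sec.ori")

print(ori.obstime.unix[:3])   # parsed timestamps (unix seconds)
print(ori.livetime.sum())     # accumulated livetime over the file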
diff --git a/docs/tutorials/response/extended_source_response_generator.py b/docs/tutorials/response/extended_source_response_generator.py index 57143d91..9936ee57 100644 --- a/docs/tutorials/response/extended_source_response_generator.py +++ b/docs/tutorials/response/extended_source_response_generator.py @@ -16,7 +16,7 @@ # load response and orientation full_detector_response = FullDetectorResponse.open(full_detector_response_path) -orientation = SpacecraftFile.parse_from_file(orientation_path) +orientation = SpacecraftFile.open(orientation_path) # generate your extended source response extended_source_response = full_detector_response.get_extended_source_response(orientation, diff --git a/docs/tutorials/response/extended_source_response_generator_with_multiple_nodes.py b/docs/tutorials/response/extended_source_response_generator_with_multiple_nodes.py index 5843079a..7db71212 100644 --- a/docs/tutorials/response/extended_source_response_generator_with_multiple_nodes.py +++ b/docs/tutorials/response/extended_source_response_generator_with_multiple_nodes.py @@ -16,7 +16,7 @@ # load response and orientation full_detector_response = FullDetectorResponse.open(full_detector_response_path) -orientation = SpacecraftFile.parse_from_file(orientation_path) +orientation = SpacecraftFile.open(orientation_path) # set the healpix pixel index list ipix_image_list = [int(_) for _ in sys.argv[1:]] diff --git a/tests/image_deconvolution/test_coordsys_conversion_matrix.py b/tests/image_deconvolution/test_coordsys_conversion_matrix.py index 65bd7f49..0858cd27 100644 --- a/tests/image_deconvolution/test_coordsys_conversion_matrix.py +++ b/tests/image_deconvolution/test_coordsys_conversion_matrix.py @@ -12,7 +12,7 @@ def test_coordsys_conversion_matrix_time(tmp_path): full_detector_response = FullDetectorResponse.open(test_data.path / "test_full_detector_response.h5") - ori = SpacecraftFile.parse_from_file(test_data.path / "20280301_first_10sec.ori") + ori = SpacecraftFile.open(test_data.path / "20280301_first_10sec.ori") ccm = CoordsysConversionMatrix.time_binning_ccm(full_detector_response, ori, [ori.get_time()[0].value, ori.get_time()[-1].value] * u.s) diff --git a/tests/image_deconvolution/test_exposure_table.py b/tests/image_deconvolution/test_exposure_table.py index 91e632a3..34ce34f2 100644 --- a/tests/image_deconvolution/test_exposure_table.py +++ b/tests/image_deconvolution/test_exposure_table.py @@ -8,7 +8,7 @@ def test_exposure_table(tmp_path): nside = 1 - ori = SpacecraftFile.parse_from_file(test_data.path / "20280301_first_10sec.ori") + ori = SpacecraftFile.open(test_data.path / "20280301_first_10sec.ori") assert SpacecraftAttitudeExposureTable.analyze_orientation(ori, nside=nside, start=None, stop=ori.get_time()[-1], min_exposure=0, min_num_pointings=1) == None diff --git a/tests/polarization/test_polarization_asad.py b/tests/polarization/test_polarization_asad.py index 0a1618a4..4d8944fb 100644 --- a/tests/polarization/test_polarization_asad.py +++ b/tests/polarization/test_polarization_asad.py @@ -13,7 +13,7 @@ analysis = UnBinnedData(test_data.path / 'polarization_data.yaml') data = analysis.get_dict_from_hdf5(test_data.path / 'polarization_data.hdf5') response_path = test_data.path / 'test_polarization_response_dense.h5' -sc_orientation = SpacecraftFile.parse_from_file(test_data.path / 'polarization_ori.ori') +sc_orientation = SpacecraftFile.open(test_data.path / 'polarization_ori.ori') attitude = sc_orientation.get_attitude()[0] a = 10. 
* u.keV diff --git a/tests/response/test_full_detector_response.py b/tests/response/test_full_detector_response.py index a3489641..f58bf8e5 100644 --- a/tests/response/test_full_detector_response.py +++ b/tests/response/test_full_detector_response.py @@ -79,7 +79,7 @@ def test_get_interp_response(): def test_get_extended_source_response(): - orientation = SpacecraftFile.parse_from_file(orientation_path) + orientation = SpacecraftFile.open(orientation_path) with FullDetectorResponse.open(response_path) as response: @@ -98,7 +98,7 @@ def test_get_extended_source_response(): def test_merge_psr_to_extended_source_response(tmp_path): - orientation = SpacecraftFile.parse_from_file(orientation_path) + orientation = SpacecraftFile.open(orientation_path) with FullDetectorResponse.open(response_path) as response: diff --git a/tests/source_injector/test_source_injector.py b/tests/source_injector/test_source_injector.py index 4b21ed16..13af2e5b 100644 --- a/tests/source_injector/test_source_injector.py +++ b/tests/source_injector/test_source_injector.py @@ -15,7 +15,7 @@ def test_inject_point_source(): # defind the response and orientation response_path = test_data.path / "test_full_detector_response_dense.h5" orientation_path = test_data.path / "20280301_2s.ori" - ori = SpacecraftFile.parse_from_file(orientation_path) + ori = SpacecraftFile.open(orientation_path) # powerlaw model index = -2.2 diff --git a/tests/spacecraftfile/test_arf_rmf_converter.py b/tests/spacecraftfile/test_arf_rmf_converter.py index 4aab68f7..ed4abadd 100644 --- a/tests/spacecraftfile/test_arf_rmf_converter.py +++ b/tests/spacecraftfile/test_arf_rmf_converter.py @@ -14,7 +14,7 @@ def test_get_psr_rsp(): response_path = test_data.path / "test_full_detector_response.h5" response = FullDetectorResponse.open(response_path) ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) + ori = SpacecraftFile.open(ori_path) target_coord = SkyCoord(l=184.5551, b=-05.7877, unit=(u.deg, u.deg), frame="galactic") converter = RspArfRmfConverter(response, ori, target_coord) @@ -80,7 +80,7 @@ def test_get_arf(): response_path = test_data.path / "test_full_detector_response.h5" response = FullDetectorResponse.open(response_path) ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) + ori = SpacecraftFile.open(ori_path) target_coord = SkyCoord(l=184.5551, b=-05.7877, unit=(u.deg, u.deg), frame="galactic") converter = RspArfRmfConverter(response, ori, target_coord) @@ -107,7 +107,7 @@ def test_get_rmf(): response_path = test_data.path / "test_full_detector_response.h5" response = FullDetectorResponse.open(response_path) ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) + ori = SpacecraftFile.open(ori_path) target_coord = SkyCoord(l=184.5551, b=-05.7877, unit=(u.deg, u.deg), frame="galactic") converter = RspArfRmfConverter(response, ori, target_coord) @@ -203,7 +203,7 @@ def test_get_pha(): response_path = test_data.path / "test_full_detector_response.h5" response = FullDetectorResponse.open(response_path) ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) + ori = SpacecraftFile.open(ori_path) target_coord = SkyCoord(l=184.5551, b=-05.7877, unit=(u.deg, u.deg), frame="galactic") converter = RspArfRmfConverter(response, ori, target_coord) @@ -238,7 +238,7 @@ def test_plot_arf(): response_path = test_data.path / "test_full_detector_response.h5" 
response = FullDetectorResponse.open(response_path) ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) + ori = SpacecraftFile.open(ori_path) target_coord = SkyCoord(l=184.5551, b=-05.7877, unit=(u.deg, u.deg), frame="galactic") converter = RspArfRmfConverter(response, ori, target_coord) @@ -257,7 +257,7 @@ def test_plot_rmf(): response_path = test_data.path / "test_full_detector_response.h5" response = FullDetectorResponse.open(response_path) ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) + ori = SpacecraftFile.open(ori_path) target_coord = SkyCoord(l=184.5551, b=-05.7877, unit=(u.deg, u.deg), frame="galactic") converter = RspArfRmfConverter(response, ori, target_coord) diff --git a/tests/spacecraftfile/test_spacecraftfile.py b/tests/spacecraftfile/test_spacecraftfile.py index 61b3f870..e4f9508f 100644 --- a/tests/spacecraftfile/test_spacecraftfile.py +++ b/tests/spacecraftfile/test_spacecraftfile.py @@ -13,7 +13,7 @@ def test_get_time(): ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) + ori = SpacecraftFile.open(ori_path) assert np.allclose(ori.obstime.unix, [1835478000.0, 1835478001.0, 1835478002.0, @@ -25,7 +25,7 @@ def test_get_time(): def test_get_time_delta(): ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) + ori = SpacecraftFile.open(ori_path) time_delta = ori.intervals_duration.to_value(u.s) assert np.allclose(time_delta, np.array([1.000000, 1.000000, 1.000000, 1.000000, 1.000000, @@ -39,7 +39,7 @@ def test_get_time_delta(): def test_get_attitude(): ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) + ori = SpacecraftFile.open(ori_path) attitude = ori.attitude @@ -93,7 +93,7 @@ def test_get_attitude(): def test_get_target_in_sc_frame(): ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) + ori = SpacecraftFile.open(ori_path) target_coord = SkyCoord(l=184.5551, b = -05.7877, unit = (u.deg, u.deg), frame = "galactic") @@ -111,7 +111,7 @@ def test_get_target_in_sc_frame(): def test_get_dwell_map(): ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) + ori = SpacecraftFile.open(ori_path) target_coord = SkyCoord(l=184.5551, b = -05.7877, unit = (u.deg, u.deg), frame = "galactic") @@ -124,7 +124,7 @@ def test_get_dwell_map(): def test_select_interval(): ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.parse_from_file(ori_path) + ori = SpacecraftFile.open(ori_path) new_ori = ori.select_interval(ori.tstart+0.1*u.s, ori.tstart+2.1*u.s) diff --git a/tests/threeml/test_spectral_fitting.py b/tests/threeml/test_spectral_fitting.py index 781a21cf..a200648f 100644 --- a/tests/threeml/test_spectral_fitting.py +++ b/tests/threeml/test_spectral_fitting.py @@ -8,7 +8,7 @@ data_path = test_data.path -sc_orientation = SpacecraftFile.parse_from_file(data_path / "20280301_2s.ori") +sc_orientation = SpacecraftFile.open(data_path / "20280301_2s.ori") dr = str(data_path / "test_full_detector_response.h5") # path to detector response data = BinnedData(data_path / "test_spectral_fit.yaml") diff --git a/tests/ts_map/test_fast_ts_map.py b/tests/ts_map/test_fast_ts_map.py index a44582b5..5742a8f3 100644 --- a/tests/ts_map/test_fast_ts_map.py +++ b/tests/ts_map/test_fast_ts_map.py @@ -16,7 +16,7 @@ def 
test_parallel_ts_fit(): response_path = test_data.path / "test_full_detector_response.h5" orientation_path = test_data.path / "20280301_2s.ori" - ori = SpacecraftFile.parse_from_file(orientation_path) + ori = SpacecraftFile.open(orientation_path) src_bkg = Histogram.open(src_bkg_path).project(['Em', 'PsiChi', 'Phi']) bkg = Histogram.open(bkg_path).project(['Em', 'PsiChi', 'Phi']) @@ -121,7 +121,7 @@ def test_get_ei_cds_array_detector(): response_path = test_data.path / "test_full_detector_response.h5" orientation_path = test_data.path / "20280301_2s.ori" - ori = SpacecraftFile.parse_from_file(orientation_path) + ori = SpacecraftFile.open(orientation_path) index = -2.2 K = 10 / u.cm / u.cm / u.s / u.keV @@ -154,7 +154,7 @@ def test_fast_ts_fit(): bkg_path = test_data.path / "ts_map_bkg.h5" orientation_path = test_data.path / "20280301_2s.ori" - ori = SpacecraftFile.parse_from_file(orientation_path) + ori = SpacecraftFile.open(orientation_path) src_bkg = Histogram.open(src_bkg_path).project(['Em', 'PsiChi', 'Phi']) bkg = Histogram.open(bkg_path).project(['Em', 'PsiChi', 'Phi']) From dea56c162e824839602a8ecd61f8496c23862381 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Thu, 1 May 2025 11:22:54 -0400 Subject: [PATCH 027/133] Separate .ori parsing from future formats Signed-off-by: Israel Martinez --- cosipy/spacecraftfile/spacecraft_file.py | 30 ++++++++++++++++++++++-- 1 file changed, 28 insertions(+), 2 deletions(-) diff --git a/cosipy/spacecraftfile/spacecraft_file.py b/cosipy/spacecraftfile/spacecraft_file.py index f2a60db8..7a73f40e 100644 --- a/cosipy/spacecraftfile/spacecraft_file.py +++ b/cosipy/spacecraftfile/spacecraft_file.py @@ -1,9 +1,12 @@ +from pathlib import Path + import numpy as np import astropy.units as u from astropy.time import Time from astropy.coordinates import SkyCoord, EarthLocation, GCRS, ITRS +from docutils.io import Input from histpy import Histogram, TimeAxis from mhealpy import HealpixMap @@ -140,8 +143,20 @@ def open(cls, file) -> "SpacecraftFile": The SpacecraftFile object. """ - # Current SC format: - # 0: Always "OG" (for orbital geometry?) + file = Path(file) + + if file.suffix == ".ori": + return cls._parse_from_file(file) + else: + raise ValueError(f"File format for {file} not supported") + + @classmethod + def _parse_from_file(cls, file) -> "SpacecraftFile": + """ + Parses an .ori txt file with MEGAlib formatting. + + # Columns + # 0: Always "OG" (orbital geometry) # 1: obstime: timestamp in unix seconds # 2: lat_x: galactic latitude of SC x-axis (deg) # 3: lon_x: galactic longitude of SC x-axis (deg) @@ -152,6 +167,17 @@ def open(cls, file) -> "SpacecraftFile": # 8: Earth_lon: galactic longitude of the direction the Earth's zenith is pointing to at the SC location (deg) # 9: livetime (previously called SAA): accumulated uptime up to the following entry (seconds) + Parameters + ---------- + file: + Path to .ori file + + Returns + ------- + cosipy.spacecraftfile.spacecraft_file + The SpacecraftFile object. 
+ """ + time,lat_x,lon_x,lat_z,lon_z,altitude,earth_lat,earth_lon,livetime = orientation_file = np.loadtxt(file, usecols=(1, 2, 3, 4, 5, 6, 7, 8, 9), unpack = True, delimiter=' ', skiprows=1, comments=("#", "EN")) time = Time(time, format="unix") From 2e3b28c5134369774d41525fbdfc79785c06f272 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Fri, 2 May 2025 15:54:52 -0400 Subject: [PATCH 028/133] Fix bug on SpacecraftFile select_interval for edge cases Signed-off-by: Israel Martinez --- cosipy/spacecraftfile/spacecraft_file.py | 80 +++++++++++++++------ tests/spacecraftfile/test_spacecraftfile.py | 19 ++++- 2 files changed, 75 insertions(+), 24 deletions(-) diff --git a/cosipy/spacecraftfile/spacecraft_file.py b/cosipy/spacecraftfile/spacecraft_file.py index 7a73f40e..d4dc1100 100644 --- a/cosipy/spacecraftfile/spacecraft_file.py +++ b/cosipy/spacecraftfile/spacecraft_file.py @@ -6,7 +6,7 @@ from astropy.time import Time from astropy.coordinates import SkyCoord, EarthLocation, GCRS, ITRS -from docutils.io import Input +from mhealpy import HealpixBase from histpy import Histogram, TimeAxis from mhealpy import HealpixMap @@ -150,7 +150,10 @@ def open(cls, file) -> "SpacecraftFile": else: raise ValueError(f"File format for {file} not supported") + from line_profiler_pycharm import profile + @classmethod + @profile def _parse_from_file(cls, file) -> "SpacecraftFile": """ Parses an .ori txt file with MEGAlib formatting. @@ -372,35 +375,63 @@ def select_interval(self, start:Time = None, stop:Time = None) -> "SpacecraftFil start_points, start_weights = self.interp_weights(start) stop_points, stop_weights = self.interp_weights(stop) - start_attitude = self._interp_attitude(start_points, start_weights) - stop_attitude = self._interp_attitude(stop_points, stop_weights) + # Center values + new_obstime = self.obstime[start_points[1]:stop_points[1]] + new_attitude = self._attitude.as_matrix()[start_points[1]:stop_points[1]] + new_location = self._location[start_points[1]:stop_points[1]] + new_livetime = self.livetime[start_points[1]:stop_points[0]] - att_rot = self._attitude.as_matrix() - new_attitude = Attitude.from_matrix(np.append(start_attitude.as_matrix()[None], - np.append(att_rot[start_points[1]:stop_points[1]], - stop_attitude.as_matrix()[None], axis = 0), axis = 0), - frame = self._attitude.frame) + # Left edge + # new_obstime.size can be zero if the requested interval fell completely + # an existing interval + if new_obstime.size == 0 or new_obstime[0] != start: + # Left edge might be included already - start_location = self._interp_location(start_points, start_weights)[None] - stop_location = self._interp_location(stop_points, stop_weights)[None] - center_locations = self._location[start_points[1]:stop_points[1]] + new_obstime = Time(np.append(start.jd1, new_obstime.jd1), + np.append(start.jd2, new_obstime.jd2), + format = 'jd') + + start_attitude = self._interp_attitude(start_points, start_weights) + new_attitude = np.append(start_attitude.as_matrix()[None], new_attitude, axis=0) + + start_location = self._interp_location(start_points, start_weights)[None] + new_location = EarthLocation.from_geocentric(np.append(start_location.x, new_location.x), + np.append(start_location.y, new_location.y), + np.append(start_location.z, new_location.z)) - new_location = EarthLocation.from_geocentric(np.concatenate((start_location.x, center_locations.x, stop_location.x)), - np.concatenate((start_location.y, center_locations.y, stop_location.y)), - np.concatenate((start_location.z, center_locations.z, 
stop_location.z))) + first_livetime = self.livetime[start_points[0]] * start_weights[0] + new_livetime = np.append(first_livetime, new_livetime) - first_livetime = self.livetime[start_points[0]]*start_weights[1] - last_livetime = self.livetime[stop_points[0]]*stop_weights[1] + # Right edge + # It's never included, since stop <= self.obstime[stop_points[1]], and the + # selection above excludes stop_points[1] + new_obstime = Time(np.append(new_obstime.jd1, stop.jd1), + np.append(new_obstime.jd2, stop.jd2), + format='jd') - new_livetime = np.append(first_livetime, np.append(self.livetime[start_points[1]:stop_points[0]], last_livetime)) + stop_attitude = self._interp_attitude(stop_points, stop_weights) + new_attitude = np.append(new_attitude, stop_attitude.as_matrix()[None], axis=0) + new_attitude = Attitude.from_matrix(new_attitude, frame=self._attitude.frame) + + stop_location = self._interp_location(stop_points, stop_weights)[None] + new_location = EarthLocation.from_geocentric(np.append(new_location.x, stop_location.x), + np.append(new_location.y, stop_location.y), + np.append(new_location.z, stop_location.z)) - middle_times = self.obstime[start_points[1]:stop_points[1]] - new_time = Time(np.concatenate(([start.jd1], middle_times.jd1, [stop.jd1])), - np.concatenate(([start.jd2], middle_times.jd2, [stop.jd2])), - format = 'jd') + if np.all(start_points == stop_points): + # This can only happen if the requested interval fell completely + # an existing interval + new_livetime[0] -= self.livetime[stop_points[0]]*stop_weights[0] + else: + last_livetime = self.livetime[stop_points[0]]*stop_weights[1] + new_livetime = np.append(new_livetime, last_livetime) + + # We used the internal jd1 and jd2 values, which might have changed the format. + # Bring it back + new_obstime.format = self.obstime.format - return self.__class__(new_time, new_attitude, new_location, new_livetime) + return self.__class__(new_obstime, new_attitude, new_location, new_livetime) def get_target_in_sc_frame(self, target_coord: SkyCoord) -> SkyCoord: @@ -430,7 +461,7 @@ def get_target_in_sc_frame(self, target_coord: SkyCoord) -> SkyCoord: return src_path - def get_dwell_map(self, target_coord:SkyCoord, nside:int, scheme = 'ring') -> HealpixMap: + def get_dwell_map(self, target_coord:SkyCoord, nside:int, scheme = 'ring', base:HealpixBase = None) -> HealpixMap: """ Generates the dwell obstime map for the source. @@ -443,6 +474,8 @@ def get_dwell_map(self, target_coord:SkyCoord, nside:int, scheme = 'ring') -> He Healpix NSIDE scheme: Healpix pixel ordering scheme + base: + HealpixBase defining the grid. Alternative to nside & scheme. 
Returns ------- @@ -456,6 +489,7 @@ def get_dwell_map(self, target_coord:SkyCoord, nside:int, scheme = 'ring') -> He # Empty map dwell_map = HealpixMap(nside = nside, scheme = scheme, + base = base, coordsys = SpacecraftFrame()) # Fill diff --git a/tests/spacecraftfile/test_spacecraftfile.py b/tests/spacecraftfile/test_spacecraftfile.py index e4f9508f..dedafa8f 100644 --- a/tests/spacecraftfile/test_spacecraftfile.py +++ b/tests/spacecraftfile/test_spacecraftfile.py @@ -140,4 +140,21 @@ def test_select_interval(): assert np.allclose(np.asarray([z.transform_to('galactic').l.deg, z.transform_to('galactic').b.deg]).transpose().flatten(), np.array([221.86062093, 16.85631235, 221.88225011, 16.90482073, 221.90629597, 16.9587162 , 221.9087019 , 16.96410546])) - + + # Edge cases + new_ori = ori.select_interval(ori.tstart, ori.tstop) + assert np.all(new_ori.obstime == ori.obstime) + + new_ori = ori.select_interval(ori.obstime[1], ori.tstop) + assert np.all(new_ori.obstime == ori.obstime[1:]) + + new_ori = ori.select_interval(ori.tstart, ori.obstime[-2]) + assert np.all(new_ori.obstime == ori.obstime[:-1]) + + # Fully within single interval + new_ori = ori.select_interval(ori.tstart + .4*u.s, ori.tstart + .6*u.s) + assert new_ori.tstart == ori.tstart + .4*u.s + assert new_ori.tstop == ori.tstart + .6*u.s + assert new_ori.nintervals == 1 + assert np.isclose(new_ori.livetime[0], 0.2*u.s) + From 795bd4802f1e82f21abdd8149c0e6ab18e4ce10e Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Fri, 2 May 2025 16:54:09 -0400 Subject: [PATCH 029/133] Using pandas instead of np.loadtxt since it's way faster Signed-off-by: Israel Martinez --- cosipy/spacecraftfile/spacecraft_file.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/cosipy/spacecraftfile/spacecraft_file.py b/cosipy/spacecraftfile/spacecraft_file.py index d4dc1100..37d9327b 100644 --- a/cosipy/spacecraftfile/spacecraft_file.py +++ b/cosipy/spacecraftfile/spacecraft_file.py @@ -12,6 +12,8 @@ from scoords import Attitude, SpacecraftFrame +import pandas as pd + from .scatt_map import SpacecraftAttitudeMap from typing import Union @@ -150,10 +152,7 @@ def open(cls, file) -> "SpacecraftFile": else: raise ValueError(f"File format for {file} not supported") - from line_profiler_pycharm import profile - @classmethod - @profile def _parse_from_file(cls, file) -> "SpacecraftFile": """ Parses an .ori txt file with MEGAlib formatting. @@ -181,8 +180,14 @@ def _parse_from_file(cls, file) -> "SpacecraftFile": The SpacecraftFile object. """ - time,lat_x,lon_x,lat_z,lon_z,altitude,earth_lat,earth_lon,livetime = orientation_file = np.loadtxt(file, usecols=(1, 2, 3, 4, 5, 6, 7, 8, 9), unpack = True, - delimiter=' ', skiprows=1, comments=("#", "EN")) + # First and last line are read only by MEGAlib e.g. + # Type OrientationsGalactic + # ... + # EN + # Using [:-1] instead of skipfooter=1 because otherwise it's slow and you get + # ParserWarning: Falling back to the 'python' engine because the 'c' engine does not support skipfooter; you can avoid this warning by specifying engine='python'. 
+ time,lat_x,lon_x,lat_z,lon_z,altitude,earth_lat,earth_lon,livetime = pd.read_csv(file, sep="\s+", skiprows=1, usecols=(1, 2, 3, 4, 5, 6, 7, 8, 9), header = None, comment = '#').values[:-1].transpose() + time = Time(time, format="unix") xpointings = SkyCoord(l=lon_x * u.deg, b=lat_x * u.deg, frame="galactic") From eefb6f32bae99386ccc93d2804f3e72f5cdff6e0 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Fri, 2 May 2025 16:55:59 -0400 Subject: [PATCH 030/133] All elements are ready for a basic GRB fit. Still testing. Signed-off-by: Israel Martinez --- cosipy/background_estimation/__init__.py | 1 + cosipy/interfaces/background_interface.py | 2 +- cosipy/interfaces/expectation_interface.py | 16 +- cosipy/response/FullDetectorResponse.py | 3 + cosipy/response/PointSourceResponse.py | 44 ++++- cosipy/response/__init__.py | 1 + .../response/threeml_point_source_response.py | 154 ++++++++---------- cosipy/response/threeml_response.py | 40 +++-- cosipy/statistics/likelihood_functions.py | 13 +- cosipy/threeml/COSILike.py | 4 +- docs/api/interfaces/toy_interfaces_example.py | 24 ++- .../using_COSILike_with_interfaces.py | 105 ++++++++++-- 12 files changed, 284 insertions(+), 123 deletions(-) diff --git a/cosipy/background_estimation/__init__.py b/cosipy/background_estimation/__init__.py index 22ee8ac4..ee2ba681 100644 --- a/cosipy/background_estimation/__init__.py +++ b/cosipy/background_estimation/__init__.py @@ -1,2 +1,3 @@ from .LineBackgroundEstimation import LineBackgroundEstimation from .ContinuumEstimation import ContinuumEstimation +from .free_norm_threeml_binned_bkg import * diff --git a/cosipy/interfaces/background_interface.py b/cosipy/interfaces/background_interface.py index 9d20dd7c..b86f986c 100644 --- a/cosipy/interfaces/background_interface.py +++ b/cosipy/interfaces/background_interface.py @@ -30,8 +30,8 @@ class ThreeMLBackgroundInterface(BackgroundInterface, Protocol): """ This must translate to/from regular parameters with arbitrary type from/to 3ML parameters - """ + def set_threeml_parameters(self, **parameters: Parameter): """ The Parameter objects are passed "as reference", and can change. diff --git a/cosipy/interfaces/expectation_interface.py b/cosipy/interfaces/expectation_interface.py index 4e3e77dd..03502226 100644 --- a/cosipy/interfaces/expectation_interface.py +++ b/cosipy/interfaces/expectation_interface.py @@ -12,7 +12,21 @@ @runtime_checkable class BinnedExpectationInterface(Protocol): - def expectation(self, axes:histpy.Axes)->histpy.Histogram:... + def expectation(self, axes:histpy.Axes, copy:bool)->histpy.Histogram: + """ + + Parameters + ---------- + axes + copy: + If True, it will return an array that the user if free to modify. 
+ Otherwise, it will result a reference, possible to the cache, that + the user should not modify + + Returns + ------- + + """ @runtime_checkable class UnbinnedExpectationInterface(Protocol): diff --git a/cosipy/response/FullDetectorResponse.py b/cosipy/response/FullDetectorResponse.py index 3c939f25..c6fe3423 100644 --- a/cosipy/response/FullDetectorResponse.py +++ b/cosipy/response/FullDetectorResponse.py @@ -734,6 +734,9 @@ def axes(self): """ return self._axes + def measurement_axes(self): + return self.axes['Em', 'Phi', 'PsiChi'] + @property def unit(self): """ diff --git a/cosipy/response/PointSourceResponse.py b/cosipy/response/PointSourceResponse.py index 56eaaef5..39fd37dd 100644 --- a/cosipy/response/PointSourceResponse.py +++ b/cosipy/response/PointSourceResponse.py @@ -4,6 +4,8 @@ import astropy.units as u from scoords import SpacecraftFrame, Attitude +from astromodels.core.polarization import Polarization, LinearPolarization, StokesPolarization + from .functions import get_integrated_spectral_model import logging @@ -42,7 +44,10 @@ def photon_energy_axis(self): """ return self.axes['Ei'] - + + def measurement_axes(self): + return self.axes['Em', 'Phi', 'PsiChi'] + def get_expectation(self, spectrum, polarization=None): """ Convolve the response with a spectral (and optionally, polarization) hypothesis to obtain the expected @@ -61,6 +66,39 @@ def get_expectation(self, spectrum, polarization=None): Histogram with the expected counts on each analysis bin """ + # FIXME: the logic of this code block should be moved to 3ML. + # We want to see if the source is polarized, and if so, confirm + # transform to linear polarization. + # https://github.com/threeML/astromodels/blob/master/astromodels/core/polarization.py + if polarization is not None: + + if type(polarization) == Polarization: + # FIXME: Polarization is the base class, but a 3ML source + # with no polarization default to the base class. + # The base class shouldn't be able to be instantiated, + # and we should have a NullPolarization subclass or None + polarization = None + + elif isinstance(polarization, LinearPolarization): + + if polarization.degree.value is 0: + polarization = None + + elif isinstance(polarization, StokesPolarization): + + # FIXME: Here we should convert the any Stokes parameters to Linear + # The circular component looks like unpolarized to us. + # This conversion is not yet implemented in Astromodels + raise ValueError("Fix me. I can't handle StokesPolarization yet") + + else: + + if isinstance(polarization, Polarization): + raise TypeError(f"Fix me. 
I don't know how to handle this polarization type") + else: + raise TypeError(f"Polarization must be a Polarization subclass") + + if polarization is None: if 'Pol' in self.axes.labels: @@ -68,7 +106,6 @@ def get_expectation(self, spectrum, polarization=None): raise RuntimeError("Must include polarization in point source response if using polarization response") contents = self.contents - axes = self.axes[1:] else: @@ -92,7 +129,6 @@ def get_expectation(self, spectrum, polarization=None): contents = np.tensordot(weights, self.contents, axes=([0], [self.axes.label_to_index('Pol')])) - axes = self.axes['Em', 'Phi', 'PsiChi'] energy_axis = self.photon_energy_axis @@ -104,7 +140,7 @@ def get_expectation(self, spectrum, polarization=None): if self.is_sparse: expectation *= self.unit * flux.unit - hist = Histogram(axes, contents=expectation) + hist = Histogram(self.measurement_axes, contents=expectation) if not hist.unit == u.dimensionless_unscaled: raise RuntimeError("Expectation should be dimensionless, but has units of " + str(hist.unit) + ".") diff --git a/cosipy/response/__init__.py b/cosipy/response/__init__.py index 3c178596..57b3788c 100644 --- a/cosipy/response/__init__.py +++ b/cosipy/response/__init__.py @@ -3,3 +3,4 @@ from .FullDetectorResponse import FullDetectorResponse from .ExtendedSourceResponse import ExtendedSourceResponse from .threeml_response import * +from .threeml_point_source_response import * diff --git a/cosipy/response/threeml_point_source_response.py b/cosipy/response/threeml_point_source_response.py index 619a3a63..e366f0e9 100644 --- a/cosipy/response/threeml_point_source_response.py +++ b/cosipy/response/threeml_point_source_response.py @@ -1,3 +1,6 @@ +import logging +logger = logging.getLogger(__name__) + import copy from astromodels.sources import Source, PointSource @@ -8,10 +11,9 @@ from cosipy.response import FullDetectorResponse from cosipy.spacecraftfile import SpacecraftFile, SpacecraftAttitudeMap -__name__ = [] - from mhealpy import HealpixMap +__all__ = ["BinnedThreeMlPointSourceResponse"] class BinnedThreeMlPointSourceResponse(BinnedThreeMLSourceResponseInterface): """ @@ -34,23 +36,33 @@ def __init__(self, # TODO: FullDetectorResponse -> BinnedInstrumentResponseInterface self._dr = dr - self._sc_orientation = sc_orientation - - self._init_cache() + self._sc_ori = sc_orientation - def _init_cache(self): + self._source = None # Prevent unnecessary calculations and new memory allocations + + # See this issue for the caveats of comparing models + # https://github.com/threeML/threeML/issues/645 + self._last_convolved_source_dict = None + self._expectation = None - self._scatt_map = None - self._source = None + # The PSR change for each direction, but it's the same for all spectrum parameters + + # Source location cached separately since changing the response + # for a given direction is expensive + self._last_convolved_source_skycoord = None + + self._psr = None + + def clear_cache(self): - # TODO: currently Model.__eq__ seems broken. It returns True even - # if the internal parameters changed. 
Currently, caching only work - # for the source position, but everything related to spectral and - # polarization is recalculated even if it's still the same - self._last_convolved_source = None + self._source = None + self._last_convolved_source_dict = None + self._expectation = None + self._last_convolved_source_skycoord = None + self._psr = None def copy(self) -> "BinnedThreeMlPointSourceResponse": """ @@ -60,7 +72,7 @@ def copy(self) -> "BinnedThreeMlPointSourceResponse": A copy than can be used safely to convolve another source """ new = copy.copy(self) - new._init_cache() + new.clear_cache() return new def set_source(self, source: Source): @@ -74,10 +86,16 @@ def set_source(self, source: Source): self._source = source - def expectation(self, axes:Axes)-> Histogram: + def expectation(self, axes:Axes, copy = True)-> Histogram: # TODO: check coordsys from axis # TODO: Earth occ always true in this case + # See this issue for the caveats of comparing models + # https://github.com/threeML/threeML/issues/645 + source_dict = self._source.to_dict() + + coord = self._source.position.sky_coord + # Check if we can use these axes if 'PsiChi' not in axes.labels: raise ValueError("PsiChi axes not present") @@ -85,96 +103,58 @@ def expectation(self, axes:Axes)-> Histogram: if axes["PsiChi"].coordsys is None: raise ValueError("PsiChi axes doesn't have a coordinate system") - # Check what we can use from the cache - if self._expectation is None or self._expectation.axes != axes: - # Needs new memory allocation, and recompute everything - self._expectation = Histogram(axes) - else: - # If nothing has changed in the source, we can use the cached expectation - # as is. - # If the source has changed but the axes haven't, we can at least reuse - # it and prevent new memory allocation, we just need to zero it out - - # TODO: currently Source.__eq__ seems broken. It returns True even - # if some of the internal parameters changed. Caching the expected - # value is not implemented. Remove the "False and" when fixed - # Getting the source position explicitly does seem to work though - if False and (self._last_convolved_source == self._source): - return self._expectation + # Use cached expectation if nothing has changed + if self._expectation is not None and self._last_convolved_source_dict == source_dict: + if copy: + self._expectation.copy() else: - self._expectation.clear() + self._expectation # Expectation calculation # Check if the source position change, since these operations # are expensive - coord = self._source.position.sky_coord - if coord != self._last_convolved_source.position.sky_coord: + if self._psr is None or coord != self._last_convolved_source_skycoord: coordsys = axes["PsiChi"].coordsys + logger.info("... 
Calculating point source response ...") + if coordsys == 'spacecraftframe': - dwell_time_map = self._get_dwell_time_map(coord) - self._psr[name] = self._dr.get_point_source_response(exposure_map=dwell_time_map) - elif self._coordsys == 'galactic': - scatt_map = self._get_scatt_map(coord) - self._psr[name] = self._dr.get_point_source_response(coord=coord, scatt_map=scatt_map) + dwell_time_map = self._sc_ori.get_dwell_map(coord, base = self._dr) + self._psr = self._dr.get_point_source_response(exposure_map=dwell_time_map) + elif coordsys == 'galactic': + scatt_map = self._sc_ori.get_scatt_map(nside=self._dr.nside * 2, + target_coord=coord, + coordsys='galactic', + earth_occ = True) + self._psr = self._dr.get_point_source_response(coord=coord, scatt_map=scatt_map) else: raise RuntimeError("Unknown coordinate system") - return self._expectation - - coord = self._source.position.sky_coord - - - if self._last_convolved_source.position != : - - - self._last_convolved_source = copy.deepcopy(self._source) - - def _get_scatt_map(self, coord:SkyCoord)->SpacecraftAttitudeMap: - """ - Get the spacecraft attitude map of the source. - - Since we're accounting for Earth occultation, this is specific - to this coordinate - - Parameters - ---------- - coord : astropy.coordinates.SkyCoord - The coordinates of the target object. - - Returns - ------- - scatt_map : SpacecraftAttitudeMap - """ - - scatt_map = self._sc_orientation.get_scatt_map(nside=self._dr.nside * 2, target_coord=coord, - coordsys='galactic', earth_occ = True) - - return scatt_map - - def _get_dwell_time_map(self, coord: SkyCoord) -> HealpixMap: - """ - Get the dwell time map of the source. + logger.info(f"--> done (source name : {self._source.name})") - This is always specific to a coordinate. - Parameters - ---------- - coord : astropy.coordinates.SkyCoord - Coordinates of the target source - Returns - ------- - dwell_time_map : mhealpy.containers.healpix_map.HealpixMap - Dwell time map - """ + # Convolve with spectrum + self._expectation = self._psr.get_expectation(self._source.spectrum.main.shape, + self._source.spectrum.main.polarization) - self._sc_orientation.get_target_in_sc_frame(target_name=self._name, target_coord=coord) - dwell_time_map = self._sc_orientation.get_dwell_map(response=self._rsp_path) + # Check if axes match + if axes != self._expectation.axes: + raise ValueError( + "Currently, the expectation axes must exactly match the detector response measurement axes") - return dwell_time_map + # Cache. Use dict and copy since the internal variables can change + # See this issue for the caveats of comparing models + # https://github.com/threeML/threeML/issues/645 + self._last_convolved_source_dict = source_dict + self._last_convolved_source_skycoord = coord.copy() + # Copy to prevent user to modify our cache + if copy: + self._expectation.copy() + else: + self._expectation diff --git a/cosipy/response/threeml_response.py b/cosipy/response/threeml_response.py index e3cd6c2f..3e00bdde 100644 --- a/cosipy/response/threeml_response.py +++ b/cosipy/response/threeml_response.py @@ -34,10 +34,9 @@ def __init__(self, self._expectation = None self._model = None - # TODO: currently Model.__eq__ seems broken. It returns. True even - # if the internal parameters changed. Caching the expected value - # is not implemented. 
- self._last_convolved_model = None + # See this issue for the caveats of comparing models + # https://github.com/threeML/threeML/issues/645 + self._last_convolved_model_dict = None def set_model(self, model: Model): """ @@ -61,10 +60,18 @@ def set_model(self, model: Model): for name,source in model.sources.items(): if isinstance(source, PointSource): + + if self._psr is None: + raise RuntimeError("The model includes a point source but no point source response was provided") + psr_copy = self._psr.copy() psr_copy.set_source(source) self._source_responses[name] = psr_copy elif isinstance(source, ExtendedSource): + + if self._esr is None: + raise RuntimeError("The model includes an extended source but no extended source response was provided") + esr_copy = self._esr.copy() esr_copy.set_source(source) self._source_responses[name] = esr_copy @@ -75,17 +82,22 @@ def set_model(self, model: Model): self._model = model - def expectation(self, axes:Axes)->Histogram: + def expectation(self, axes:Axes, copy:bool = True)->Histogram: """ Parameters ---------- axes + copy Returns ------- """ + # See this issue for the caveats of comparing models + # https://github.com/threeML/threeML/issues/645 + current_model_dict = self._model.to_dict() + if self._expectation is None or self._expectation.axes != axes: # Needs new memory allocation, and recompute everything self._expectation = Histogram(axes) @@ -98,8 +110,11 @@ def expectation(self, axes:Axes)->Histogram: # TODO: currently Model.__eq__ seems broken. It returns. True even # if the internal parameters changed. Caching the expected value # is not implemented. Remove the "False and" when fixed - if False and (self._last_convolved_model == self._model): - return self._expectation + if self._last_convolved_model_dict == current_model_dict: + if copy: + self._expectation.copy() + else: + self._expectation else: self._expectation.clear() @@ -107,8 +122,11 @@ def expectation(self, axes:Axes)->Histogram: for source_name,psr in self._source_responses.items(): self._expectation += psr.expectation(axes) - # Get a copy with at model parameter values at the current time, - # not just a reference to the model object - self._last_convolved_model = copy.deepcopy(self._model) + # See this issue for the caveats of comparing models + # https://github.com/threeML/threeML/issues/645 + self._last_convolved_model_dict = current_model_dict - return self._expectation \ No newline at end of file + if copy: + self._expectation.copy() + else: + self._expectation \ No newline at end of file diff --git a/cosipy/statistics/likelihood_functions.py b/cosipy/statistics/likelihood_functions.py index ce34b394..748409b8 100644 --- a/cosipy/statistics/likelihood_functions.py +++ b/cosipy/statistics/likelihood_functions.py @@ -24,13 +24,20 @@ def __init__(self, self._bkg = bkg self._response = response + @property + def has_bkg(self): + return self._bkg is not None + def get_log_like(self) -> float: # Compute expectation including background - expectation = self._response.expectation(self._data.data.axes) + # If we don't have background, we won't modify the expectation, so + # it's safe to use the internal cache. 
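+        # (copy=True is requested only when a background term exists, because the
+        # in-place += below would otherwise modify the response's cached histogram.)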
+ expectation = self._response.expectation(self._data.data.axes, copy = self.has_bkg) - if self._bkg is not None: - expectation = expectation + self._bkg.expectation(self._data.data.axes) + if self.has_bkg: + # We won't modify the bkg expectation, so it's safe to use the internal cache + expectation += self._bkg.expectation(self._data.data.axes, copy = False) # Get the arrays expectation = expectation.contents diff --git a/cosipy/threeml/COSILike.py b/cosipy/threeml/COSILike.py index 2498017f..6545e4d1 100644 --- a/cosipy/threeml/COSILike.py +++ b/cosipy/threeml/COSILike.py @@ -22,7 +22,7 @@ def __init__(self, data response bkg - likefun: str or LikelihoodInterface (Use at your own risk. make sure uses data, response and bkg) + likefun: str or LikelihoodInterface (Use at your own risk. make sure it knows about the input data, response and bkg) """ # PluginPrototype.__init__ does the following: @@ -47,7 +47,7 @@ def __init__(self, @property def nuisance_parameters(self) -> Dict[str, Parameter]: - # Add plugin name, required by 3ML code + # Adds plugin name, required by 3ML code # See https://github.com/threeML/threeML/blob/7a16580d9d5ed57166e3b1eec3d4fccd3eeef1eb/threeML/classicMLE/joint_likelihood.py#L131 if self._bkg is None: return {} diff --git a/docs/api/interfaces/toy_interfaces_example.py b/docs/api/interfaces/toy_interfaces_example.py index 9ef16729..ff114c6c 100644 --- a/docs/api/interfaces/toy_interfaces_example.py +++ b/docs/api/interfaces/toy_interfaces_example.py @@ -1,6 +1,8 @@ from typing import Dict, Any from astromodels.sources import Source +from astromodels import LinearPolarization, SpectralComponent +from astromodels.core.polarization import Polarization from cosipy.threeml import COSILike from cosipy.interfaces import (BinnedDataInterface, @@ -139,6 +141,8 @@ def expectation(self, axes: Axes) -> Histogram: if self._source is None: raise RuntimeError("Set a source first") + print(self._source.to_dict()) + # Get the latest values of the flux # Remember that _model can be modified externally between calls. flux = self._source.spectrum.main.shape.k.value @@ -184,9 +188,22 @@ def expectation(self, axes: Axes) -> Histogram: ## Source model ## We'll just use the K value in u.cm / u.cm / u.s / u.keV spectrum = Constant() -source = PointSource("source", # arbitrary, but needs to be unique - l=0, b=0, # Doesn't matter - spectral_shape=spectrum) + +polarized = False + +if polarized: + polarization = LinearPolarization(10, 10) + polarization.degree.value = 0. + polarization.angle.value = 10 + + spectral_component = SpectralComponent('arbitrary_spectrum_name', spectrum, polarization) + source = PointSource('arbitrary_source_name', 0, 0, components=[spectral_component]) +else: + + source = PointSource("arbitrary_source_name", + l=0, b=0, # Doesn't matter + spectral_shape=spectrum) + model = Model(source) # Here you can set the parameters initial values, bounds, etc. 
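As an aside, here is a minimal, self-contained sketch (not taken from the patch; names are illustrative) of the snapshot-based caching pattern that the new BinnedThreeMlPointSourceResponse and BinnedThreeMLResponse classes above rely on: astromodels sources can compare as equal even after their parameter values change (see https://github.com/threeML/threeML/issues/645), so the code stores the result of to_dict() and reconvolves only when that snapshot changes.

class CachedConvolution:
    """Illustrative only: cache an expensive convolution keyed on source.to_dict()."""

    def __init__(self, compute):
        self._compute = compute          # expensive step, e.g. a response convolution
        self._last_source_dict = None    # snapshot of the source parameter values
        self._cached = None

    def expectation(self, source):
        source_dict = source.to_dict()   # nested dict of the current parameter values
        if self._cached is None or source_dict != self._last_source_dict:
            self._cached = self._compute(source)
            self._last_source_dict = source_dict   # dicts compare by value, so stale hits are avoided
        return self._cached

The patch applies the same idea twice: a full-model snapshot in BinnedThreeMLResponse, and a per-source snapshot plus a separately cached SkyCoord in BinnedThreeMlPointSourceResponse, since recomputing the point source response for a new direction is the expensive part.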
@@ -203,6 +220,7 @@ def expectation(self, axes: Axes) -> Histogram: plugins = DataList(cosi) like = JointLikelihood(model, plugins) like.fit() +print(like.minimizer) # Plot results fig, ax = plt.subplots() diff --git a/docs/api/interfaces/using_COSILike_with_interfaces.py b/docs/api/interfaces/using_COSILike_with_interfaces.py index 590a26d1..1c719b12 100644 --- a/docs/api/interfaces/using_COSILike_with_interfaces.py +++ b/docs/api/interfaces/using_COSILike_with_interfaces.py @@ -1,21 +1,37 @@ -from cosipy.threeml import COSILike -from cosipy.response import BinnedThreeMLResponse -from threeML import Band, PointSource, Model, JointLikelihood, DataList, Parameter +from histpy import Histogram +from cosipy.background_estimation import FreeNormThreeMLBinnedBackground +from cosipy.threeml import COSILike +from cosipy.response import BinnedThreeMLResponse, BinnedThreeMlPointSourceResponse +from threeML import Band, PointSource, Model, JointLikelihood, DataList +from cosipy.util import fetch_wasabi_file +from cosipy.spacecraftfile import SpacecraftFile from astropy import units as u -# Options for point sources -psr = BinnedThreeMlPointSourceResponse() +from cosipy import COSILike, BinnedData +from cosipy.spacecraftfile import SpacecraftFile +from cosipy.response.FullDetectorResponse import FullDetectorResponse +from cosipy.util import fetch_wasabi_file -psr = BinnedThreeMlPointSourceResponse +from scoords import SpacecraftFrame -# Option for extended sources -esr = BinnedThreeMLExtendedSourceResponse() +from astropy.time import Time +import astropy.units as u +from astropy.coordinates import SkyCoord +from astropy.stats import poisson_conf_interval -response = BinnedThreeMLResponse(point_source_response = psr, - extended_source_response = esr) +import numpy as np +import matplotlib.pyplot as plt + +from threeML import Band, PointSource, Model, JointLikelihood, DataList +from cosipy import Band_Eflux +from astromodels import Parameter + +from pathlib import Path + +import os -# Set model +# Set model to fit l = 93. b = -53. @@ -46,9 +62,76 @@ model = Model(source) # Model with single source. If we had multiple sources, we would do Model(source1, source2, ...) +# Data preparation +data_path = Path("") # /path/to/files. Current dir by default +# fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/grb_spectral_fit_local_frame/grb_bkg_binned_data.hdf5', output=str(data_path / 'grb_bkg_binned_data.hdf5'), checksum = 'fce391a4b45624b25552c7d111945f60') +# fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/grb_spectral_fit_local_frame/grb_binned_data.hdf5', output=str(data_path / 'grb_binned_data.hdf5'), checksum = 'fcf7022369b6fb378d67b780fc4b5db8') +# fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/grb_spectral_fit_local_frame/bkg_binned_data_1s_local.hdf5', output=str(data_path / 'bkg_binned_data_1s_local.hdf5'), checksum = 'b842a7444e6fc1a5dd567b395c36ae7f') + + +grb = BinnedData(data_path / "grb.yaml") +grb_bkg = BinnedData(data_path / "grb.yaml") +bkg = BinnedData(data_path / "background.yaml") + +grb.load_binned_data_from_hdf5(binned_data=data_path / "grb_binned_data.hdf5") +grb_bkg.load_binned_data_from_hdf5(binned_data=data_path / "grb_bkg_binned_data.hdf5") +bkg.load_binned_data_from_hdf5(binned_data=data_path / "bkg_binned_data_1s_local.hdf5") + +# Generate interface on the fly. All we need is to implement this method +# @property +# def data(self) -> histpy.Histogram:... 
+ +# We can move this to BinnedData later, but this showed the flexibility of using Protocols over abstract classes +data = grb_bkg.binned_data.project('Em', 'Phi', 'PsiChi') + +class BinnedDataAux: + @property + def data(self) -> Histogram: + return data + +data = BinnedDataAux() + +bkg_tmin = 1842597310.0 +bkg_tmax = 1842597550.0 +bkg_min = np.where(bkg.binned_data.axes['Time'].edges.value == bkg_tmin)[0][0] +bkg_max = np.where(bkg.binned_data.axes['Time'].edges.value == bkg_tmax)[0][0] +bkg = FreeNormThreeMLBinnedBackground(bkg.binned_data.slice[{'Time':slice(bkg_min,bkg_max)}].project('Em', 'Phi', 'PsiChi')) + +# Response preparation +# fetch_wasabi_file('COSI-SMEX/DC2/Data/Orientation/20280301_3_month_with_orbital_info.ori', output=str(data_path / '20280301_3_month_with_orbital_info.ori'), checksum = '416fcc296fc37a056a069378a2d30cb2') +# fetch_wasabi_file('COSI-SMEX/DC2/Responses/SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip', output=str(data_path / 'SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip'), unzip = True, checksum = 'e8ff763c5d9e63d3797567a4a51d9eda') + +# import cProfile +# cProfile.run('ori = SpacecraftFile.open(data_path / "20280301_3_month_with_orbital_info.ori")', filename = "prof.prof") +# exit() + +ori = SpacecraftFile.open(data_path / "20280301_3_month_with_orbital_info.ori") +tmin = Time(1842597410.0,format = 'unix') +tmax = Time(1842597450.0,format = 'unix') +sc_orientation = ori.select_interval(tmin, tmax) + +dr = FullDetectorResponse.open(data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5") + +# Options for point sources +psr = BinnedThreeMlPointSourceResponse(dr, ori) + +# Option for extended sources +# Not yet implemented +#esr = BinnedThreeMLExtendedSourceResponse() +esr = None + +response = BinnedThreeMLResponse(point_source_response = psr, + extended_source_response = esr) + + + # Optional: if you want to call get_log_like manually, then you also need to set the model manually # 3ML does this internally during the fit though cosi = COSILike('cosi', data, response, bkg) plugins = DataList(cosi) like = JointLikelihood(model, plugins) like.fit() + +results = like.results + +print(results.display()) \ No newline at end of file From 1441cf5c1e7cc3fa966e918409cab01063fa1791 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sat, 3 May 2025 07:08:05 -0400 Subject: [PATCH 031/133] Cut SC file from the beginning to make it faster Signed-off-by: Israel Martinez --- cosipy/spacecraftfile/spacecraft_file.py | 37 ++++++++++++++++++++---- 1 file changed, 32 insertions(+), 5 deletions(-) diff --git a/cosipy/spacecraftfile/spacecraft_file.py b/cosipy/spacecraftfile/spacecraft_file.py index 37d9327b..bf4e11fe 100644 --- a/cosipy/spacecraftfile/spacecraft_file.py +++ b/cosipy/spacecraftfile/spacecraft_file.py @@ -129,7 +129,7 @@ def location(self)->EarthLocation: return self._location @classmethod - def open(cls, file) -> "SpacecraftFile": + def open(cls, file, tstart:Time = None, tstop:Time = None) -> "SpacecraftFile": """ Parses timestamps, axis positions from file and returns to __init__. @@ -138,6 +138,12 @@ def open(cls, file) -> "SpacecraftFile": ---------- file : str The file path of the pointings. + tstart: + Start reading the file from an interval *including* this time. Use select_interval() to + cut the SC file at exactly this tiem. 
+ tstop: + Stop reading the file at an interval *including* this time. Use select_interval() to + cut the SC file at exactly this tiem. Returns ------- @@ -148,12 +154,14 @@ def open(cls, file) -> "SpacecraftFile": file = Path(file) if file.suffix == ".ori": - return cls._parse_from_file(file) + return cls._parse_from_file(file, tstart, tstop) else: raise ValueError(f"File format for {file} not supported") + from line_profiler_pycharm import profile @classmethod - def _parse_from_file(cls, file) -> "SpacecraftFile": + @profile + def _parse_from_file(cls, file, tstart:Time = None, tstop:Time = None) -> "SpacecraftFile": """ Parses an .ori txt file with MEGAlib formatting. @@ -186,10 +194,29 @@ def _parse_from_file(cls, file) -> "SpacecraftFile": # EN # Using [:-1] instead of skipfooter=1 because otherwise it's slow and you get # ParserWarning: Falling back to the 'python' engine because the 'c' engine does not support skipfooter; you can avoid this warning by specifying engine='python'. - time,lat_x,lon_x,lat_z,lon_z,altitude,earth_lat,earth_lon,livetime = pd.read_csv(file, sep="\s+", skiprows=1, usecols=(1, 2, 3, 4, 5, 6, 7, 8, 9), header = None, comment = '#').values[:-1].transpose() + # Read only the time column, to make it faster + time, = pd.read_csv(file, sep="\s+", skiprows=1, usecols=(1,), header=None, comment='#').values[:-1].transpose() time = Time(time, format="unix") + start_row = 0 + nrows = time.size + + if tstart is not None or tstop is not None: + + time_axis = TimeAxis(time, copy=False) + + if tstart is not None: + start_row = time_axis.find_bin(tstart) + + if tstop is not None: + nrows = time_axis.find_bin(tstop) - start_row + 2 + + time = time[slice(start_row, start_row+nrows)] + + skiprows = 1 + start_row + lat_x,lon_x,lat_z,lon_z,altitude,earth_lat,earth_lon,livetime = pd.read_csv(file, sep="\s+", skiprows=skiprows, nrows = nrows, usecols=(2, 3, 4, 5, 6, 7, 8, 9), header = None, comment = '#', ).values.transpose() + xpointings = SkyCoord(l=lon_x * u.deg, b=lat_x * u.deg, frame="galactic") zpointings = SkyCoord(l=lon_z * u.deg, b=lat_z * u.deg, frame="galactic") @@ -466,7 +493,7 @@ def get_target_in_sc_frame(self, target_coord: SkyCoord) -> SkyCoord: return src_path - def get_dwell_map(self, target_coord:SkyCoord, nside:int, scheme = 'ring', base:HealpixBase = None) -> HealpixMap: + def get_dwell_map(self, target_coord:SkyCoord, nside:int = None, scheme = 'ring', base:HealpixBase = None) -> HealpixMap: """ Generates the dwell obstime map for the source. From 3b81c91b43df414340d8c18352aac54654636f41 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sat, 3 May 2025 07:19:33 -0400 Subject: [PATCH 032/133] Slightly faster this way Signed-off-by: Israel Martinez --- cosipy/spacecraftfile/spacecraft_file.py | 30 ++++++++++++++---------- 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/cosipy/spacecraftfile/spacecraft_file.py b/cosipy/spacecraftfile/spacecraft_file.py index bf4e11fe..1b7dc147 100644 --- a/cosipy/spacecraftfile/spacecraft_file.py +++ b/cosipy/spacecraftfile/spacecraft_file.py @@ -195,27 +195,33 @@ def _parse_from_file(cls, file, tstart:Time = None, tstop:Time = None) -> "Space # Using [:-1] instead of skipfooter=1 because otherwise it's slow and you get # ParserWarning: Falling back to the 'python' engine because the 'c' engine does not support skipfooter; you can avoid this warning by specifying engine='python'. 
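The tstart/tstop handling introduced here keeps every row whose time interval touches the requested window, which is what the docstring means by "an interval *including* this time". A rough, standalone illustration of that bracketing logic with plain numpy and made-up timestamps (the actual code uses a TimeAxis.find_bin lookup and keeps one extra trailing row):

import numpy as np

timestamps = np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0])  # per-row times from an .ori file
tstart, tstop = 1.5, 3.5

start = np.searchsorted(timestamps, tstart, side='right') - 1  # row at or just before tstart
stop = np.searchsorted(timestamps, tstop, side='left')         # row at or just after tstop
print(timestamps[start:stop + 1])  # [1. 2. 3. 4.]: intervals [1,2], [2,3], [3,4] cover the window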
- # Read only the time column, to make it faster - time, = pd.read_csv(file, sep="\s+", skiprows=1, usecols=(1,), header=None, comment='#').values[:-1].transpose() - time = Time(time, format="unix") + time, lat_x,lon_x,lat_z,lon_z,altitude,earth_lat,earth_lon,livetime = pd.read_csv(file, sep="\s+", skiprows=1, usecols=(1, 2, 3, 4, 5, 6, 7, 8, 9), header = None, comment = '#', ).values[:-1].transpose() - start_row = 0 - nrows = time.size + time = Time(time, format="unix") if tstart is not None or tstop is not None: + # Cut early to skip some conversions later on + + start_idx = 0 + stop_idx = time.size time_axis = TimeAxis(time, copy=False) if tstart is not None: - start_row = time_axis.find_bin(tstart) + start_idx = time_axis.find_bin(tstart) if tstop is not None: - nrows = time_axis.find_bin(tstop) - start_row + 2 - - time = time[slice(start_row, start_row+nrows)] - - skiprows = 1 + start_row - lat_x,lon_x,lat_z,lon_z,altitude,earth_lat,earth_lon,livetime = pd.read_csv(file, sep="\s+", skiprows=skiprows, nrows = nrows, usecols=(2, 3, 4, 5, 6, 7, 8, 9), header = None, comment = '#', ).values.transpose() + stop_idx = time_axis.find_bin(tstop) + 2 + + time = time[start_idx:stop_idx] + lat_x = lat_x[start_idx:stop_idx] + lon_x = lon_x[start_idx:stop_idx] + lat_z = lat_z[start_idx:stop_idx] + lon_z = lon_z[start_idx:stop_idx] + altitude = altitude[start_idx:stop_idx] + earth_lat = earth_lat[start_idx:stop_idx] + earth_lon = earth_lon[start_idx:stop_idx] + livetime = livetime[start_idx:stop_idx] xpointings = SkyCoord(l=lon_x * u.deg, b=lat_x * u.deg, frame="galactic") zpointings = SkyCoord(l=lon_z * u.deg, b=lat_z * u.deg, frame="galactic") From 6719cb9108582880135b7f492029775bee455c75 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sun, 4 May 2025 10:41:23 -0400 Subject: [PATCH 033/133] Working version. Runs but some results are nan. Still checking. 
Signed-off-by: Israel Martinez --- cosipy/response/PointSourceResponse.py | 1 + .../response/threeml_point_source_response.py | 18 +- cosipy/response/threeml_response.py | 8 +- cosipy/spacecraftfile/spacecraft_file.py | 2 - .../using_COSILike_with_interfaces.py | 158 +++++++++--------- tests/spacecraftfile/test_spacecraftfile.py | 24 +++ 6 files changed, 119 insertions(+), 92 deletions(-) diff --git a/cosipy/response/PointSourceResponse.py b/cosipy/response/PointSourceResponse.py index 39fd37dd..d02613fe 100644 --- a/cosipy/response/PointSourceResponse.py +++ b/cosipy/response/PointSourceResponse.py @@ -45,6 +45,7 @@ def photon_energy_axis(self): return self.axes['Ei'] + @property def measurement_axes(self): return self.axes['Em', 'Phi', 'PsiChi'] diff --git a/cosipy/response/threeml_point_source_response.py b/cosipy/response/threeml_point_source_response.py index e366f0e9..0287cd61 100644 --- a/cosipy/response/threeml_point_source_response.py +++ b/cosipy/response/threeml_point_source_response.py @@ -4,7 +4,7 @@ import copy from astromodels.sources import Source, PointSource -from astropy.coordinates import SkyCoord +from scoords import SpacecraftFrame from histpy import Axes, Histogram from cosipy.interfaces import BinnedThreeMLSourceResponseInterface @@ -106,9 +106,9 @@ def expectation(self, axes:Axes, copy = True)-> Histogram: # Use cached expectation if nothing has changed if self._expectation is not None and self._last_convolved_source_dict == source_dict: if copy: - self._expectation.copy() + return self._expectation.copy() else: - self._expectation + return self._expectation # Expectation calculation @@ -120,17 +120,15 @@ def expectation(self, axes:Axes, copy = True)-> Histogram: logger.info("... Calculating point source response ...") - if coordsys == 'spacecraftframe': + if isinstance(coordsys, SpacecraftFrame): dwell_time_map = self._sc_ori.get_dwell_map(coord, base = self._dr) self._psr = self._dr.get_point_source_response(exposure_map=dwell_time_map) - elif coordsys == 'galactic': + else: scatt_map = self._sc_ori.get_scatt_map(nside=self._dr.nside * 2, target_coord=coord, - coordsys='galactic', + coordsys=coordsys, earth_occ = True) self._psr = self._dr.get_point_source_response(coord=coord, scatt_map=scatt_map) - else: - raise RuntimeError("Unknown coordinate system") logger.info(f"--> done (source name : {self._source.name})") @@ -153,8 +151,8 @@ def expectation(self, axes:Axes, copy = True)-> Histogram: # Copy to prevent user to modify our cache if copy: - self._expectation.copy() + return self._expectation.copy() else: - self._expectation + return self._expectation diff --git a/cosipy/response/threeml_response.py b/cosipy/response/threeml_response.py index 3e00bdde..05da26a3 100644 --- a/cosipy/response/threeml_response.py +++ b/cosipy/response/threeml_response.py @@ -112,9 +112,9 @@ def expectation(self, axes:Axes, copy:bool = True)->Histogram: # is not implemented. 
Remove the "False and" when fixed if self._last_convolved_model_dict == current_model_dict: if copy: - self._expectation.copy() + return self._expectation.copy() else: - self._expectation + return self._expectation else: self._expectation.clear() @@ -127,6 +127,6 @@ def expectation(self, axes:Axes, copy:bool = True)->Histogram: self._last_convolved_model_dict = current_model_dict if copy: - self._expectation.copy() + return self._expectation.copy() else: - self._expectation \ No newline at end of file + return self._expectation \ No newline at end of file diff --git a/cosipy/spacecraftfile/spacecraft_file.py b/cosipy/spacecraftfile/spacecraft_file.py index 1b7dc147..cb465edb 100644 --- a/cosipy/spacecraftfile/spacecraft_file.py +++ b/cosipy/spacecraftfile/spacecraft_file.py @@ -158,9 +158,7 @@ def open(cls, file, tstart:Time = None, tstop:Time = None) -> "SpacecraftFile": else: raise ValueError(f"File format for {file} not supported") - from line_profiler_pycharm import profile @classmethod - @profile def _parse_from_file(cls, file, tstart:Time = None, tstop:Time = None) -> "SpacecraftFile": """ Parses an .ori txt file with MEGAlib formatting. diff --git a/docs/api/interfaces/using_COSILike_with_interfaces.py b/docs/api/interfaces/using_COSILike_with_interfaces.py index 1c719b12..7c2efafa 100644 --- a/docs/api/interfaces/using_COSILike_with_interfaces.py +++ b/docs/api/interfaces/using_COSILike_with_interfaces.py @@ -31,107 +31,113 @@ import os -# Set model to fit -l = 93. -b = -53. +def main(): -alpha = -1 -beta = -3 -xp = 450. * u.keV -piv = 500. * u.keV -K = 1 / u.cm / u.cm / u.s / u.keV + # Set model to fit + l = 93. + b = -53. -spectrum = Band() + alpha = -1 + beta = -3 + xp = 450. * u.keV + piv = 500. * u.keV + K = 1 / u.cm / u.cm / u.s / u.keV -spectrum.beta.min_value = -15.0 + spectrum = Band() -spectrum.alpha.value = alpha -spectrum.beta.value = beta -spectrum.xp.value = xp.value -spectrum.K.value = K.value -spectrum.piv.value = piv.value + spectrum.beta.min_value = -15.0 -spectrum.xp.unit = xp.unit -spectrum.K.unit = K.unit -spectrum.piv.unit = piv.unit + spectrum.alpha.value = alpha + spectrum.beta.value = beta + spectrum.xp.value = xp.value + spectrum.K.value = K.value + spectrum.piv.value = piv.value -source = PointSource("source", # Name of source (arbitrary, but needs to be unique) - l = l, # Longitude (deg) - b = b, # Latitude (deg) - spectral_shape = spectrum) # Spectral model + spectrum.xp.unit = xp.unit + spectrum.K.unit = K.unit + spectrum.piv.unit = piv.unit -model = Model(source) # Model with single source. If we had multiple sources, we would do Model(source1, source2, ...) + source = PointSource("source", # Name of source (arbitrary, but needs to be unique) + l = l, # Longitude (deg) + b = b, # Latitude (deg) + spectral_shape = spectrum) # Spectral model -# Data preparation -data_path = Path("") # /path/to/files. 
Current dir by default -# fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/grb_spectral_fit_local_frame/grb_bkg_binned_data.hdf5', output=str(data_path / 'grb_bkg_binned_data.hdf5'), checksum = 'fce391a4b45624b25552c7d111945f60') -# fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/grb_spectral_fit_local_frame/grb_binned_data.hdf5', output=str(data_path / 'grb_binned_data.hdf5'), checksum = 'fcf7022369b6fb378d67b780fc4b5db8') -# fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/grb_spectral_fit_local_frame/bkg_binned_data_1s_local.hdf5', output=str(data_path / 'bkg_binned_data_1s_local.hdf5'), checksum = 'b842a7444e6fc1a5dd567b395c36ae7f') + model = Model(source) # Model with single source. If we had multiple sources, we would do Model(source1, source2, ...) + # Data preparation + data_path = Path("") # /path/to/files. Current dir by default + # fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/grb_spectral_fit_local_frame/grb_bkg_binned_data.hdf5', output=str(data_path / 'grb_bkg_binned_data.hdf5'), checksum = 'fce391a4b45624b25552c7d111945f60') + # fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/grb_spectral_fit_local_frame/grb_binned_data.hdf5', output=str(data_path / 'grb_binned_data.hdf5'), checksum = 'fcf7022369b6fb378d67b780fc4b5db8') + # fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/grb_spectral_fit_local_frame/bkg_binned_data_1s_local.hdf5', output=str(data_path / 'bkg_binned_data_1s_local.hdf5'), checksum = 'b842a7444e6fc1a5dd567b395c36ae7f') -grb = BinnedData(data_path / "grb.yaml") -grb_bkg = BinnedData(data_path / "grb.yaml") -bkg = BinnedData(data_path / "background.yaml") -grb.load_binned_data_from_hdf5(binned_data=data_path / "grb_binned_data.hdf5") -grb_bkg.load_binned_data_from_hdf5(binned_data=data_path / "grb_bkg_binned_data.hdf5") -bkg.load_binned_data_from_hdf5(binned_data=data_path / "bkg_binned_data_1s_local.hdf5") + grb = BinnedData(data_path / "grb.yaml") + grb_bkg = BinnedData(data_path / "grb.yaml") + bkg = BinnedData(data_path / "background.yaml") -# Generate interface on the fly. All we need is to implement this method -# @property -# def data(self) -> histpy.Histogram:... + grb.load_binned_data_from_hdf5(binned_data=data_path / "grb_binned_data.hdf5") + grb_bkg.load_binned_data_from_hdf5(binned_data=data_path / "grb_bkg_binned_data.hdf5") + bkg.load_binned_data_from_hdf5(binned_data=data_path / "bkg_binned_data_1s_local.hdf5") -# We can move this to BinnedData later, but this showed the flexibility of using Protocols over abstract classes -data = grb_bkg.binned_data.project('Em', 'Phi', 'PsiChi') + # Generate interface on the fly. All we need is to implement this method + # @property + # def data(self) -> histpy.Histogram:... 
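For orientation, the project('Em', 'Phi', 'PsiChi') call just below marginalizes the binned data over every axis that is not listed (here the Time axis). In plain numpy terms it is just a sum over the dropped axes; a toy illustration with made-up bin counts:

import numpy as np

counts = np.ones((4, 3))        # toy binned data: 4 Time bins x 3 Em bins
projected = counts.sum(axis=0)  # "project" onto Em by summing over Time
print(projected)                # [4. 4. 4.]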
-class BinnedDataAux: - @property - def data(self) -> Histogram: - return data + # We can move this to BinnedData later, but this showed the flexibility of using Protocols over abstract classes + data_hist = grb_bkg.binned_data.project('Em', 'Phi', 'PsiChi') -data = BinnedDataAux() + class BinnedDataAux: + @property + def data(self) -> Histogram: + return data_hist -bkg_tmin = 1842597310.0 -bkg_tmax = 1842597550.0 -bkg_min = np.where(bkg.binned_data.axes['Time'].edges.value == bkg_tmin)[0][0] -bkg_max = np.where(bkg.binned_data.axes['Time'].edges.value == bkg_tmax)[0][0] -bkg = FreeNormThreeMLBinnedBackground(bkg.binned_data.slice[{'Time':slice(bkg_min,bkg_max)}].project('Em', 'Phi', 'PsiChi')) + data = BinnedDataAux() -# Response preparation -# fetch_wasabi_file('COSI-SMEX/DC2/Data/Orientation/20280301_3_month_with_orbital_info.ori', output=str(data_path / '20280301_3_month_with_orbital_info.ori'), checksum = '416fcc296fc37a056a069378a2d30cb2') -# fetch_wasabi_file('COSI-SMEX/DC2/Responses/SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip', output=str(data_path / 'SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip'), unzip = True, checksum = 'e8ff763c5d9e63d3797567a4a51d9eda') + bkg_tmin = 1842597310.0 + bkg_tmax = 1842597550.0 + bkg_min = np.where(bkg.binned_data.axes['Time'].edges.value == bkg_tmin)[0][0] + bkg_max = np.where(bkg.binned_data.axes['Time'].edges.value == bkg_tmax)[0][0] + bkg = FreeNormThreeMLBinnedBackground(bkg.binned_data.slice[{'Time':slice(bkg_min,bkg_max)}].project('Em', 'Phi', 'PsiChi')) -# import cProfile -# cProfile.run('ori = SpacecraftFile.open(data_path / "20280301_3_month_with_orbital_info.ori")', filename = "prof.prof") -# exit() + # Response preparation + # fetch_wasabi_file('COSI-SMEX/DC2/Data/Orientation/20280301_3_month_with_orbital_info.ori', output=str(data_path / '20280301_3_month_with_orbital_info.ori'), checksum = '416fcc296fc37a056a069378a2d30cb2') + # fetch_wasabi_file('COSI-SMEX/DC2/Responses/SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip', output=str(data_path / 'SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip'), unzip = True, checksum = 'e8ff763c5d9e63d3797567a4a51d9eda') -ori = SpacecraftFile.open(data_path / "20280301_3_month_with_orbital_info.ori") -tmin = Time(1842597410.0,format = 'unix') -tmax = Time(1842597450.0,format = 'unix') -sc_orientation = ori.select_interval(tmin, tmax) + tmin = Time(1842597410.0, format='unix') + tmax = Time(1842597450.0, format='unix') + ori = SpacecraftFile.open(data_path / "20280301_3_month_with_orbital_info.ori", tmin, tmax) + sc_orientation = ori.select_interval(tmin, tmax) -dr = FullDetectorResponse.open(data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5") + dr = FullDetectorResponse.open(data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5") -# Options for point sources -psr = BinnedThreeMlPointSourceResponse(dr, ori) + # Options for point sources + psr = BinnedThreeMlPointSourceResponse(dr, ori) -# Option for extended sources -# Not yet implemented -#esr = BinnedThreeMLExtendedSourceResponse() -esr = None + # Option for extended sources + # Not yet implemented + #esr = 
BinnedThreeMLExtendedSourceResponse() + esr = None -response = BinnedThreeMLResponse(point_source_response = psr, - extended_source_response = esr) + response = BinnedThreeMLResponse(point_source_response = psr, + extended_source_response = esr) -# Optional: if you want to call get_log_like manually, then you also need to set the model manually -# 3ML does this internally during the fit though -cosi = COSILike('cosi', data, response, bkg) -plugins = DataList(cosi) -like = JointLikelihood(model, plugins) -like.fit() + # Optional: if you want to call get_log_like manually, then you also need to set the model manually + # 3ML does this internally during the fit though + cosi = COSILike('cosi', data, response, bkg) + plugins = DataList(cosi) + like = JointLikelihood(model, plugins) + like.fit() -results = like.results + results = like.results -print(results.display()) \ No newline at end of file + print(results.display()) + +if __name__ == "__main__": + + import cProfile + cProfile.run('main()', filename = "prof.prof") + exit() + + main() \ No newline at end of file diff --git a/tests/spacecraftfile/test_spacecraftfile.py b/tests/spacecraftfile/test_spacecraftfile.py index dedafa8f..19e24efb 100644 --- a/tests/spacecraftfile/test_spacecraftfile.py +++ b/tests/spacecraftfile/test_spacecraftfile.py @@ -22,6 +22,30 @@ def test_get_time(): 1835478009.0, 1835478010.0]) +def test_read_only_selected_range(): + + ori_path = test_data.path / "20280301_first_10sec.ori" + + ori = SpacecraftFile.open(ori_path, + tstart=Time(1835478002.0, format = 'unix'), + tstop = Time(1835478008.0, format='unix') + ) + + assert np.allclose(ori.obstime.unix, + [1835478002.0, + 1835478003.0, 1835478004.0, 1835478005.0, + 1835478006.0, 1835478007.0, 1835478008.0, 1835478009.0]) + + ori = SpacecraftFile.open(ori_path, + tstart=Time(1835478002.5, format = 'unix'), + tstop = Time(1835478007.5, format='unix') + ) + + assert np.allclose(ori.obstime.unix, + [1835478002.0, + 1835478003.0, 1835478004.0, 1835478005.0, + 1835478006.0, 1835478007.0, 1835478008.0]) + def test_get_time_delta(): ori_path = test_data.path / "20280301_first_10sec.ori" From a7b1d4ffec2b54cab3c8ba2b1c7c89f6492a5c46 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sun, 4 May 2025 10:55:20 -0400 Subject: [PATCH 034/133] Move 3ML plugin in preparation for generic plugin Signed-off-by: Israel Martinez --- .../COSILike.py => interfaces/threeml_plugin_interface.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename cosipy/{threeml/COSILike.py => interfaces/threeml_plugin_interface.py} (100%) diff --git a/cosipy/threeml/COSILike.py b/cosipy/interfaces/threeml_plugin_interface.py similarity index 100% rename from cosipy/threeml/COSILike.py rename to cosipy/interfaces/threeml_plugin_interface.py From b604db05f01fa15cc617a4ad450a03840678c361 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sun, 4 May 2025 10:56:17 -0400 Subject: [PATCH 035/133] Forgot to commit this one on 6719cb9108582880135b7f492029775bee455c75 Signed-off-by: Israel Martinez --- .../free_norm_threeml_binned_bkg.py | 185 ++++++++++++++++++ 1 file changed, 185 insertions(+) create mode 100644 cosipy/background_estimation/free_norm_threeml_binned_bkg.py diff --git a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py new file mode 100644 index 00000000..2f1b252a --- /dev/null +++ b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py @@ -0,0 +1,185 @@ +from typing import Dict, Tuple, Union, Any + +import 
numpy as np +from astromodels import Parameter +from histpy import Histogram +from histpy import Axes + +from cosipy.interfaces import ThreeMLBinnedBackgroundInterface, BinnedBackgroundInterface + +__all__ = ["FreeNormBinnedBackground", + "FreeNormThreeMLBinnedBackground"] + +class FreeNormBinnedBackground(BinnedBackgroundInterface): + """ + This must translate to/from regular parameters + with arbitrary type from/to 3ML parameters + + """ + + def __init__(self, *args:Tuple[Histogram], **kwargs:Dict[str, Histogram]): + + self._components = {} + + for n,bkg in enumerate(args): + self._components[self._standardized_label(n)] = bkg + + for label, bkg in kwargs.items(): + if label in self.labels: + raise ValueError("Repeated bkg component label.") + + self._components[label] = bkg + + # These will be densify anyway since _expectation is dense + # And histpy doesn't yet handle this operation efficiently + # See Histogram._inplace_operation_handle_sparse() + # Do it once and for all + for label, bkg in self._components.items(): + if bkg.is_sparse: + self._components[label] = bkg.to_dense() + + if self.ncomponents == 0: + raise ValueError("You need to input at least one components") + + self._axes = None + for bkg in self._components.values(): + if self._axes is None: + self._axes = bkg.axes + else: + if self._axes != bkg.axes: + raise ValueError("All background components mus have the same axes") + + self._norms = {l:1 for l in self.labels} + + # Cache + self._expectation = None + self._last_norm_values = None + + def _standardized_label(self, label:Union[str, int]): + if isinstance(label, str): + return label + else: + return f"bkg{label}" + + @property + def norm(self): + + if self.ncomponents != 1: + raise RuntimeError("This property can only be used for single-component models") + + return next(iter(self._norms.values())) + + @property + def norms(self): + return self._norms.values() + + @property + def ncomponents(self): + return len(self._components) + + @property + def meausured_axes(self): + return self._axes + + @property + def labels(self): + return self._components.keys() + + def set_norm(self, *args, **kwargs): + + for n,norm in enumerate(args): + self._set_norm(n, norm) + + for label, bkg in kwargs.items(): + self._set_norm(label, norm) + + def _set_norm(self, label, norm): + + label = self._standardized_label(label) + + if label in self.labels: + raise RuntimeError(f"Component {label} doesn't exist") + + self._norms[label] = norm + + def set_parameters(self, **parameters:Any) -> None: + """ + Same keys as background components + """ + + self.set_norm(**parameters) + + @property + def parameters(self) -> Dict[str, Any]: + + return self._norms + + def expectation(self, axes:Axes, copy:bool)->Histogram: + """ + + Parameters + ---------- + axes + copy: + If True, it will return an array that the user if free to modify. + Otherwise, it will result a reference, possible to the cache, that + the user should not modify + + Returns + ------- + + """ + + if axes != self.meausured_axes: + raise ValueError("Requested axes do not match the background component axes") + + # Check if we can use the cache + if self._expectation is None: + # First call. Initialize + self._expectation = Histogram(self.meausured_axes) + + elif self._norms == self._last_norm_values: + # No changes. Use cache + if copy: + return self._expectation.copy() + else: + return self._expectation + + else: + # First call or norms have change. 
Recalculate + self._expectation.clear() + + # Compute expectation + for label in self.labels: + self._expectation += self._norms[label] * self._components[label] + + # Cache. Regular copy is enough since norm values are float en not mutable + self._last_norm_values = self._norms.copy() + + if copy: + return self._expectation.copy() + else: + return self._expectation + +class FreeNormThreeMLBinnedBackground(FreeNormBinnedBackground, ThreeMLBinnedBackgroundInterface): + + def __init__(self, *args:Tuple[Histogram], **kwargs:Dict[str, Histogram]): + + super().__init__(*args, **kwargs) + + # 3ML "Parameter" keeps track of a few more things than + # a "bare" parameter. + self._threeml_parameters = {label:Parameter(label, norm) for label,norm in self._norms.items()} + + def set_threeml_parameters(self, **parameters: Parameter): + self._threeml_parameters = parameters + self.set_parameters(**{label:parameter.value for label,parameter in parameters.items()}) + + @property + def threeml_parameters(self) -> Dict[str, Parameter]: + return self._threeml_parameters + + + + + From b5262554f9bf87588af6f3cf903978d6f7030591 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sun, 4 May 2025 11:34:41 -0400 Subject: [PATCH 036/133] Finish moving to generic plugin. Work as well as before. Still some nans though. Signed-off-by: Israel Martinez --- cosipy/__init__.py | 1 - cosipy/interfaces/__init__.py | 1 + cosipy/interfaces/expectation_interface.py | 7 ++- cosipy/interfaces/likelihood_interface.py | 18 ++++++-- cosipy/interfaces/threeml_plugin_interface.py | 43 ++++++------------- cosipy/statistics/likelihood_functions.py | 7 +++ cosipy/threeml/__init__.py | 1 - cosipy/ts_map/TSMap.py | 2 - docs/api/interfaces/toy_interfaces_example.py | 25 ++++++----- .../using_COSILike_with_interfaces.py | 7 +-- 10 files changed, 58 insertions(+), 54 deletions(-) diff --git a/cosipy/__init__.py b/cosipy/__init__.py index 48489fc5..27a1e1ef 100644 --- a/cosipy/__init__.py +++ b/cosipy/__init__.py @@ -9,7 +9,6 @@ from .data_io import BinnedData from .data_io import ReadTraTest -from .threeml import COSILike from .threeml import Band_Eflux from .ts_map import FastTSMap diff --git a/cosipy/interfaces/__init__.py b/cosipy/interfaces/__init__.py index 203c3353..2b0e919e 100644 --- a/cosipy/interfaces/__init__.py +++ b/cosipy/interfaces/__init__.py @@ -4,5 +4,6 @@ from .likelihood_interface import * from .expectation_interface import * from .source_response_interface import * +from .threeml_plugin_interface import * diff --git a/cosipy/interfaces/expectation_interface.py b/cosipy/interfaces/expectation_interface.py index 03502226..562bdd0c 100644 --- a/cosipy/interfaces/expectation_interface.py +++ b/cosipy/interfaces/expectation_interface.py @@ -11,7 +11,10 @@ ] @runtime_checkable -class BinnedExpectationInterface(Protocol): +class ExpectationInterface(Protocol):... + +@runtime_checkable +class BinnedExpectationInterface(ExpectationInterface, Protocol): def expectation(self, axes:histpy.Axes, copy:bool)->histpy.Histogram: """ @@ -29,7 +32,7 @@ def expectation(self, axes:histpy.Axes, copy:bool)->histpy.Histogram: """ @runtime_checkable -class UnbinnedExpectationInterface(Protocol): +class UnbinnedExpectationInterface(ExpectationInterface, Protocol): @property def ncounts(self) -> float:... def probability(self, measurements:Measurements) -> np.ndarray:... 
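The copy flag that appears throughout these interfaces encodes one convention: expensive expectations are computed once and cached, read-only callers may take the cached reference, and callers that might modify the result ask for a copy. A schematic, standalone sketch of that idiom (class and attribute names are illustrative, not cosipy code):

import numpy as np

class CachedExpectation:
    def __init__(self, template):
        self._template = np.asarray(template, dtype=float)
        self._norm = 1.0
        self._cache = None
        self._cached_norm = None

    def set_norm(self, norm):
        self._norm = norm

    def expectation(self, copy=True):
        # recompute only when the parameter actually changed
        if self._cache is None or self._cached_norm != self._norm:
            self._cache = self._norm * self._template
            self._cached_norm = self._norm
        # hand out the cache itself only to callers that promise not to modify it
        return self._cache.copy() if copy else self._cache

exp = CachedExpectation([1.0, 2.0, 3.0])
exp.set_norm(2.0)
print(exp.expectation())                                           # [2. 4. 6.]
print(exp.expectation(copy=False) is exp.expectation(copy=False))  # True: same cached array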
diff --git a/cosipy/interfaces/likelihood_interface.py b/cosipy/interfaces/likelihood_interface.py index b54d9bb4..77ba442c 100644 --- a/cosipy/interfaces/likelihood_interface.py +++ b/cosipy/interfaces/likelihood_interface.py @@ -4,16 +4,28 @@ 'BinnedLikelihoodInterface', 'UnbinnedLikelihoodInterface'] -from .expectation_interface import UnbinnedExpectationInterface, BinnedExpectationInterface -from .data_interface import UnbinnedDataInterface, BinnedDataInterface -from .background_interface import UnbinnedBackgroundInterface, BinnedBackgroundInterface +from .expectation_interface import UnbinnedExpectationInterface, BinnedExpectationInterface, ExpectationInterface +from .data_interface import UnbinnedDataInterface, BinnedDataInterface, DataInterface +from .background_interface import UnbinnedBackgroundInterface, BinnedBackgroundInterface, BackgroundInterface, \ + ThreeMLBackgroundInterface @runtime_checkable class LikelihoodInterface(Protocol): + def __init__(self, + data: DataInterface, + response: ExpectationInterface, + bkg: BackgroundInterface, + *args, **kwargs):... def get_log_like(self) -> float:... @property def nobservations(self) -> int: """For BIC and other statistics""" + @property + def data (self) -> DataInterface: ... + @property + def response(self) -> ExpectationInterface: ... + @property + def bkg (self) -> BackgroundInterface: ... @runtime_checkable class BinnedLikelihoodInterface(LikelihoodInterface, Protocol): diff --git a/cosipy/interfaces/threeml_plugin_interface.py b/cosipy/interfaces/threeml_plugin_interface.py index 6545e4d1..55d05385 100644 --- a/cosipy/interfaces/threeml_plugin_interface.py +++ b/cosipy/interfaces/threeml_plugin_interface.py @@ -1,27 +1,19 @@ from typing import Dict + +from cosipy.interfaces.likelihood_interface import LikelihoodInterface from threeML import PluginPrototype, Parameter -from cosipy.statistics import UnbinnedLikelihood, PoissonLikelihood -from cosipy.interfaces import (DataInterface, - ThreeMLModelResponseInterface, - ThreeMLBackgroundInterface, - LikelihoodInterface, ThreeMLBinnedBackgroundInterface) -class COSILike(PluginPrototype): +__all__ = ["ThreeMLPluginInterface"] + +class ThreeMLPluginInterface(PluginPrototype): def __init__(self, - name, - data: DataInterface, - response: ThreeMLModelResponseInterface, - bkg: ThreeMLBackgroundInterface, - likelihood = 'poisson'): + name: str, likelihood: LikelihoodInterface): """ Parameters ---------- name - data - response - bkg likefun: str or LikelihoodInterface (Use at your own risk. 
make sure it knows about the input data, response and bkg) """ @@ -32,39 +24,28 @@ def __init__(self, # we're overriding nuisance_parameters() and update_nuisance_parameters() super().__init__(name, {}) - self._bkg = bkg - self._response = response - - if isinstance(likelihood, LikelihoodInterface): - # Use user's likelihood at their own risk - self._like = likelihood - elif likelihood == 'poisson': - self._like = PoissonLikelihood(data, response, bkg) - elif likelihood == 'unbinned': - self._like = UnbinnedLikelihood(data, response, bkg) - else: - raise ValueError(f"Likelihood function \"{likelihood}\" not supported") + self._like = likelihood @property def nuisance_parameters(self) -> Dict[str, Parameter]: # Adds plugin name, required by 3ML code # See https://github.com/threeML/threeML/blob/7a16580d9d5ed57166e3b1eec3d4fccd3eeef1eb/threeML/classicMLE/joint_likelihood.py#L131 - if self._bkg is None: + if self._like.bkg is None: return {} else: - return {self._name + "_" + l:p for l,p in self._bkg.threeml_parameters.items()} + return {self._name + "_" + l:p for l,p in self._like.bkg.threeml_parameters.items()} def update_nuisance_parameters(self, new_nuisance_parameters: Dict[str, Parameter]): # Remove plugin name. Opposite of the nuisance_parameters property - if self._bkg is not None: + if self._like.bkg is not None: new_nuisance_parameters = {l[len(self._name)+1:]:p for l,p in new_nuisance_parameters.items()} - self._bkg.set_threeml_parameters(**new_nuisance_parameters) + self._like.bkg.set_threeml_parameters(**new_nuisance_parameters) def get_number_of_data_points(self) -> int: return self._like.nobservations def set_model(self, model): - self._response.set_model(model) + self._like.response.set_model(model) def get_log_like(self): return self._like.get_log_like() diff --git a/cosipy/statistics/likelihood_functions.py b/cosipy/statistics/likelihood_functions.py index 748409b8..2aa71b27 100644 --- a/cosipy/statistics/likelihood_functions.py +++ b/cosipy/statistics/likelihood_functions.py @@ -24,6 +24,13 @@ def __init__(self, self._bkg = bkg self._response = response + @property + def data (self) -> BinnedDataInterface: return self._data + @property + def response(self) -> BinnedExpectationInterface: return self._response + @property + def bkg (self) -> BinnedBackgroundInterface: return self._bkg + @property def has_bkg(self): return self._bkg is not None diff --git a/cosipy/threeml/__init__.py b/cosipy/threeml/__init__.py index e5bf2d12..a6eaca5e 100644 --- a/cosipy/threeml/__init__.py +++ b/cosipy/threeml/__init__.py @@ -1,2 +1 @@ -from .COSILike import COSILike from .custom_functions import Band_Eflux diff --git a/cosipy/ts_map/TSMap.py b/cosipy/ts_map/TSMap.py index c5017aa5..c463a698 100644 --- a/cosipy/ts_map/TSMap.py +++ b/cosipy/ts_map/TSMap.py @@ -1,5 +1,3 @@ -from cosipy.threeml.COSILike import COSILike - from threeML import DataList, Powerlaw, PointSource, Model, JointLikelihood import numpy as np diff --git a/docs/api/interfaces/toy_interfaces_example.py b/docs/api/interfaces/toy_interfaces_example.py index ff114c6c..7544b837 100644 --- a/docs/api/interfaces/toy_interfaces_example.py +++ b/docs/api/interfaces/toy_interfaces_example.py @@ -4,13 +4,15 @@ from astromodels import LinearPolarization, SpectralComponent from astromodels.core.polarization import Polarization -from cosipy.threeml import COSILike +from cosipy.statistics import UnbinnedLikelihood, PoissonLikelihood + from cosipy.interfaces import (BinnedDataInterface, BinnedBackgroundInterface, 
ThreeMLBinnedBackgroundInterface, BinnedThreeMLModelResponseInterface, BinnedThreeMLSourceResponseInterface, - ThreeMLSourceResponseInterface) + ThreeMLSourceResponseInterface, + ThreeMLPluginInterface) from histpy import Axis, Axes, Histogram import numpy as np from scipy.stats import norm, uniform @@ -78,11 +80,12 @@ def set_parameters(self, **parameters:Any) -> None: def parameters(self) -> Dict[str, Any]: return {'norm': self._norm} - def expectation(self, axes: Axes) -> Histogram: + def expectation(self, axes: Axes, copy = True) -> Histogram: if axes != self._unit_expectation.axes: raise ValueError("Wrong axes. I have fixed axes.") + # Always a copy return self._unit_expectation * self._norm @@ -100,12 +103,12 @@ def __init__(self): # a "bare" parameter. self._threeml_parameters = {'norm':Parameter('norm', self._norm)} - def expectation(self, axes: Axes) -> Histogram: + def expectation(self, axes: Axes, copy = True) -> Histogram: # Overrides ToyBkg expectation # Update, inn case it changed externally self.set_parameters(norm = self._threeml_parameters['norm'].value) - return super().expectation(axes) + return super().expectation(axes, copy = copy) @property def threeml_parameters(self) -> Dict[str, Parameter]: @@ -134,19 +137,18 @@ def set_source(self, source: Source): self._source = source - def expectation(self, axes: Axes) -> Histogram: + def expectation(self, axes: Axes, copy = True) -> Histogram: if axes != self._unit_expectation.axes: raise ValueError("Wrong axes. I have fixed axes.") if self._source is None: raise RuntimeError("Set a source first") - print(self._source.to_dict()) - # Get the latest values of the flux # Remember that _model can be modified externally between calls. flux = self._source.spectrum.main.shape.k.value + # Always copies return self._unit_expectation * flux def copy(self) -> "ToyPointSourceResponse": @@ -167,12 +169,13 @@ def set_model(self, model: Model): psr_copy.set_source(source) self._psr_copies[name] = psr_copy - def expectation(self, axes: Axes) -> Histogram: + def expectation(self, axes: Axes, copy = True) -> Histogram: expectation = Histogram(axes) for source_name,psr in self._psr_copies.items(): - expectation += psr.expectation(axes) + expectation += psr.expectation(axes, copy = False) + # Always a copy return expectation # ======= Actual code. 
This is how the "tutorial" will look like ================ @@ -216,7 +219,7 @@ def expectation(self, axes: Axes) -> Histogram: #model = Model() # Uncomment for bkg-only hypothesis # Fit -cosi = COSILike('cosi', data, response, bkg) +cosi = ThreeMLPluginInterface('cosi', PoissonLikelihood(data, response, bkg)) plugins = DataList(cosi) like = JointLikelihood(model, plugins) like.fit() diff --git a/docs/api/interfaces/using_COSILike_with_interfaces.py b/docs/api/interfaces/using_COSILike_with_interfaces.py index 7c2efafa..831ec0ec 100644 --- a/docs/api/interfaces/using_COSILike_with_interfaces.py +++ b/docs/api/interfaces/using_COSILike_with_interfaces.py @@ -1,14 +1,15 @@ +from cosipy.statistics import PoissonLikelihood from histpy import Histogram from cosipy.background_estimation import FreeNormThreeMLBinnedBackground -from cosipy.threeml import COSILike +from cosipy.interfaces import ThreeMLPluginInterface from cosipy.response import BinnedThreeMLResponse, BinnedThreeMlPointSourceResponse from threeML import Band, PointSource, Model, JointLikelihood, DataList from cosipy.util import fetch_wasabi_file from cosipy.spacecraftfile import SpacecraftFile from astropy import units as u -from cosipy import COSILike, BinnedData +from cosipy import BinnedData from cosipy.spacecraftfile import SpacecraftFile from cosipy.response.FullDetectorResponse import FullDetectorResponse from cosipy.util import fetch_wasabi_file @@ -125,7 +126,7 @@ def data(self) -> Histogram: # Optional: if you want to call get_log_like manually, then you also need to set the model manually # 3ML does this internally during the fit though - cosi = COSILike('cosi', data, response, bkg) + cosi = ThreeMLPluginInterface('cosi', PoissonLikelihood(data, response, bkg)) plugins = DataList(cosi) like = JointLikelihood(model, plugins) like.fit() From 8229d966df0c44c24c5ea751e63de2a1473e9328 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sun, 4 May 2025 12:52:57 -0400 Subject: [PATCH 037/133] Automatically handle bare background to 3ML Parameter conversion. Fix nans, norm was not being correctly set. Now it doesn't converge... 
Signed-off-by: Israel Martinez --- .../free_norm_threeml_binned_bkg.py | 45 ++++--------- cosipy/interfaces/background_interface.py | 43 ++----------- cosipy/interfaces/likelihood_interface.py | 3 +- cosipy/interfaces/threeml_plugin_interface.py | 59 ++++++++++++++--- docs/api/interfaces/toy_interfaces_example.py | 64 +++++-------------- .../using_COSILike_with_interfaces.py | 4 +- 6 files changed, 86 insertions(+), 132 deletions(-) diff --git a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py index 2f1b252a..b53a48a8 100644 --- a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py +++ b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py @@ -5,10 +5,11 @@ from histpy import Histogram from histpy import Axes -from cosipy.interfaces import ThreeMLBinnedBackgroundInterface, BinnedBackgroundInterface +from astropy import units as u -__all__ = ["FreeNormBinnedBackground", - "FreeNormThreeMLBinnedBackground"] +from cosipy.interfaces import BinnedBackgroundInterface + +__all__ = ["FreeNormBinnedBackground"] class FreeNormBinnedBackground(BinnedBackgroundInterface): """ @@ -90,29 +91,27 @@ def set_norm(self, *args, **kwargs): for n,norm in enumerate(args): self._set_norm(n, norm) - for label, bkg in kwargs.items(): + for label,norm in kwargs.items(): self._set_norm(label, norm) def _set_norm(self, label, norm): label = self._standardized_label(label) - if label in self.labels: + if label not in self.labels: raise RuntimeError(f"Component {label} doesn't exist") self._norms[label] = norm - def set_parameters(self, **parameters:Any) -> None: + def set_parameters(self, **parameters:Dict[str, u.Quantity]) -> None: """ Same keys as background components """ - - self.set_norm(**parameters) + self.set_norm(**{l:p.value for l,p in parameters.items()}) @property - def parameters(self) -> Dict[str, Any]: - - return self._norms + def parameters(self) -> Dict[str, u.Quantity]: + return {l:u.Quantity(n) for l,n in self._norms.items()} def expectation(self, axes:Axes, copy:bool)->Histogram: """ @@ -151,7 +150,7 @@ def expectation(self, axes:Axes, copy:bool)->Histogram: # Compute expectation for label in self.labels: - self._expectation += self._norms[label] * self._components[label] + self._expectation += self._components[label] * self._norms[label] # Cache. Regular copy is enough since norm values are float en not mutable self._last_norm_values = self._norms.copy() @@ -161,25 +160,3 @@ def expectation(self, axes:Axes, copy:bool)->Histogram: else: return self._expectation -class FreeNormThreeMLBinnedBackground(FreeNormBinnedBackground, ThreeMLBinnedBackgroundInterface): - - def __init__(self, *args:Tuple[Histogram], **kwargs:Dict[str, Histogram]): - - super().__init__(*args, **kwargs) - - # 3ML "Parameter" keeps track of a few more things than - # a "bare" parameter. 
- self._threeml_parameters = {label:Parameter(label, norm) for label,norm in self._norms.items()} - - def set_threeml_parameters(self, **parameters: Parameter): - self._threeml_parameters = parameters - self.set_parameters(**{label:parameter.value for label,parameter in parameters.items()}) - - @property - def threeml_parameters(self) -> Dict[str, Parameter]: - return self._threeml_parameters - - - - - diff --git a/cosipy/interfaces/background_interface.py b/cosipy/interfaces/background_interface.py index b86f986c..da655e1f 100644 --- a/cosipy/interfaces/background_interface.py +++ b/cosipy/interfaces/background_interface.py @@ -1,9 +1,11 @@ -from typing import Protocol, runtime_checkable, Dict, Any +from typing import Protocol, runtime_checkable, Dict, Any, Union import histpy import numpy as np import logging +import astropy.units as u + from astromodels import Parameter logger = logging.getLogger(__name__) @@ -14,36 +16,13 @@ "BackgroundInterface", "BinnedBackgroundInterface", "UnbinnedBackgroundInterface", - "ThreeMLBackgroundInterface", - "ThreeMLBinnedBackgroundInterface", - "ThreeMLUnbinnedBackgroundInterface", ] @runtime_checkable class BackgroundInterface(Protocol): - def set_parameters(self, **parameters:Any) -> None:... + def set_parameters(self, **parameters:Dict[str, u.Quantity]) -> None:... @property - def parameters(self) -> Dict[str, Any]:... - -@runtime_checkable -class ThreeMLBackgroundInterface(BackgroundInterface, Protocol): - """ - This must translate to/from regular parameters - with arbitrary type from/to 3ML parameters - """ - - def set_threeml_parameters(self, **parameters: Parameter): - """ - The Parameter objects are passed "as reference", and can change. - Remember to call set_parameters() before computing the expetation - """ - @property - def threeml_parameters(self)->Dict[str, Parameter]: - """ - Note than we need more information (e.g. bounds) than what you - get from base parameters property - """ - return {} # Silence warning + def parameters(self) -> Dict[str, u.Quantity]:... 
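The interface above deliberately exposes only "bare" astropy Quantity parameters; wrapping them into full 3ML Parameter objects (with units, bounds and step sizes) is left to the plugin, which later patches in this series do via Parameter(label, value, unit=...) and .as_quantity. A rough round-trip sketch under those assumptions:

import astropy.units as u
from astromodels import Parameter

bare = {'norm': u.Quantity(1.0)}                        # what parameters() would return
threeml = {name: Parameter(name, q.value, unit=q.unit)  # plugin-side wrapping for the minimizer
           for name, q in bare.items()}

threeml['norm'].value = 0.8                             # the minimizer moves the 3ML Parameter
back = {name: p.as_quantity for name, p in threeml.items()}
print(back['norm'])                                     # 0.8 (dimensionless), ready for set_parameters(**back)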
@runtime_checkable class BinnedBackgroundInterface(BackgroundInterface, BinnedExpectationInterface, Protocol): @@ -51,20 +30,8 @@ class BinnedBackgroundInterface(BackgroundInterface, BinnedExpectationInterface, No new methods, just the inherited one """ -@runtime_checkable -class ThreeMLBinnedBackgroundInterface(BinnedBackgroundInterface, ThreeMLBackgroundInterface, Protocol): - """ - No new methods, just the inherited one - """ - @runtime_checkable class UnbinnedBackgroundInterface(BackgroundInterface, UnbinnedExpectationInterface, Protocol): """ No new methods, just the inherited one """ - -@runtime_checkable -class ThreeMLUnbinnedBackgroundInterface(BinnedBackgroundInterface, ThreeMLBackgroundInterface, Protocol): - """ - No new methods, just the inherited one - """ diff --git a/cosipy/interfaces/likelihood_interface.py b/cosipy/interfaces/likelihood_interface.py index 77ba442c..5bd14f64 100644 --- a/cosipy/interfaces/likelihood_interface.py +++ b/cosipy/interfaces/likelihood_interface.py @@ -6,8 +6,7 @@ from .expectation_interface import UnbinnedExpectationInterface, BinnedExpectationInterface, ExpectationInterface from .data_interface import UnbinnedDataInterface, BinnedDataInterface, DataInterface -from .background_interface import UnbinnedBackgroundInterface, BinnedBackgroundInterface, BackgroundInterface, \ - ThreeMLBackgroundInterface +from .background_interface import UnbinnedBackgroundInterface, BinnedBackgroundInterface, BackgroundInterface @runtime_checkable class LikelihoodInterface(Protocol): diff --git a/cosipy/interfaces/threeml_plugin_interface.py b/cosipy/interfaces/threeml_plugin_interface.py index 55d05385..4df31150 100644 --- a/cosipy/interfaces/threeml_plugin_interface.py +++ b/cosipy/interfaces/threeml_plugin_interface.py @@ -1,5 +1,6 @@ from typing import Dict +from cosipy.interfaces import BinnedThreeMLModelResponseInterface, ThreeMLModelResponseInterface from cosipy.interfaces.likelihood_interface import LikelihoodInterface from threeML import PluginPrototype, Parameter @@ -26,20 +27,57 @@ def __init__(self, self._like = likelihood - @property - def nuisance_parameters(self) -> Dict[str, Parameter]: - # Adds plugin name, required by 3ML code - # See https://github.com/threeML/threeML/blob/7a16580d9d5ed57166e3b1eec3d4fccd3eeef1eb/threeML/classicMLE/joint_likelihood.py#L131 + # Check we can use this likelihood + if not isinstance(self._like.response, ThreeMLModelResponseInterface): + raise TypeError("ThreeMLPluginInterface needs a LikelihoodInterface using a response of type ThreeMLModelResponseInterface") + + # Currently, the only nuisance parameters are the ones for the bkg + # We could have systematics here as well if self._like.bkg is None: - return {} + self._threeml_bkg_parameters = {} else: - return {self._name + "_" + l:p for l,p in self._like.bkg.threeml_parameters.items()} + # 1. Adds plugin name, required by 3ML code + # See https://github.com/threeML/threeML/blob/7a16580d9d5ed57166e3b1eec3d4fccd3eeef1eb/threeML/classicMLE/joint_likelihood.py#L131 + # 2. Translation to bkg bare parameters. 3ML "Parameter" keeps track of a few more things than a "bare" (Quantity) parameter. 
+ self._threeml_bkg_parameters = {self._add_prefix_name(label): Parameter(label, param.value, unit=param.unit) for label, param in self._like.bkg.parameters.items()} + + # Allows idiom plugin.bkg_parameters["bkg_param_name"] to get 3ML parameter + self.bkg_parameter = ThreeMLPluginInterface._Bkg_parameter(self) + + def _add_prefix_name(self, label): + return self._name + "_" + label + + def _remove_prefix_name(self, label): + return label[len(self._name) + 1:] + + @property + def nuisance_parameters(self) -> Dict[str, Parameter]: + # Currently, the only nuisance parameters are the ones for the bkg + # We could have systematics here as well + return self._threeml_bkg_parameters def update_nuisance_parameters(self, new_nuisance_parameters: Dict[str, Parameter]): - # Remove plugin name. Opposite of the nuisance_parameters property + # Currently, the only nuisance parameters are the ones for the bkg + # We could have systematics here as well + self._threeml_bkg_parameters = new_nuisance_parameters + + # Set underlying bkg model + self._update_bkg_parameters() + + def _update_bkg_parameters(self): + # 1. Remove plugin name. Opposite of the nuisance_parameters property + # 2. Convert to "bare" Quantity value if self._like.bkg is not None: - new_nuisance_parameters = {l[len(self._name)+1:]:p for l,p in new_nuisance_parameters.items()} - self._like.bkg.set_threeml_parameters(**new_nuisance_parameters) + self._like.bkg.set_parameters(**{self._remove_prefix_name(label): parameter.as_quantity for label, parameter in + self._threeml_bkg_parameters.items()}) + + class _Bkg_parameter: + # Allows idiom plugin.bkg_parameters["bkg_param_name"] to get 3ML parameter + def __init__(self, plugin): + self._plugin = plugin + def __getitem__(self, label): + # Adds plugin name, required by 3ML code + return self._plugin._threeml_bkg_parameters[self._plugin._add_prefix_name(label)] def get_number_of_data_points(self) -> int: return self._like.nobservations @@ -48,6 +86,9 @@ def set_model(self, model): self._like.response.set_model(model) def get_log_like(self): + # Update underlying background object in case the Parameter objects changed internally + self._update_bkg_parameters() + return self._like.get_log_like() def inner_fit(self): diff --git a/docs/api/interfaces/toy_interfaces_example.py b/docs/api/interfaces/toy_interfaces_example.py index 7544b837..119dc697 100644 --- a/docs/api/interfaces/toy_interfaces_example.py +++ b/docs/api/interfaces/toy_interfaces_example.py @@ -1,23 +1,22 @@ from typing import Dict, Any from astromodels.sources import Source -from astromodels import LinearPolarization, SpectralComponent +from astromodels import LinearPolarization, SpectralComponent, Parameter from astromodels.core.polarization import Polarization +import astropy.units as u -from cosipy.statistics import UnbinnedLikelihood, PoissonLikelihood +from cosipy.statistics import PoissonLikelihood from cosipy.interfaces import (BinnedDataInterface, BinnedBackgroundInterface, - ThreeMLBinnedBackgroundInterface, BinnedThreeMLModelResponseInterface, BinnedThreeMLSourceResponseInterface, - ThreeMLSourceResponseInterface, - ThreeMLPluginInterface) + ThreeMLPluginInterface, BackgroundInterface) from histpy import Axis, Axes, Histogram import numpy as np from scipy.stats import norm, uniform -from threeML import Constant, PointSource, Model, JointLikelihood, DataList, Parameter +from threeML import Constant, PointSource, Model, JointLikelihood, DataList from matplotlib import pyplot as plt @@ -73,12 +72,12 @@ def __init__(self): 
self._unit_expectation[:] = 1 / self._unit_expectation.nbins self._norm = 1 - def set_parameters(self, **parameters:Any) -> None: - self._norm = parameters['norm'] + def set_parameters(self, **parameters:u.Quantity) -> None: + self._norm = parameters['norm'].value @property - def parameters(self) -> Dict[str, Any]: - return {'norm': self._norm} + def parameters(self) -> Dict[str, u.Quantity]: + return {'norm': u.Quantity(self._norm)} def expectation(self, axes: Axes, copy = True) -> Histogram: @@ -88,37 +87,6 @@ def expectation(self, axes: Axes, copy = True) -> Histogram: # Always a copy return self._unit_expectation * self._norm - -class ToyThreeMLBkg(ToyBkg, ThreeMLBinnedBackgroundInterface): - """ - This class extends the core ToyBkg class by providing the extra - "translation" methods needed to interface with 3ML. - """ - - def __init__(self): - - super().__init__() - - # 3ML "Parameter" keeps track of a few more things than - # a "bare" parameter. - self._threeml_parameters = {'norm':Parameter('norm', self._norm)} - - def expectation(self, axes: Axes, copy = True) -> Histogram: - # Overrides ToyBkg expectation - # Update, inn case it changed externally - self.set_parameters(norm = self._threeml_parameters['norm'].value) - - return super().expectation(axes, copy = copy) - - @property - def threeml_parameters(self) -> Dict[str, Parameter]: - return self._threeml_parameters - - def set_threeml_parameters(self, **parameters: Parameter): - self._threeml_parameters = parameters - self.set_parameters(norm = parameters['norm'].value) - - class ToyPointSourceResponse(BinnedThreeMLSourceResponseInterface): """ This models a Gaussian signal in 1D, centered at 0 and with std = 1. @@ -186,7 +154,7 @@ def expectation(self, axes: Axes, copy = True) -> Histogram: data = ToyData() psr = ToyPointSourceResponse() response = ToyModelResponse(psr) -bkg = ToyThreeMLBkg() +bkg = ToyBkg() ## Source model ## We'll just use the K value in u.cm / u.cm / u.s / u.keV @@ -209,11 +177,6 @@ def expectation(self, axes: Axes, copy = True) -> Histogram: model = Model(source) -# Here you can set the parameters initial values, bounds, etc. -# This is passed to the minimizer -bkg.threeml_parameters['norm'].value = 1 -spectrum.k.value = 1 - # Optional: Perform a background-only or a null-background fit #bkg = None # Uncomment for no bkg #model = Model() # Uncomment for bkg-only hypothesis @@ -222,6 +185,13 @@ def expectation(self, axes: Axes, copy = True) -> Histogram: cosi = ThreeMLPluginInterface('cosi', PoissonLikelihood(data, response, bkg)) plugins = DataList(cosi) like = JointLikelihood(model, plugins) + +# Before the fit, you can set the parameters initial values, bounds, etc. 
+# This is passed to the minimizer +cosi.bkg_parameter['norm'].value = 1 +spectrum.k.value = 1 + +# Run minimizer like.fit() print(like.minimizer) diff --git a/docs/api/interfaces/using_COSILike_with_interfaces.py b/docs/api/interfaces/using_COSILike_with_interfaces.py index 831ec0ec..3b025617 100644 --- a/docs/api/interfaces/using_COSILike_with_interfaces.py +++ b/docs/api/interfaces/using_COSILike_with_interfaces.py @@ -1,7 +1,7 @@ from cosipy.statistics import PoissonLikelihood from histpy import Histogram -from cosipy.background_estimation import FreeNormThreeMLBinnedBackground +from cosipy.background_estimation import FreeNormBinnedBackground from cosipy.interfaces import ThreeMLPluginInterface from cosipy.response import BinnedThreeMLResponse, BinnedThreeMlPointSourceResponse from threeML import Band, PointSource, Model, JointLikelihood, DataList @@ -98,7 +98,7 @@ def data(self) -> Histogram: bkg_tmax = 1842597550.0 bkg_min = np.where(bkg.binned_data.axes['Time'].edges.value == bkg_tmin)[0][0] bkg_max = np.where(bkg.binned_data.axes['Time'].edges.value == bkg_tmax)[0][0] - bkg = FreeNormThreeMLBinnedBackground(bkg.binned_data.slice[{'Time':slice(bkg_min,bkg_max)}].project('Em', 'Phi', 'PsiChi')) + bkg = FreeNormBinnedBackground(bkg.binned_data.slice[{'Time':slice(bkg_min,bkg_max)}].project('Em', 'Phi', 'PsiChi')) # Response preparation # fetch_wasabi_file('COSI-SMEX/DC2/Data/Orientation/20280301_3_month_with_orbital_info.ori', output=str(data_path / '20280301_3_month_with_orbital_info.ori'), checksum = '416fcc296fc37a056a069378a2d30cb2') From 627ca84294479831bec7a8aae3e690dd252c0838 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sun, 4 May 2025 18:09:55 -0400 Subject: [PATCH 038/133] Fix some issues. Change defaults of free norm parameters. Runs and converges Signed-off-by: Israel Martinez --- .../free_norm_threeml_binned_bkg.py | 78 ++++++++++--------- docs/api/interfaces/toy_interfaces_example.py | 5 +- .../using_COSILike_with_interfaces.py | 8 ++ 3 files changed, 51 insertions(+), 40 deletions(-) diff --git a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py index b53a48a8..fc907e7d 100644 --- a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py +++ b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py @@ -16,20 +16,20 @@ class FreeNormBinnedBackground(BinnedBackgroundInterface): This must translate to/from regular parameters with arbitrary type from/to 3ML parameters + Parameter names are "{label}_norm". Default to just "norm" is there was a single + unlabeled component """ - def __init__(self, *args:Tuple[Histogram], **kwargs:Dict[str, Histogram]): + def __init__(self, hist:Union[Histogram, Dict[str, Histogram]]): - self._components = {} - - for n,bkg in enumerate(args): - self._components[self._standardized_label(n)] = bkg - - for label, bkg in kwargs.items(): - if label in self.labels: - raise ValueError("Repeated bkg component label.") - - self._components[label] = bkg + if isinstance(hist, Histogram): + # Single component + self._components = {'bkg': hist} + self._norms = 1. + else: + # Multiple label components. + self._components = hist + self._norms = {f"{l}_norm":1. 
for l in self.labels} # These will be densify anyway since _expectation is dense # And histpy doesn't yet handle this operation efficiently @@ -50,29 +50,28 @@ def __init__(self, *args:Tuple[Histogram], **kwargs:Dict[str, Histogram]): if self._axes != bkg.axes: raise ValueError("All background components mus have the same axes") - self._norms = {l:1 for l in self.labels} - # Cache self._expectation = None self._last_norm_values = None - def _standardized_label(self, label:Union[str, int]): - if isinstance(label, str): - return label - else: - return f"bkg{label}" + @property + def _single_component(self): + return not isinstance(self._norms, dict) @property def norm(self): - if self.ncomponents != 1: + if not self._single_component: raise RuntimeError("This property can only be used for single-component models") - return next(iter(self._norms.values())) + return self._norms @property def norms(self): - return self._norms.values() + if self._single_component: + return {"norm": self._norms} + else: + return self._norms.items() @property def ncomponents(self): @@ -86,32 +85,35 @@ def meausured_axes(self): def labels(self): return self._components.keys() - def set_norm(self, *args, **kwargs): - - for n,norm in enumerate(args): - self._set_norm(n, norm) - - for label,norm in kwargs.items(): - self._set_norm(label, norm) + def set_norm(self, norm: Union[float, Dict[str, float]]): - def _set_norm(self, label, norm): + if self._single_component: + if isinstance(norm, dict): + self._norms = norm['norm'] + else: + self._norms = norm + else: + # Multiple - label = self._standardized_label(label) + if not isinstance(norm, dict): + raise TypeError("This a multi-component background. Provide labeled norm values in a dictionary") - if label not in self.labels: - raise RuntimeError(f"Component {label} doesn't exist") + for label,norm_i in norm.items(): + if label not in self._norms.keys(): + raise ValueError(f"Norm {label} not in {self._norms.keys()}") - self._norms[label] = norm + self._norms[label] = norm_i def set_parameters(self, **parameters:Dict[str, u.Quantity]) -> None: """ Same keys as background components """ + self.set_norm(**{l:p.value for l,p in parameters.items()}) @property def parameters(self) -> Dict[str, u.Quantity]: - return {l:u.Quantity(n) for l,n in self._norms.items()} + return {l:u.Quantity(n) for l,n in self.norms.items()} def expectation(self, axes:Axes, copy:bool)->Histogram: """ @@ -137,7 +139,7 @@ def expectation(self, axes:Axes, copy:bool)->Histogram: # First call. Initialize self._expectation = Histogram(self.meausured_axes) - elif self._norms == self._last_norm_values: + elif self.norms == self._last_norm_values: # No changes. Use cache if copy: return self._expectation.copy() @@ -149,11 +151,11 @@ def expectation(self, axes:Axes, copy:bool)->Histogram: self._expectation.clear() # Compute expectation - for label in self.labels: - self._expectation += self._components[label] * self._norms[label] + for norm,bkg in zip(self.norms.values(), self._components.values()): + self._expectation += bkg * norm # Cache. 
Regular copy is enough since norm values are float en not mutable - self._last_norm_values = self._norms.copy() + self._last_norm_values = self.norms.copy() if copy: return self._expectation.copy() diff --git a/docs/api/interfaces/toy_interfaces_example.py b/docs/api/interfaces/toy_interfaces_example.py index 119dc697..6abf2127 100644 --- a/docs/api/interfaces/toy_interfaces_example.py +++ b/docs/api/interfaces/toy_interfaces_example.py @@ -159,6 +159,7 @@ def expectation(self, axes: Axes, copy = True) -> Histogram: ## Source model ## We'll just use the K value in u.cm / u.cm / u.s / u.keV spectrum = Constant() +spectrum.k.value = 1 polarized = False @@ -187,9 +188,9 @@ def expectation(self, axes: Axes, copy = True) -> Histogram: like = JointLikelihood(model, plugins) # Before the fit, you can set the parameters initial values, bounds, etc. -# This is passed to the minimizer +# This is passed to the minimizer. +# In addition to model. Nuisanse. cosi.bkg_parameter['norm'].value = 1 -spectrum.k.value = 1 # Run minimizer like.fit() diff --git a/docs/api/interfaces/using_COSILike_with_interfaces.py b/docs/api/interfaces/using_COSILike_with_interfaces.py index 3b025617..cf48dba8 100644 --- a/docs/api/interfaces/using_COSILike_with_interfaces.py +++ b/docs/api/interfaces/using_COSILike_with_interfaces.py @@ -129,6 +129,14 @@ def data(self) -> Histogram: cosi = ThreeMLPluginInterface('cosi', PoissonLikelihood(data, response, bkg)) plugins = DataList(cosi) like = JointLikelihood(model, plugins) + + # Nuisance bounds + cosi.bkg_parameter['norm'].value = .1 + cosi.bkg_parameter['norm'].min_value = 0 + cosi.bkg_parameter['norm'].max_value = 5 + cosi.bkg_parameter['norm'].delta = 1e-3 + + # Fit like.fit() results = like.results From 7955cf2e7b9669f26c8dc19b787289ea629b9f91 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sun, 4 May 2025 18:34:04 -0400 Subject: [PATCH 039/133] Allows to set full new parameter Signed-off-by: Israel Martinez --- cosipy/interfaces/threeml_plugin_interface.py | 19 +++++++++++++++---- docs/api/interfaces/toy_interfaces_example.py | 2 +- .../using_COSILike_with_interfaces.py | 15 +++++++++------ 3 files changed, 25 insertions(+), 11 deletions(-) diff --git a/cosipy/interfaces/threeml_plugin_interface.py b/cosipy/interfaces/threeml_plugin_interface.py index 4df31150..58155bd9 100644 --- a/cosipy/interfaces/threeml_plugin_interface.py +++ b/cosipy/interfaces/threeml_plugin_interface.py @@ -64,20 +64,31 @@ def update_nuisance_parameters(self, new_nuisance_parameters: Dict[str, Paramete # Set underlying bkg model self._update_bkg_parameters() - def _update_bkg_parameters(self): + def _update_bkg_parameters(self, name = None): # 1. Remove plugin name. Opposite of the nuisance_parameters property # 2. 
Convert to "bare" Quantity value if self._like.bkg is not None: - self._like.bkg.set_parameters(**{self._remove_prefix_name(label): parameter.as_quantity for label, parameter in + if name is None: + #Update all + self._like.bkg.set_parameters(**{self._remove_prefix_name(label): parameter.as_quantity for label, parameter in self._threeml_bkg_parameters.items()}) + else: + # Only specific value + self._like.bkg.set_parameters(**{name:self._threeml_bkg_parameters[self._add_prefix_name(name)].as_quantity}) class _Bkg_parameter: # Allows idiom plugin.bkg_parameters["bkg_param_name"] to get 3ML parameter def __init__(self, plugin): self._plugin = plugin - def __getitem__(self, label): + def __getitem__(self, name): # Adds plugin name, required by 3ML code - return self._plugin._threeml_bkg_parameters[self._plugin._add_prefix_name(label)] + return self._plugin._threeml_bkg_parameters[self._plugin._add_prefix_name(name)] + def __setitem__(self, name, param: Parameter): + if param.name != self[name].name: + raise ValueError(f"Name of new set parameter need to match existing parameters ({param.name} != {self[name].name})") + self._plugin._threeml_bkg_parameters[self._plugin._add_prefix_name(name)] = param + self._plugin._update_bkg_parameters(name) + def get_number_of_data_points(self) -> int: return self._like.nobservations diff --git a/docs/api/interfaces/toy_interfaces_example.py b/docs/api/interfaces/toy_interfaces_example.py index 6abf2127..c2193614 100644 --- a/docs/api/interfaces/toy_interfaces_example.py +++ b/docs/api/interfaces/toy_interfaces_example.py @@ -189,7 +189,7 @@ def expectation(self, axes: Axes, copy = True) -> Histogram: # Before the fit, you can set the parameters initial values, bounds, etc. # This is passed to the minimizer. -# In addition to model. Nuisanse. +# In addition to model. Nuisance. cosi.bkg_parameter['norm'].value = 1 # Run minimizer diff --git a/docs/api/interfaces/using_COSILike_with_interfaces.py b/docs/api/interfaces/using_COSILike_with_interfaces.py index cf48dba8..40054925 100644 --- a/docs/api/interfaces/using_COSILike_with_interfaces.py +++ b/docs/api/interfaces/using_COSILike_with_interfaces.py @@ -127,14 +127,17 @@ def data(self) -> Histogram: # Optional: if you want to call get_log_like manually, then you also need to set the model manually # 3ML does this internally during the fit though cosi = ThreeMLPluginInterface('cosi', PoissonLikelihood(data, response, bkg)) - plugins = DataList(cosi) - like = JointLikelihood(model, plugins) # Nuisance bounds - cosi.bkg_parameter['norm'].value = .1 - cosi.bkg_parameter['norm'].min_value = 0 - cosi.bkg_parameter['norm'].max_value = 5 - cosi.bkg_parameter['norm'].delta = 1e-3 + cosi.bkg_parameter['norm'] = Parameter("norm", # background parameter + 0.1, # initial value of parameter + min_value=0, # minimum value of parameter + max_value=5, # maximum value of parameter + delta=1e-3, # initial step used by fitting engine + desc="Background parameter for cosi") + + plugins = DataList(cosi) + like = JointLikelihood(model, plugins) # Fit like.fit() From 4eed7956e389690945d71a2e7564a39241b4456d Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sun, 4 May 2025 18:43:26 -0400 Subject: [PATCH 040/133] Reproduce the GRB tutorial fit results. 
- Missing expectation fudge 1e-12 value - Ori time cut was not being applied Signed-off-by: Israel Martinez --- cosipy/statistics/likelihood_functions.py | 7 ++++++ .../using_COSILike_with_interfaces.py | 24 +++++++++---------- 2 files changed, 18 insertions(+), 13 deletions(-) diff --git a/cosipy/statistics/likelihood_functions.py b/cosipy/statistics/likelihood_functions.py index 2aa71b27..011780b1 100644 --- a/cosipy/statistics/likelihood_functions.py +++ b/cosipy/statistics/likelihood_functions.py @@ -1,3 +1,7 @@ +import logging + +logger = logging.getLogger(__name__) + from cosipy.interfaces import (BinnedLikelihoodInterface, UnbinnedLikelihoodInterface, BinnedDataInterface, @@ -50,6 +54,9 @@ def get_log_like(self) -> float: expectation = expectation.contents data = self._data.data.contents + expectation += 1e-12 + logger.warning("Adding 1e-12 to each bin of the expectation to avoid log-likelihood = -inf.") + # Compute the log-likelihood: log_like = np.nansum(data * np.log(expectation) - expectation) diff --git a/docs/api/interfaces/using_COSILike_with_interfaces.py b/docs/api/interfaces/using_COSILike_with_interfaces.py index 40054925..cfe7d210 100644 --- a/docs/api/interfaces/using_COSILike_with_interfaces.py +++ b/docs/api/interfaces/using_COSILike_with_interfaces.py @@ -4,10 +4,6 @@ from cosipy.background_estimation import FreeNormBinnedBackground from cosipy.interfaces import ThreeMLPluginInterface from cosipy.response import BinnedThreeMLResponse, BinnedThreeMlPointSourceResponse -from threeML import Band, PointSource, Model, JointLikelihood, DataList -from cosipy.util import fetch_wasabi_file -from cosipy.spacecraftfile import SpacecraftFile -from astropy import units as u from cosipy import BinnedData from cosipy.spacecraftfile import SpacecraftFile @@ -34,6 +30,16 @@ def main(): + # Download data + data_path = Path("") # /path/to/files. Current dir by default + # fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/grb_spectral_fit_local_frame/grb_bkg_binned_data.hdf5', output=str(data_path / 'grb_bkg_binned_data.hdf5'), checksum = 'fce391a4b45624b25552c7d111945f60') + # fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/grb_spectral_fit_local_frame/grb_binned_data.hdf5', output=str(data_path / 'grb_binned_data.hdf5'), checksum = 'fcf7022369b6fb378d67b780fc4b5db8') + # fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/grb_spectral_fit_local_frame/bkg_binned_data_1s_local.hdf5', output=str(data_path / 'bkg_binned_data_1s_local.hdf5'), checksum = 'b842a7444e6fc1a5dd567b395c36ae7f') + # fetch_wasabi_file('COSI-SMEX/DC2/Data/Orientation/20280301_3_month_with_orbital_info.ori', output=str(data_path / '20280301_3_month_with_orbital_info.ori'), checksum = '416fcc296fc37a056a069378a2d30cb2') + # fetch_wasabi_file('COSI-SMEX/DC2/Responses/SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip', output=str(data_path / 'SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip'), unzip = True, checksum = 'e8ff763c5d9e63d3797567a4a51d9eda') + + # Data preparation + # Set model to fit l = 93. b = -53. @@ -66,11 +72,6 @@ def main(): model = Model(source) # Model with single source. If we had multiple sources, we would do Model(source1, source2, ...) # Data preparation - data_path = Path("") # /path/to/files. 
Current dir by default - # fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/grb_spectral_fit_local_frame/grb_bkg_binned_data.hdf5', output=str(data_path / 'grb_bkg_binned_data.hdf5'), checksum = 'fce391a4b45624b25552c7d111945f60') - # fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/grb_spectral_fit_local_frame/grb_binned_data.hdf5', output=str(data_path / 'grb_binned_data.hdf5'), checksum = 'fcf7022369b6fb378d67b780fc4b5db8') - # fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/grb_spectral_fit_local_frame/bkg_binned_data_1s_local.hdf5', output=str(data_path / 'bkg_binned_data_1s_local.hdf5'), checksum = 'b842a7444e6fc1a5dd567b395c36ae7f') - grb = BinnedData(data_path / "grb.yaml") grb_bkg = BinnedData(data_path / "grb.yaml") @@ -101,13 +102,10 @@ def data(self) -> Histogram: bkg = FreeNormBinnedBackground(bkg.binned_data.slice[{'Time':slice(bkg_min,bkg_max)}].project('Em', 'Phi', 'PsiChi')) # Response preparation - # fetch_wasabi_file('COSI-SMEX/DC2/Data/Orientation/20280301_3_month_with_orbital_info.ori', output=str(data_path / '20280301_3_month_with_orbital_info.ori'), checksum = '416fcc296fc37a056a069378a2d30cb2') - # fetch_wasabi_file('COSI-SMEX/DC2/Responses/SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip', output=str(data_path / 'SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip'), unzip = True, checksum = 'e8ff763c5d9e63d3797567a4a51d9eda') - tmin = Time(1842597410.0, format='unix') tmax = Time(1842597450.0, format='unix') ori = SpacecraftFile.open(data_path / "20280301_3_month_with_orbital_info.ori", tmin, tmax) - sc_orientation = ori.select_interval(tmin, tmax) + ori = ori.select_interval(tmin, tmax) dr = FullDetectorResponse.open(data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5") From db1badf1136b3ffc6963416e91c4b3ca82ba5dcc Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sun, 4 May 2025 18:50:17 -0400 Subject: [PATCH 041/133] Change name of normalized bkg distributions Signed-off-by: Israel Martinez --- .../free_norm_threeml_binned_bkg.py | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py index fc907e7d..b23d46cd 100644 --- a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py +++ b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py @@ -20,30 +20,30 @@ class FreeNormBinnedBackground(BinnedBackgroundInterface): unlabeled component """ - def __init__(self, hist:Union[Histogram, Dict[str, Histogram]]): + def __init__(self, distribution:Union[Histogram, Dict[str, Histogram]]): - if isinstance(hist, Histogram): + if isinstance(distribution, Histogram): # Single component - self._components = {'bkg': hist} + self._distributions = {'bkg': distribution} self._norms = 1. else: # Multiple label components. - self._components = hist + self._distributions = distribution self._norms = {f"{l}_norm":1. 
for l in self.labels} # These will be densify anyway since _expectation is dense # And histpy doesn't yet handle this operation efficiently # See Histogram._inplace_operation_handle_sparse() # Do it once and for all - for label, bkg in self._components.items(): + for label, bkg in self._distributions.items(): if bkg.is_sparse: - self._components[label] = bkg.to_dense() + self._distributions[label] = bkg.to_dense() if self.ncomponents == 0: raise ValueError("You need to input at least one components") self._axes = None - for bkg in self._components.values(): + for bkg in self._distributions.values(): if self._axes is None: self._axes = bkg.axes else: @@ -75,7 +75,7 @@ def norms(self): @property def ncomponents(self): - return len(self._components) + return len(self._distributions) @property def meausured_axes(self): @@ -83,7 +83,7 @@ def meausured_axes(self): @property def labels(self): - return self._components.keys() + return self._distributions.keys() def set_norm(self, norm: Union[float, Dict[str, float]]): @@ -151,7 +151,7 @@ def expectation(self, axes:Axes, copy:bool)->Histogram: self._expectation.clear() # Compute expectation - for norm,bkg in zip(self.norms.values(), self._components.values()): + for norm,bkg in zip(self.norms.values(), self._distributions.values()): self._expectation += bkg * norm # Cache. Regular copy is enough since norm values are float en not mutable From c2ed9c67fda313147ac65bbfe814a2e036a59c22 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sun, 4 May 2025 18:50:54 -0400 Subject: [PATCH 042/133] Take fudge value out of the generic likelihood calculation, and applied to custom bkg data instead. Signed-off-by: Israel Martinez --- cosipy/statistics/likelihood_functions.py | 3 --- docs/api/interfaces/using_COSILike_with_interfaces.py | 11 ++++++++++- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/cosipy/statistics/likelihood_functions.py b/cosipy/statistics/likelihood_functions.py index 011780b1..4edc4a82 100644 --- a/cosipy/statistics/likelihood_functions.py +++ b/cosipy/statistics/likelihood_functions.py @@ -54,9 +54,6 @@ def get_log_like(self) -> float: expectation = expectation.contents data = self._data.data.contents - expectation += 1e-12 - logger.warning("Adding 1e-12 to each bin of the expectation to avoid log-likelihood = -inf.") - # Compute the log-likelihood: log_like = np.nansum(data * np.log(expectation) - expectation) diff --git a/docs/api/interfaces/using_COSILike_with_interfaces.py b/docs/api/interfaces/using_COSILike_with_interfaces.py index cfe7d210..f810ff2a 100644 --- a/docs/api/interfaces/using_COSILike_with_interfaces.py +++ b/docs/api/interfaces/using_COSILike_with_interfaces.py @@ -1,3 +1,5 @@ +import sys + from cosipy.statistics import PoissonLikelihood from histpy import Histogram @@ -99,7 +101,14 @@ def data(self) -> Histogram: bkg_tmax = 1842597550.0 bkg_min = np.where(bkg.binned_data.axes['Time'].edges.value == bkg_tmin)[0][0] bkg_max = np.where(bkg.binned_data.axes['Time'].edges.value == bkg_tmax)[0][0] - bkg = FreeNormBinnedBackground(bkg.binned_data.slice[{'Time':slice(bkg_min,bkg_max)}].project('Em', 'Phi', 'PsiChi')) + bkg_dist = bkg.binned_data.slice[{'Time':slice(bkg_min,bkg_max)}].project('Em', 'Phi', 'PsiChi') + + # Workaround to avoid inf values. Out bkg should be smooth, but currently it's not. + # Reproduces results before refactoring. 
It's not _exactly_ the same, since this fudge value was 1e-12, and + # it was added to the expectation, not the normalized bkg + bkg_dist += sys.float_info.min + + bkg = FreeNormBinnedBackground(bkg_dist) # Response preparation tmin = Time(1842597410.0, format='unix') From 264760263edd966f9dc3d50c0dddc14500a3c4f6 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sun, 4 May 2025 19:34:45 -0400 Subject: [PATCH 043/133] Rename example. more generic than cosi Signed-off-by: Israel Martinez --- ...interfaces.py => example_grb_fit_threeml_plugin_interfaces.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename docs/api/interfaces/{using_COSILike_with_interfaces.py => example_grb_fit_threeml_plugin_interfaces.py} (100%) diff --git a/docs/api/interfaces/using_COSILike_with_interfaces.py b/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py similarity index 100% rename from docs/api/interfaces/using_COSILike_with_interfaces.py rename to docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py From c43d40d36f14cb9133cab69a086824548dc54c68 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sun, 4 May 2025 19:59:26 -0400 Subject: [PATCH 044/133] Cleanup a little. Change bkg default label. Signed-off-by: Israel Martinez --- .../free_norm_threeml_binned_bkg.py | 13 +-- ...ample_grb_fit_threeml_plugin_interfaces.py | 79 +++++++------------ docs/api/interfaces/toy_interfaces_example.py | 3 +- 3 files changed, 38 insertions(+), 57 deletions(-) diff --git a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py index b23d46cd..3c975a82 100644 --- a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py +++ b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py @@ -16,15 +16,17 @@ class FreeNormBinnedBackground(BinnedBackgroundInterface): This must translate to/from regular parameters with arbitrary type from/to 3ML parameters - Parameter names are "{label}_norm". Default to just "norm" is there was a single + Parameter names are "{label}_norm". Default to "bkg_norm" is there was a single unlabeled component """ + _default_label = 'bkg' + def __init__(self, distribution:Union[Histogram, Dict[str, Histogram]]): if isinstance(distribution, Histogram): # Single component - self._distributions = {'bkg': distribution} + self._distributions = {self._default_label: distribution} self._norms = 1. else: # Multiple label components. @@ -69,7 +71,7 @@ def norm(self): @property def norms(self): if self._single_component: - return {"norm": self._norms} + return {f"{self._default_label}_norm": self._norms} else: return self._norms.items() @@ -89,12 +91,11 @@ def set_norm(self, norm: Union[float, Dict[str, float]]): if self._single_component: if isinstance(norm, dict): - self._norms = norm['norm'] + self._norms = norm[f'{self._default_label}_norm'] else: self._norms = norm else: # Multiple - if not isinstance(norm, dict): raise TypeError("This a multi-component background. 
Provide labeled norm values in a dictionary") @@ -109,7 +110,7 @@ def set_parameters(self, **parameters:Dict[str, u.Quantity]) -> None: Same keys as background components """ - self.set_norm(**{l:p.value for l,p in parameters.items()}) + self.set_norm({l:p.value for l,p in parameters.items()}) @property def parameters(self) -> Dict[str, u.Quantity]: diff --git a/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py index f810ff2a..87e0eb22 100644 --- a/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py @@ -16,14 +16,11 @@ from astropy.time import Time import astropy.units as u -from astropy.coordinates import SkyCoord -from astropy.stats import poisson_conf_interval import numpy as np import matplotlib.pyplot as plt from threeML import Band, PointSource, Model, JointLikelihood, DataList -from cosipy import Band_Eflux from astromodels import Parameter from pathlib import Path @@ -40,8 +37,6 @@ def main(): # fetch_wasabi_file('COSI-SMEX/DC2/Data/Orientation/20280301_3_month_with_orbital_info.ori', output=str(data_path / '20280301_3_month_with_orbital_info.ori'), checksum = '416fcc296fc37a056a069378a2d30cb2') # fetch_wasabi_file('COSI-SMEX/DC2/Responses/SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip', output=str(data_path / 'SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip'), unzip = True, checksum = 'e8ff763c5d9e63d3797567a4a51d9eda') - # Data preparation - # Set model to fit l = 93. b = -53. @@ -53,15 +48,12 @@ def main(): K = 1 / u.cm / u.cm / u.s / u.keV spectrum = Band() - spectrum.beta.min_value = -15.0 - spectrum.alpha.value = alpha spectrum.beta.value = beta spectrum.xp.value = xp.value spectrum.K.value = K.value spectrum.piv.value = piv.value - spectrum.xp.unit = xp.unit spectrum.K.unit = K.unit spectrum.piv.unit = piv.unit @@ -74,83 +66,70 @@ def main(): model = Model(source) # Model with single source. If we had multiple sources, we would do Model(source1, source2, ...) # Data preparation - - grb = BinnedData(data_path / "grb.yaml") grb_bkg = BinnedData(data_path / "grb.yaml") bkg = BinnedData(data_path / "background.yaml") - grb.load_binned_data_from_hdf5(binned_data=data_path / "grb_binned_data.hdf5") grb_bkg.load_binned_data_from_hdf5(binned_data=data_path / "grb_bkg_binned_data.hdf5") bkg.load_binned_data_from_hdf5(binned_data=data_path / "bkg_binned_data_1s_local.hdf5") - # Generate interface on the fly. All we need is to implement this method - # @property - # def data(self) -> histpy.Histogram:... 
- - # We can move this to BinnedData later, but this showed the flexibility of using Protocols over abstract classes - data_hist = grb_bkg.binned_data.project('Em', 'Phi', 'PsiChi') - - class BinnedDataAux: - @property - def data(self) -> Histogram: - return data_hist - - data = BinnedDataAux() + data = grb_bkg.binned_data.project('Em', 'Phi', 'PsiChi') bkg_tmin = 1842597310.0 bkg_tmax = 1842597550.0 bkg_min = np.where(bkg.binned_data.axes['Time'].edges.value == bkg_tmin)[0][0] bkg_max = np.where(bkg.binned_data.axes['Time'].edges.value == bkg_tmax)[0][0] - bkg_dist = bkg.binned_data.slice[{'Time':slice(bkg_min,bkg_max)}].project('Em', 'Phi', 'PsiChi') + bkg_dist = bkg.binned_data.slice[{'Time': slice(bkg_min, bkg_max)}].project('Em', 'Phi', 'PsiChi') + + # Prepare instrument response and SC history + tmin = Time(1842597410.0, format='unix') + tmax = Time(1842597450.0, format='unix') + ori = SpacecraftFile.open(data_path / "20280301_3_month_with_orbital_info.ori", tmin, tmax) + ori = ori.select_interval(tmin, tmax) # Function changed name during refactoring + + dr = FullDetectorResponse.open( + data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5") # Workaround to avoid inf values. Out bkg should be smooth, but currently it's not. # Reproduces results before refactoring. It's not _exactly_ the same, since this fudge value was 1e-12, and # it was added to the expectation, not the normalized bkg bkg_dist += sys.float_info.min + # ============ Interfaces ============== bkg = FreeNormBinnedBackground(bkg_dist) - # Response preparation - tmin = Time(1842597410.0, format='unix') - tmax = Time(1842597450.0, format='unix') - ori = SpacecraftFile.open(data_path / "20280301_3_month_with_orbital_info.ori", tmin, tmax) - ori = ori.select_interval(tmin, tmax) - - dr = FullDetectorResponse.open(data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5") - - # Options for point sources psr = BinnedThreeMlPointSourceResponse(dr, ori) - # Option for extended sources - # Not yet implemented - #esr = BinnedThreeMLExtendedSourceResponse() - esr = None + response = BinnedThreeMLResponse(point_source_response = psr) - response = BinnedThreeMLResponse(point_source_response = psr, - extended_source_response = esr) + class BinnedDataAux: + # We can move this to BinnedData later, but this shows the flexibility of using Protocols over abstract classes + # BinnedDataAux is a "BinnedDataInterface" since it implements the data() method, even if it doesn't + # explicitly derive from BinnedDataInterface + @property + def data(self) -> Histogram: + return data + data_aux = BinnedDataAux() + like_fun = PoissonLikelihood(data_aux, response, bkg) - # Optional: if you want to call get_log_like manually, then you also need to set the model manually - # 3ML does this internally during the fit though - cosi = ThreeMLPluginInterface('cosi', PoissonLikelihood(data, response, bkg)) + cosi = ThreeMLPluginInterface('cosi', like_fun) - # Nuisance bounds - cosi.bkg_parameter['norm'] = Parameter("norm", # background parameter + # Nuisance parameter guess, bounds, etc. 
+ cosi.bkg_parameter['bkg_norm'] = Parameter("bkg_norm", # background parameter 0.1, # initial value of parameter min_value=0, # minimum value of parameter max_value=5, # maximum value of parameter delta=1e-3, # initial step used by fitting engine - desc="Background parameter for cosi") + ) + + # ======== Interfaces end ========== + # 3Ml fit. Same as before plugins = DataList(cosi) like = JointLikelihood(model, plugins) - - # Fit like.fit() - results = like.results - print(results.display()) if __name__ == "__main__": diff --git a/docs/api/interfaces/toy_interfaces_example.py b/docs/api/interfaces/toy_interfaces_example.py index c2193614..00dae4b6 100644 --- a/docs/api/interfaces/toy_interfaces_example.py +++ b/docs/api/interfaces/toy_interfaces_example.py @@ -120,6 +120,8 @@ def expectation(self, axes: Axes, copy = True) -> Histogram: return self._unit_expectation * flux def copy(self) -> "ToyPointSourceResponse": + # We are not caching any results, so it's safe to do shallow copy without + # re-initializing any member. return copy.copy(self) class ToyModelResponse(BinnedThreeMLModelResponseInterface): @@ -132,7 +134,6 @@ def set_model(self, model: Model): self._psr_copies = {} for name,source in model.sources.items(): - psr_copy = self._psr.copy() psr_copy.set_source(source) self._psr_copies[name] = psr_copy From 4947be0a590d251f479a8a2b7f47aec815c69261 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Mon, 5 May 2025 13:29:30 -0400 Subject: [PATCH 045/133] Rename SpacecraftFile to SpacecraftHistory, since it no longer needs to come from a file. Signed-off-by: Israel Martinez --- .../ContinuumEstimation.py | 2 +- cosipy/data_io/UnBinnedData.py | 6 ++-- .../coordsys_conversion_matrix.py | 2 +- cosipy/image_deconvolution/exposure_table.py | 4 +-- .../interfaces/source_response_interface.py | 1 + cosipy/polarization/polarization_asad.py | 2 +- cosipy/response/FullDetectorResponse.py | 4 +-- .../response/threeml_point_source_response.py | 4 +-- cosipy/source_injector/source_injector.py | 2 +- cosipy/spacecraftfile/rsp_to_arf_rmf.py | 4 +-- cosipy/spacecraftfile/spacecraft_file.py | 22 +++++++------- .../image_deconvolution/unit_test_file.ipynb | 4 +-- cosipy/ts_map/TSMap.py | 2 +- cosipy/ts_map/fast_ts_fit.py | 8 ++--- ...ample_grb_fit_threeml_plugin_interfaces.py | 4 +-- .../BG_estimation_example.ipynb | 4 +-- .../511keV-Galactic-ImageDeconvolution.ipynb | 2 +- .../511keV-ScAtt-DataReduction.ipynb | 4 +-- .../511keV-ScAtt-ImageDeconvolution.ipynb | 2 +- docs/tutorials/index.rst | 4 +-- docs/tutorials/polarization/ASAD_method.ipynb | 4 +-- .../tutorials/response/DetectorResponse.ipynb | 6 ++-- .../PSR_with_Earth_occultation_example.ipynb | 6 ++-- docs/tutorials/response/SpacecraftFile.ipynb | 12 ++++---- .../extended_source_response_generator.py | 4 +-- ..._response_generator_with_multiple_nodes.py | 4 +-- ...e_response_generated_with_mutiple_nodes.py | 2 +- docs/tutorials/run_tutorials.yml | 2 +- .../Extended_source_injector.ipynb | 2 +- .../Point_source_injector.ipynb | 6 ++-- .../continuum_fit/crab/SpectralFit_Crab.ipynb | 4 +-- .../continuum_fit/grb/SpectralFit_GRB.ipynb | 4 +-- .../diffuse_511_spectral_fit.ipynb | 6 ++-- .../ts_map/Parallel_TS_map_computation.ipynb | 6 ++-- .../test_coordsys_conversion_matrix.py | 4 +-- .../test_exposure_table.py | 4 +-- tests/polarization/test_polarization_asad.py | 4 +-- tests/response/test_full_detector_response.py | 6 ++-- tests/source_injector/test_source_injector.py | 4 +-- .../spacecraftfile/test_arf_rmf_converter.py | 14 ++++----- 
tests/spacecraftfile/test_spacecraftfile.py | 30 +++++++++---------- tests/threeml/test_spectral_fitting.py | 4 +-- tests/ts_map/test_fast_ts_map.py | 8 ++--- 43 files changed, 117 insertions(+), 116 deletions(-) diff --git a/cosipy/background_estimation/ContinuumEstimation.py b/cosipy/background_estimation/ContinuumEstimation.py index e9f3369b..87eeec46 100644 --- a/cosipy/background_estimation/ContinuumEstimation.py +++ b/cosipy/background_estimation/ContinuumEstimation.py @@ -23,7 +23,7 @@ def calc_psr(self, sc_orientation, detector_response, coord, nside=16): ---------- ori_file : str Full path to orienation file. - sc_orientation : cosipy.spacecraftfile.SpacecraftFile + sc_orientation : cosipy.spacecraftfile.SpacecraftHistory Spacecraft orientation object. detector_response : str Full path to detector response file. diff --git a/cosipy/data_io/UnBinnedData.py b/cosipy/data_io/UnBinnedData.py index 0f8814e6..078d7065 100644 --- a/cosipy/data_io/UnBinnedData.py +++ b/cosipy/data_io/UnBinnedData.py @@ -7,7 +7,7 @@ import time import cosipy from cosipy.data_io import DataIO -from cosipy.spacecraftfile import SpacecraftFile +from cosipy.spacecraftfile import SpacecraftHistory import gzip import astropy.coordinates as astro_co import astropy.units as u @@ -441,7 +441,7 @@ def instrument_pointing(self): """ # Get ori info: - ori = SpacecraftFile.open(self.ori_file) + ori = SpacecraftHistory.open(self.ori_file) time_tags = ori._load_time x_pointings = ori.x_pointings z_pointings = ori.z_pointings @@ -842,7 +842,7 @@ def cut_SAA_events(self, unbinned_data=None, output_name=None): self.cosi_dataset = self.get_dict(unbinned_data) # Get ori info: - ori = SpacecraftFile.open(self.ori_file) + ori = SpacecraftHistory.open(self.ori_file) # Get bad time intervals: bti = self.find_bad_intervals(ori._time, ori.livetime) diff --git a/cosipy/image_deconvolution/coordsys_conversion_matrix.py b/cosipy/image_deconvolution/coordsys_conversion_matrix.py index 7cbe39e3..56ac6656 100644 --- a/cosipy/image_deconvolution/coordsys_conversion_matrix.py +++ b/cosipy/image_deconvolution/coordsys_conversion_matrix.py @@ -32,7 +32,7 @@ def time_binning_ccm(cls, full_detector_response, orientation, time_intervals, n ---------- full_detector_response : :py:class:`cosipy.response.FullDetectorResponse` Response - orientation : :py:class:`cosipy.spacecraftfile.SpacecraftFile` + orientation : :py:class:`cosipy.spacecraftfile.SpacecraftHistory` Orientation time_intervals : :py:class:`np.array` The same format of binned_data.axes['Time'].edges diff --git a/cosipy/image_deconvolution/exposure_table.py b/cosipy/image_deconvolution/exposure_table.py index 1e68726f..15951916 100644 --- a/cosipy/image_deconvolution/exposure_table.py +++ b/cosipy/image_deconvolution/exposure_table.py @@ -67,7 +67,7 @@ def from_orientation(cls, orientation, nside, scheme = 'ring', start = None, sto Parameters ---------- - orientation : :py:class:`cosipy.spacecraftfile.SpacecraftFile` + orientation : :py:class:`cosipy.spacecraftfile.SpacecraftHistory` Orientation nside : int Healpix NSIDE parameter. @@ -101,7 +101,7 @@ def analyze_orientation(cls, orientation, nside, scheme = 'ring', start = None, Parameters ---------- - orientation : :py:class:`cosipy.spacecraftfile.SpacecraftFile` + orientation : :py:class:`cosipy.spacecraftfile.SpacecraftHistory` Orientation nside : int Healpix NSIDE parameter. 
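
For context while reading the remaining hunks of this patch: the renamed class keeps the entry points already used throughout cosipy (open and select_interval, as defined in cosipy/spacecraftfile/spacecraft_file.py further below). A minimal usage sketch, assuming the tutorial orientation file and unix timestamps as placeholders:

    from astropy.time import Time

    from cosipy.spacecraftfile import SpacecraftHistory

    # Load the spacecraft attitude/position history for a time range
    # (placeholder .ori file name and timestamps taken from the GRB tutorial data).
    tmin = Time(1842597410.0, format='unix')
    tmax = Time(1842597450.0, format='unix')
    ori = SpacecraftHistory.open("20280301_3_month_with_orbital_info.ori", tmin, tmax)

    # Trim the history to the interval of interest before building responses.
    ori = ori.select_interval(tmin, tmax)
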
diff --git a/cosipy/interfaces/source_response_interface.py b/cosipy/interfaces/source_response_interface.py index 89f1fa32..fff27da0 100644 --- a/cosipy/interfaces/source_response_interface.py +++ b/cosipy/interfaces/source_response_interface.py @@ -15,6 +15,7 @@ @runtime_checkable class SourceResponseInterface(Protocol): ... + def set_spacecraft_history(self, sc_history): @runtime_checkable class ThreeMLModelResponseInterface(SourceResponseInterface, Protocol): diff --git a/cosipy/polarization/polarization_asad.py b/cosipy/polarization/polarization_asad.py index a7b33dba..b9c47b25 100644 --- a/cosipy/polarization/polarization_asad.py +++ b/cosipy/polarization/polarization_asad.py @@ -31,7 +31,7 @@ class PolarizationASAD(): Binned or unbinned data, or list of binned/unbinned data if separated in time background : dict or cosipy.data_io.BinnedData Binned or unbinned background model - sc_orientation : cosipy.spacecraftfile.SpacecraftFile.SpacecraftFile + sc_orientation : cosipy.spacecraftfile.SpacecraftHistory.SpacecraftHistory Spacecraft orientation response_file : str or pathlib.Path Path to detector response diff --git a/cosipy/response/FullDetectorResponse.py b/cosipy/response/FullDetectorResponse.py index c6fe3423..2afdcedb 100644 --- a/cosipy/response/FullDetectorResponse.py +++ b/cosipy/response/FullDetectorResponse.py @@ -983,7 +983,7 @@ def get_point_source_response_per_image_pixel(self, ipix_image, orientation, coo ---------- ipix_image : int HEALPix pixel index - orientation : cosipy.spacecraftfile.SpacecraftFile + orientation : cosipy.spacecraftfile.SpacecraftHistory Spacecraft attitude information coordsys : str, default 'galactic' Coordinate system (currently only 'galactic' is supported) @@ -1024,7 +1024,7 @@ def get_extended_source_response(self, orientation, coordsys = 'galactic', nside Parameters ---------- - orientation : cosipy.spacecraftfile.SpacecraftFile + orientation : cosipy.spacecraftfile.SpacecraftHistory Spacecraft attitude information coordsys : str, default 'galactic' Coordinate system (currently only 'galactic' is supported) diff --git a/cosipy/response/threeml_point_source_response.py b/cosipy/response/threeml_point_source_response.py index 0287cd61..39ce796a 100644 --- a/cosipy/response/threeml_point_source_response.py +++ b/cosipy/response/threeml_point_source_response.py @@ -9,7 +9,7 @@ from cosipy.interfaces import BinnedThreeMLSourceResponseInterface from cosipy.response import FullDetectorResponse -from cosipy.spacecraftfile import SpacecraftFile, SpacecraftAttitudeMap +from cosipy.spacecraftfile import SpacecraftHistory, SpacecraftAttitudeMap from mhealpy import HealpixMap @@ -30,7 +30,7 @@ class BinnedThreeMlPointSourceResponse(BinnedThreeMLSourceResponseInterface): def __init__(self, dr: FullDetectorResponse, - sc_orientation: SpacecraftFile, + sc_orientation: SpacecraftHistory, ): # TODO: FullDetectorResponse -> BinnedInstrumentResponseInterface diff --git a/cosipy/source_injector/source_injector.py b/cosipy/source_injector/source_injector.py index 24a3f768..8ae6f5a5 100644 --- a/cosipy/source_injector/source_injector.py +++ b/cosipy/source_injector/source_injector.py @@ -85,7 +85,7 @@ def inject_point_source(self, spectrum, coordinate, orientation = None, source_n The spectrum model defined from `astromodels`. coordinate : astropy.coordinates.SkyCoord The coordinate of the point source. 
- orientation : cosipy.spacecraftfile.SpacecraftFile, option + orientation : cosipy.spacecraftfile.SpacecraftHistory, option The orientation of the telescope during the mock simulation. This is needed when using a detector response. (the default is `None`, which means a galactic response is used. source_name : str, optional The name of the source (the default is `point_source`). diff --git a/cosipy/spacecraftfile/rsp_to_arf_rmf.py b/cosipy/spacecraftfile/rsp_to_arf_rmf.py index bfbd9cba..5593c080 100644 --- a/cosipy/spacecraftfile/rsp_to_arf_rmf.py +++ b/cosipy/spacecraftfile/rsp_to_arf_rmf.py @@ -1,7 +1,7 @@ import logging logger = logging.getLogger(__name__) -from .spacecraft_file import SpacecraftFile +from .spacecraft_file import SpacecraftHistory import numpy as np import astropy.units as u @@ -15,7 +15,7 @@ class RspArfRmfConverter: - def __init__(self, response:FullDetectorResponse, ori:SpacecraftFile, target_coord:SkyCoord): + def __init__(self, response:FullDetectorResponse, ori:SpacecraftHistory, target_coord:SkyCoord): self.response = response self.ori = ori diff --git a/cosipy/spacecraftfile/spacecraft_file.py b/cosipy/spacecraftfile/spacecraft_file.py index cb465edb..bc672524 100644 --- a/cosipy/spacecraftfile/spacecraft_file.py +++ b/cosipy/spacecraftfile/spacecraft_file.py @@ -21,9 +21,9 @@ import logging logger = logging.getLogger(__name__) -__all__ = ["SpacecraftFile"] +__all__ = ["SpacecraftHistory"] -class SpacecraftFile: +class SpacecraftHistory: def __init__(self, obstime: Time, @@ -129,7 +129,7 @@ def location(self)->EarthLocation: return self._location @classmethod - def open(cls, file, tstart:Time = None, tstop:Time = None) -> "SpacecraftFile": + def open(cls, file, tstart:Time = None, tstop:Time = None) -> "SpacecraftHistory": """ Parses timestamps, axis positions from file and returns to __init__. @@ -148,7 +148,7 @@ def open(cls, file, tstart:Time = None, tstop:Time = None) -> "SpacecraftFile": Returns ------- cosipy.spacecraftfile.spacecraft_file - The SpacecraftFile object. + The SpacecraftHistory object. """ file = Path(file) @@ -159,7 +159,7 @@ def open(cls, file, tstart:Time = None, tstop:Time = None) -> "SpacecraftFile": raise ValueError(f"File format for {file} not supported") @classmethod - def _parse_from_file(cls, file, tstart:Time = None, tstop:Time = None) -> "SpacecraftFile": + def _parse_from_file(cls, file, tstart:Time = None, tstop:Time = None) -> "SpacecraftHistory": """ Parses an .ori txt file with MEGAlib formatting. @@ -183,7 +183,7 @@ def _parse_from_file(cls, file, tstart:Time = None, tstop:Time = None) -> "Space Returns ------- cosipy.spacecraftfile.spacecraft_file - The SpacecraftFile object. + The SpacecraftHistory object. """ # First and last line are read only by MEGAlib e.g. 
@@ -355,7 +355,7 @@ def cumulative_livetime(self, time: Time) -> u.Quantity: def interp_weights(self, times: Time): return self._hist.axis.interp_weights_edges(times) - def interp(self, times: Time) -> 'SpacecraftFile': + def interp(self, times: Time) -> 'SpacecraftHistory': """ Linearly interpolates attitude and position at a given obstime @@ -367,7 +367,7 @@ def interp(self, times: Time) -> 'SpacecraftFile': Returns ------- - A new SpacecraftFile object interpolated at these location + A new SpacecraftHistory object interpolated at these location """ if times.size < 2: @@ -383,9 +383,9 @@ def interp(self, times: Time) -> 'SpacecraftFile': return self.__class__(times, interp_attitude, interp_location, diff_livetime) - def select_interval(self, start:Time = None, stop:Time = None) -> "SpacecraftFile": + def select_interval(self, start:Time = None, stop:Time = None) -> "SpacecraftHistory": """ - Returns the SpacecraftFile file class object for the source interval. + Returns the SpacecraftHistory file class object for the source interval. Parameters ---------- @@ -396,7 +396,7 @@ def select_interval(self, start:Time = None, stop:Time = None) -> "SpacecraftFil Returns ------- - cosipy.spacecraft.SpacecraftFile + cosipy.spacecraft.SpacecraftHistory """ if start is None: diff --git a/cosipy/test_data/image_deconvolution/unit_test_file.ipynb b/cosipy/test_data/image_deconvolution/unit_test_file.ipynb index c9719bba..36e26519 100644 --- a/cosipy/test_data/image_deconvolution/unit_test_file.ipynb +++ b/cosipy/test_data/image_deconvolution/unit_test_file.ipynb @@ -23,7 +23,7 @@ "\n", "from cosipy import test_data\n", "from cosipy.image_deconvolution import CoordsysConversionMatrix, SpacecraftAttitudeExposureTable, AllSkyImageModel, DataIF_COSI_DC2\n", - "from cosipy.spacecraftfile import SpacecraftFile\n", + "from cosipy.spacecraftfile import SpacecraftHistory\n", "\n", "from cosipy.response import FullDetectorResponse" ] @@ -43,7 +43,7 @@ "metadata": {}, "outputs": [], "source": [ - "ori = SpacecraftFile.parse_from_file(test_data.path / \"20280301_first_10sec.ori\")\n", + "ori = SpacecraftHistory.parse_from_file(test_data.path / \"20280301_first_10sec.ori\")\n", "ori" ] }, diff --git a/cosipy/ts_map/TSMap.py b/cosipy/ts_map/TSMap.py index c463a698..b905862f 100644 --- a/cosipy/ts_map/TSMap.py +++ b/cosipy/ts_map/TSMap.py @@ -35,7 +35,7 @@ def link_model_all_plugins(self, dr, data, bkg, sc_orientation, piv, index, othe Binned data. Note: Eventually this should be a cosipy data class. bkg : histpy.Histogram Binned background model. Note: Eventually this should be a cosipy data class. - sc_orientation : cosipy.spacecraftfile.SpacecraftFile + sc_orientation : cosipy.spacecraftfile.SpacecraftHistory Contains the information of the orientation: timestamps (astropy.Time) and attitudes (scoord.Attitude) that describe the spacecraft for the duration of the data included in the analysis. 
piv : float diff --git a/cosipy/ts_map/fast_ts_fit.py b/cosipy/ts_map/fast_ts_fit.py index 323f7003..76fbc200 100644 --- a/cosipy/ts_map/fast_ts_fit.py +++ b/cosipy/ts_map/fast_ts_fit.py @@ -1,7 +1,7 @@ from histpy import Histogram, Axis, Axes import h5py as h5 import sys -from cosipy import SpacecraftFile +from cosipy import SpacecraftHistory from cosipy.response import PointSourceResponse import healpy as hp from mhealpy import HealpixMap @@ -37,7 +37,7 @@ def __init__(self, data, bkg_model, response_path, orientation = None, cds_frame Background model, which includes the background counts to model the background in the observed data. response_path : str or pathlib.Path The path to the response file. - orientation : cosipy.SpacecraftFile, optional + orientation : cosipy.SpacecraftHistory, optional The orientation of the spacecraft when data are collected (the default is `None`, which implies the orientation file is not needed). cds_frame : str, optional "local" or "galactic", it's the Compton data space (CDS) frame of the data, bkg_model and the response. In other words, they should have the same cds frame (the default is "local", which implied that a local frame that attached to the spacecraft). @@ -214,7 +214,7 @@ def get_ei_cds_array(hypothesis_coord, energy_channel, response_path, spectrum, The spectrum of the source. cds_frame : str, optional "local" or "galactic", it's the Compton data space (CDS) frame of the data, bkg_model and the response. In other words, they should have the same cds frame. - orientation : cosipy.spacecraftfile.SpacecraftFile, optional + orientation : cosipy.spacecraftfile.SpacecraftHistory, optional The orientation of the spacecraft when data are collected (the default is `None`, which implies the orientation file is not needed). Returns @@ -298,7 +298,7 @@ def fast_ts_fit(hypothesis_coord, The flattened Compton data space (CDS) array of the data. bkg_model_cds_array : numpy.ndarray The flattened Compton data space (CDS) array of the background model. - orientation : cosipy.spacecraftfile.SpacecraftFile + orientation : cosipy.spacecraftfile.SpacecraftHistory The orientation of the spacecraft when data are collected. response_path : str or pathlib.Path The path to the response file. 
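
The script whose diff follows (docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py) is where the pieces introduced in the preceding patches come together. A condensed sketch of that wiring, assuming the inputs (data histogram, background distribution, detector response, spacecraft history and 3ML model) are prepared as in the script; fit_grb is a hypothetical helper name, and the parameter name "bkg_norm" follows from the default single-component label:

    from astromodels import Parameter
    from histpy import Histogram
    from threeML import DataList, JointLikelihood

    from cosipy.background_estimation import FreeNormBinnedBackground
    from cosipy.interfaces import ThreeMLPluginInterface
    from cosipy.response import BinnedThreeMLResponse, BinnedThreeMlPointSourceResponse
    from cosipy.statistics import PoissonLikelihood

    def fit_grb(data, bkg_dist, dr, ori, model):
        # data:     ('Em', 'Phi', 'PsiChi') Histogram of the observed counts
        # bkg_dist: ('Em', 'Phi', 'PsiChi') Histogram of the background distribution
        # dr:       FullDetectorResponse; ori: SpacecraftHistory; model: 3ML Model
        # All of these are assumed to be prepared exactly as in the example script.

        bkg = FreeNormBinnedBackground(bkg_dist)  # single component -> parameter "bkg_norm"

        psr = BinnedThreeMlPointSourceResponse(dr, ori)
        response = BinnedThreeMLResponse(point_source_response=psr)

        class BinnedDataAux:
            # Anything exposing a 'data' property satisfies the binned-data protocol.
            @property
            def data(self) -> Histogram:
                return data

        cosi = ThreeMLPluginInterface('cosi', PoissonLikelihood(BinnedDataAux(), response, bkg))

        # Nuisance parameter: initial value, bounds and step used by the fitting engine.
        cosi.bkg_parameter['bkg_norm'] = Parameter("bkg_norm", 0.1,
                                                   min_value=0, max_value=5, delta=1e-3)

        like = JointLikelihood(model, DataList(cosi))
        like.fit()
        return like.results
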
diff --git a/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py index 87e0eb22..66793e1d 100644 --- a/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py @@ -8,7 +8,7 @@ from cosipy.response import BinnedThreeMLResponse, BinnedThreeMlPointSourceResponse from cosipy import BinnedData -from cosipy.spacecraftfile import SpacecraftFile +from cosipy.spacecraftfile import SpacecraftHistory from cosipy.response.FullDetectorResponse import FullDetectorResponse from cosipy.util import fetch_wasabi_file @@ -83,7 +83,7 @@ def main(): # Prepare instrument response and SC history tmin = Time(1842597410.0, format='unix') tmax = Time(1842597450.0, format='unix') - ori = SpacecraftFile.open(data_path / "20280301_3_month_with_orbital_info.ori", tmin, tmax) + ori = SpacecraftHistory.open(data_path / "20280301_3_month_with_orbital_info.ori", tmin, tmax) ori = ori.select_interval(tmin, tmax) # Function changed name during refactoring dr = FullDetectorResponse.open( diff --git a/docs/tutorials/background_estimation/continuum_estimation/BG_estimation_example.ipynb b/docs/tutorials/background_estimation/continuum_estimation/BG_estimation_example.ipynb index 06cc93c4..a0990e62 100644 --- a/docs/tutorials/background_estimation/continuum_estimation/BG_estimation_example.ipynb +++ b/docs/tutorials/background_estimation/continuum_estimation/BG_estimation_example.ipynb @@ -268,7 +268,7 @@ ], "source": [ "from cosipy.background_estimation import ContinuumEstimation\n", - "from cosipy.spacecraftfile import SpacecraftFile\n", + "from cosipy.spacecraftfile import SpacecraftHistory\n", "from cosipy.util import fetch_wasabi_file\n", "import os\n", "import logging\n", @@ -396,7 +396,7 @@ "ori_file = data_path/\"DC3_final_530km_3_month_with_slew_15sbins_GalacticEarth_SAA.ori\"\n", "\n", "# Spacecraft orientation:\n", - "sc_orientation = SpacecraftFile.parse_from_file(ori_file)\n", + "sc_orientation = SpacecraftHistory.parse_from_file(ori_file)\n", "\n", "crab = SkyCoord(l=184.56*u.deg,b=-5.78*u.deg,frame=\"galactic\")\n", "psr = instance.calc_psr(sc_orientation, dr, crab)\n", diff --git a/docs/tutorials/image_deconvolution/511keV/GalacticCDS/511keV-Galactic-ImageDeconvolution.ipynb b/docs/tutorials/image_deconvolution/511keV/GalacticCDS/511keV-Galactic-ImageDeconvolution.ipynb index 05c489fa..31b2d055 100644 --- a/docs/tutorials/image_deconvolution/511keV/GalacticCDS/511keV-Galactic-ImageDeconvolution.ipynb +++ b/docs/tutorials/image_deconvolution/511keV/GalacticCDS/511keV-Galactic-ImageDeconvolution.ipynb @@ -286,7 +286,7 @@ "from astropy.coordinates import SkyCoord, cartesian_to_spherical, Galactic\n", "\n", "from cosipy.response import FullDetectorResponse\n", - "from cosipy.spacecraftfile import SpacecraftFile\n", + "from cosipy.spacecraftfile import SpacecraftHistory\n", "from cosipy.ts_map.TSMap import TSMap\n", "from cosipy.data_io import UnBinnedData, BinnedData\n", "from cosipy.image_deconvolution import SpacecraftAttitudeExposureTable, CoordsysConversionMatrix, DataIF_COSI_DC2, ImageDeconvolution\n", diff --git a/docs/tutorials/image_deconvolution/511keV/ScAttBinning/511keV-ScAtt-DataReduction.ipynb b/docs/tutorials/image_deconvolution/511keV/ScAttBinning/511keV-ScAtt-DataReduction.ipynb index 15ff0231..641f35df 100644 --- a/docs/tutorials/image_deconvolution/511keV/ScAttBinning/511keV-ScAtt-DataReduction.ipynb +++ 
b/docs/tutorials/image_deconvolution/511keV/ScAttBinning/511keV-ScAtt-DataReduction.ipynb @@ -302,7 +302,7 @@ "from astropy.coordinates import SkyCoord, cartesian_to_spherical, Galactic\n", "\n", "from cosipy.response import FullDetectorResponse\n", - "from cosipy.spacecraftfile import SpacecraftFile\n", + "from cosipy.spacecraftfile import SpacecraftHistory\n", "from cosipy.ts_map.TSMap import TSMap\n", "from cosipy.data_io import UnBinnedData, BinnedData\n", "from cosipy.image_deconvolution import SpacecraftAttitudeExposureTable, CoordsysConversionMatrix\n", @@ -456,7 +456,7 @@ "%%time\n", "\n", "ori_filepath = path_data + \"20280301_3_month_with_orbital_info.ori\"\n", - "ori = SpacecraftFile.parse_from_file(ori_filepath)" + "ori = SpacecraftHistory.parse_from_file(ori_filepath)" ] }, { diff --git a/docs/tutorials/image_deconvolution/511keV/ScAttBinning/511keV-ScAtt-ImageDeconvolution.ipynb b/docs/tutorials/image_deconvolution/511keV/ScAttBinning/511keV-ScAtt-ImageDeconvolution.ipynb index 02a2e257..88edeab1 100644 --- a/docs/tutorials/image_deconvolution/511keV/ScAttBinning/511keV-ScAtt-ImageDeconvolution.ipynb +++ b/docs/tutorials/image_deconvolution/511keV/ScAttBinning/511keV-ScAtt-ImageDeconvolution.ipynb @@ -282,7 +282,7 @@ "from astropy.coordinates import SkyCoord, cartesian_to_spherical, Galactic\n", "\n", "from cosipy.response import FullDetectorResponse\n", - "from cosipy.spacecraftfile import SpacecraftFile\n", + "from cosipy.spacecraftfile import SpacecraftHistory\n", "from cosipy.ts_map.TSMap import TSMap\n", "from cosipy.data_io import UnBinnedData, BinnedData\n", "from cosipy.image_deconvolution import SpacecraftAttitudeExposureTable, CoordsysConversionMatrix, DataIF_COSI_DC2, ImageDeconvolution\n", diff --git a/docs/tutorials/index.rst b/docs/tutorials/index.rst index 37b0dff5..b7bf837b 100644 --- a/docs/tutorials/index.rst +++ b/docs/tutorials/index.rst @@ -16,7 +16,7 @@ List of tutorials and contents, as a link to the corresponding Python notebook i - Combining files. - Inspecting and plotting the data -2. Spacecraft orientation and location `(ipynb) `_ +2. Spacecraft orientation and location `(ipynb) `_ - SC file format and manipulation it —e.g. get a time range, rebin it. - The dwell time map and how to obtain it @@ -79,7 +79,7 @@ List of tutorials and contents, as a link to the corresponding Python notebook i :maxdepth: 1 Data format and handling - response/SpacecraftFile.ipynb + response/SpacecraftHistory.ipynb Detector response and signal expectation TS Map: localizing a GRB Fitting the spectrum of a GRB diff --git a/docs/tutorials/polarization/ASAD_method.ipynb b/docs/tutorials/polarization/ASAD_method.ipynb index 0fe0eeca..a6caaebe 100644 --- a/docs/tutorials/polarization/ASAD_method.ipynb +++ b/docs/tutorials/polarization/ASAD_method.ipynb @@ -24,7 +24,7 @@ "outputs": [], "source": [ "from cosipy import UnBinnedData\n", - "from cosipy.spacecraftfile import SpacecraftFile\n", + "from cosipy.spacecraftfile import SpacecraftHistory\n", "from cosipy.polarization.conventions import MEGAlibRelativeX, MEGAlibRelativeY, MEGAlibRelativeZ, IAUPolarizationConvention\n", "from cosipy.polarization.polarization_asad import PolarizationASAD\n", "from cosipy.threeml.custom_functions import Band_Eflux\n", @@ -158,7 +158,7 @@ "source": [ "response_file = data_path/'ResponseContinuum.o3.pol.e200_10000.b4.p12.relx.s10396905069491.m420.filtered.nonsparse.binnedpolarization.11D_nside8.area.good_chunks.h5' # e.g. 
ResponseContinuum.o3.pol.e200_10000.b4.p12.s10396905069491.m441.filtered.nonsparse.binnedpolarization.11D_nside8.area.h5\n", "\n", - "sc_orientation = SpacecraftFile.parse_from_file(data_path/'DC3_final_530km_3_month_with_slew_1sbins_GalacticEarth_SAA.ori') # e.g. DC3_final_530km_3_month_with_slew_1sbins_GalacticEarth_SAA.ori\n", + "sc_orientation = SpacecraftHistory.parse_from_file(data_path/'DC3_final_530km_3_month_with_slew_1sbins_GalacticEarth_SAA.ori') # e.g. DC3_final_530km_3_month_with_slew_1sbins_GalacticEarth_SAA.ori\n", "sc_orientation = sc_orientation.source_interval(Time(1835493492.2, format = 'unix'), Time(1835493492.8, format = 'unix'))" ] }, diff --git a/docs/tutorials/response/DetectorResponse.ipynb b/docs/tutorials/response/DetectorResponse.ipynb index 711e58e7..00456953 100644 --- a/docs/tutorials/response/DetectorResponse.ipynb +++ b/docs/tutorials/response/DetectorResponse.ipynb @@ -52,7 +52,7 @@ "\n", "from scoords import Attitude, SpacecraftFrame\n", "from cosipy.response import FullDetectorResponse\n", - "from cosipy.spacecraftfile import SpacecraftFile\n", + "from cosipy.spacecraftfile import SpacecraftHistory\n", "from cosipy import test_data\n", "from cosipy.util import fetch_wasabi_file\n", "from histpy import Histogram\n", @@ -487,7 +487,7 @@ ], "source": [ "# read the full oritation\n", - "ori = SpacecraftFile.parse_from_file(ori_path)\n", + "ori = SpacecraftHistory.parse_from_file(ori_path)\n", "\n", "# define the target coordinates (Crab)\n", "target_coord = SkyCoord(184.5551, -05.7877, unit = \"deg\", frame = \"galactic\")\n", @@ -910,7 +910,7 @@ "id": "145c3988-a437-42df-90c6-ac25384dd849", "metadata": {}, "source": [ - "You can also convert the point source response to XSPEC readable files (arf, rmf and pha) if you want to do spetral fitting or simulation in XSPEC. See the `SpacecraftFile` class functions `get_arf()`, `get_rmf()` and `get_pha()`, respectively." + "You can also convert the point source response to XSPEC readable files (arf, rmf and pha) if you want to do spetral fitting or simulation in XSPEC. See the `SpacecraftHistory` class functions `get_arf()`, `get_rmf()` and `get_pha()`, respectively." 
] }, { diff --git a/docs/tutorials/response/PSR_with_Earth_occultation_example.ipynb b/docs/tutorials/response/PSR_with_Earth_occultation_example.ipynb index 1a5a6e69..3893afad 100644 --- a/docs/tutorials/response/PSR_with_Earth_occultation_example.ipynb +++ b/docs/tutorials/response/PSR_with_Earth_occultation_example.ipynb @@ -277,7 +277,7 @@ ], "source": [ "# Imports\n", - "from cosipy.spacecraftfile import SpacecraftFile\n", + "from cosipy.spacecraftfile import SpacecraftHistory\n", "from cosipy.response import FullDetectorResponse\n", "from cosipy.util import fetch_wasabi_file\n", "import astropy.units as u\n", @@ -301,7 +301,7 @@ "outputs": [], "source": [ "ori_file = \"your/path/DC3_final_530km_3_month_with_slew_15sbins_GalacticEarth.ori\"\n", - "ori = SpacecraftFile.parse_from_file(ori_file)" + "ori = SpacecraftHistory.parse_from_file(ori_file)" ] }, { @@ -387,7 +387,7 @@ "outputs": [], "source": [ "ori_file = \"your/path/test_earth_occ.ori\"\n", - "ori = SpacecraftFile.parse_from_file(ori_file)" + "ori = SpacecraftHistory.parse_from_file(ori_file)" ] }, { diff --git a/docs/tutorials/response/SpacecraftFile.ipynb b/docs/tutorials/response/SpacecraftFile.ipynb index feffafa2..307903ce 100644 --- a/docs/tutorials/response/SpacecraftFile.ipynb +++ b/docs/tutorials/response/SpacecraftFile.ipynb @@ -13,7 +13,7 @@ "id": "cc657b2f-2276-45f1-8fcc-d089e9c69288", "metadata": {}, "source": [ - "The spacecraft is always moving and changing orientations. The attitude --i.e. orientation-- vs. time is handled by the SpacecraftFile class. This allows us to transform from spacecraft coordinates to inertial coordinate --e.g. galactics coordinates." + "The spacecraft is always moving and changing orientations. The attitude --i.e. orientation-- vs. time is handled by the SpacecraftHistory class. This allows us to transform from spacecraft coordinates to inertial coordinate --e.g. galactics coordinates." ] }, { @@ -22,7 +22,7 @@ "metadata": {}, "source": [ "
\n", - "Note: In future versions, the SpacecraftFile class will handle the spacecraft location --i.e. latitude, longitude, and altitude-- in addition to its attitude. This will allow us to know where the Earth is located in the field of view, which we are currently ignoring for simplicity.
" + "Note: In future versions, the SpacecraftHistory class will handle the spacecraft location --i.e. latitude, longitude, and altitude-- in addition to its attitude. This will allow us to know where the Earth is located in the field of view, which we are currently ignoring for simplicity." ] }, { @@ -60,7 +60,7 @@ "import os\n", "\n", "from cosipy.response import FullDetectorResponse\n", - "from cosipy.spacecraftfile import SpacecraftFile\n", + "from cosipy.spacecraftfile import SpacecraftHistory\n", "from cosipy.util import fetch_wasabi_file" ] }, @@ -208,7 +208,7 @@ "metadata": {}, "source": [ "
\n", - "Note: The orientation (.ori) file format will change in the future, from a text file to a FITS file. However, the file contents and the capabilities of the SpacecraftFile class will be the same.
" + "Note: The orientation (.ori) file format will change in the future, from a text file to a FITS file. However, the file contents and the capabilities of the SpacecraftHistory class will be the same." ] }, { @@ -216,7 +216,7 @@ "id": "975bc80f-5aef-4b71-b1a9-9ba79fdf76a8", "metadata": {}, "source": [ - "You don't have to remember the internal format though, just load it using the SpacecraftFile class:" + "You don't have to remember the internal format though, just load it using the SpacecraftHistory class:" ] }, { @@ -228,7 +228,7 @@ }, "outputs": [], "source": [ - "ori = SpacecraftFile.parse_from_file(ori_path)\n", + "ori = SpacecraftHistory.parse_from_file(ori_path)\n", "\n", "# Let's use only 1 hr in this example\n", "ori = ori.source_interval(ori.get_time()[0], ori.get_time()[0] + 1*u.hr)" diff --git a/docs/tutorials/response/extended_source_response_generator.py b/docs/tutorials/response/extended_source_response_generator.py index 9936ee57..edb9a2af 100644 --- a/docs/tutorials/response/extended_source_response_generator.py +++ b/docs/tutorials/response/extended_source_response_generator.py @@ -7,7 +7,7 @@ logger.setLevel(logging.INFO) logger.addHandler(logging.StreamHandler(sys.stdout)) -from cosipy.spacecraftfile import SpacecraftFile +from cosipy.spacecraftfile import SpacecraftHistory from cosipy.response import FullDetectorResponse, ExtendedSourceResponse # file path @@ -16,7 +16,7 @@ # load response and orientation full_detector_response = FullDetectorResponse.open(full_detector_response_path) -orientation = SpacecraftFile.open(orientation_path) +orientation = SpacecraftHistory.open(orientation_path) # generate your extended source response extended_source_response = full_detector_response.get_extended_source_response(orientation, diff --git a/docs/tutorials/response/extended_source_response_generator_with_multiple_nodes.py b/docs/tutorials/response/extended_source_response_generator_with_multiple_nodes.py index 7db71212..c1795e63 100644 --- a/docs/tutorials/response/extended_source_response_generator_with_multiple_nodes.py +++ b/docs/tutorials/response/extended_source_response_generator_with_multiple_nodes.py @@ -7,7 +7,7 @@ logger.setLevel(logging.INFO) logger.addHandler(logging.StreamHandler(sys.stdout)) -from cosipy.spacecraftfile import SpacecraftFile +from cosipy.spacecraftfile import SpacecraftHistory from cosipy.response import FullDetectorResponse, ExtendedSourceResponse # file path @@ -16,7 +16,7 @@ # load response and orientation full_detector_response = FullDetectorResponse.open(full_detector_response_path) -orientation = SpacecraftFile.open(orientation_path) +orientation = SpacecraftHistory.open(orientation_path) # set the healpix pixel index list ipix_image_list = [int(_) for _ in sys.argv[1:]] diff --git a/docs/tutorials/response/merge_response_generated_with_mutiple_nodes.py b/docs/tutorials/response/merge_response_generated_with_mutiple_nodes.py index 8e279e5c..a0e74547 100644 --- a/docs/tutorials/response/merge_response_generated_with_mutiple_nodes.py +++ b/docs/tutorials/response/merge_response_generated_with_mutiple_nodes.py @@ -7,7 +7,7 @@ logger.setLevel(logging.INFO) logger.addHandler(logging.StreamHandler(sys.stdout)) -from cosipy.spacecraftfile import SpacecraftFile +from cosipy.spacecraftfile import SpacecraftHistory from cosipy.response import FullDetectorResponse, ExtendedSourceResponse # load full detector response diff --git a/docs/tutorials/run_tutorials.yml b/docs/tutorials/run_tutorials.yml index 214aea9c..a933a753 100644 --- 
a/docs/tutorials/run_tutorials.yml +++ b/docs/tutorials/run_tutorials.yml @@ -44,7 +44,7 @@ tutorials: checksum: 408edb7dc2e3dce44c0f275e4ba56fd8 spacecraft_file: - notebook: response/SpacecraftFile.ipynb + notebook: response/SpacecraftHistory.ipynb wasabi_files: COSI-SMEX/DC2/Data/Orientation/20280301_3_month_with_orbital_info.ori: checksum: 416fcc296fc37a056a069378a2d30cb2 diff --git a/docs/tutorials/source_injector/Extended_source_injector.ipynb b/docs/tutorials/source_injector/Extended_source_injector.ipynb index 8aacdaab..e9973a80 100755 --- a/docs/tutorials/source_injector/Extended_source_injector.ipynb +++ b/docs/tutorials/source_injector/Extended_source_injector.ipynb @@ -50,7 +50,7 @@ "from scoords import SpacecraftFrame\n", "from astromodels import Gaussian, Gaussian_on_sphere, ExtendedSource, Parameter\n", "from astromodels.functions.function import Function1D, FunctionMeta\n", - "from cosipy import SpacecraftFile, SourceInjector\n", + "from cosipy import SpacecraftHistory, SourceInjector\n", "from cosipy.util import fetch_wasabi_file\n", "from cosipy.threeml.custom_functions import Wide_Asymm_Gaussian_on_sphere, SpecFromDat\n", "from threeML import Powerlaw, Band, PointSource, Model, ExtendedSource\n", diff --git a/docs/tutorials/source_injector/Point_source_injector.ipynb b/docs/tutorials/source_injector/Point_source_injector.ipynb index 79168e06..15be444e 100755 --- a/docs/tutorials/source_injector/Point_source_injector.ipynb +++ b/docs/tutorials/source_injector/Point_source_injector.ipynb @@ -41,7 +41,7 @@ "from pathlib import Path\n", "from astropy.coordinates import SkyCoord\n", "from astromodels.functions.function import Function1D, FunctionMeta\n", - "from cosipy import SpacecraftFile, SourceInjector\n", + "from cosipy import SpacecraftHistory, SourceInjector\n", "from histpy import Histogram\n", "from threeML import Powerlaw, Band, Model, PointSource\n", "from cosipy.threeml.custom_functions import SpecFromDat\n", @@ -196,7 +196,7 @@ "source": [ "# Read the 3-month orientation\n", "# It is the pointing of the spacecraft during the the mock simlulation\n", - "ori = SpacecraftFile.parse_from_file(orientation_path)" + "ori = SpacecraftHistory.parse_from_file(orientation_path)" ] }, { @@ -454,7 +454,7 @@ "source": [ "# Read the 3-month orientation\n", "# It is the pointing of the spacecraft during the the mock simlulation\n", - "ori = SpacecraftFile.parse_from_file(orientation_path)" + "ori = SpacecraftHistory.parse_from_file(orientation_path)" ] }, { diff --git a/docs/tutorials/spectral_fits/continuum_fit/crab/SpectralFit_Crab.ipynb b/docs/tutorials/spectral_fits/continuum_fit/crab/SpectralFit_Crab.ipynb index 7111e65e..2b59c3a6 100644 --- a/docs/tutorials/spectral_fits/continuum_fit/crab/SpectralFit_Crab.ipynb +++ b/docs/tutorials/spectral_fits/continuum_fit/crab/SpectralFit_Crab.ipynb @@ -323,7 +323,7 @@ ], "source": [ "from cosipy import COSILike, test_data, BinnedData\n", - "from cosipy.spacecraftfile import SpacecraftFile\n", + "from cosipy.spacecraftfile import SpacecraftHistory\n", "from cosipy.response.FullDetectorResponse import FullDetectorResponse\n", "from cosipy.util import fetch_wasabi_file\n", "\n", @@ -477,7 +477,7 @@ "metadata": {}, "outputs": [], "source": [ - "sc_orientation = SpacecraftFile.parse_from_file(data_path / \"20280301_3_month_with_orbital_info.ori\")" + "sc_orientation = SpacecraftHistory.parse_from_file(data_path / \"20280301_3_month_with_orbital_info.ori\")" ] }, { diff --git 
a/docs/tutorials/spectral_fits/continuum_fit/grb/SpectralFit_GRB.ipynb b/docs/tutorials/spectral_fits/continuum_fit/grb/SpectralFit_GRB.ipynb index 2c908ba0..df109251 100644 --- a/docs/tutorials/spectral_fits/continuum_fit/grb/SpectralFit_GRB.ipynb +++ b/docs/tutorials/spectral_fits/continuum_fit/grb/SpectralFit_GRB.ipynb @@ -324,7 +324,7 @@ ], "source": [ "from cosipy import COSILike, BinnedData\n", - "from cosipy.spacecraftfile import SpacecraftFile\n", + "from cosipy.spacecraftfile import SpacecraftHistory\n", "from cosipy.response.FullDetectorResponse import FullDetectorResponse\n", "from cosipy.util import fetch_wasabi_file\n", "\n", @@ -487,7 +487,7 @@ "metadata": {}, "outputs": [], "source": [ - "ori = SpacecraftFile.parse_from_file(data_path / \"20280301_3_month_with_orbital_info.ori\")\n", + "ori = SpacecraftHistory.parse_from_file(data_path / \"20280301_3_month_with_orbital_info.ori\")\n", "tmin = Time(1842597410.0,format = 'unix')\n", "tmax = Time(1842597450.0,format = 'unix')\n", "sc_orientation = ori.source_interval(tmin, tmax)" diff --git a/docs/tutorials/spectral_fits/extended_source_fit/diffuse_511_spectral_fit.ipynb b/docs/tutorials/spectral_fits/extended_source_fit/diffuse_511_spectral_fit.ipynb index 0b44f798..f3c43d0e 100644 --- a/docs/tutorials/spectral_fits/extended_source_fit/diffuse_511_spectral_fit.ipynb +++ b/docs/tutorials/spectral_fits/extended_source_fit/diffuse_511_spectral_fit.ipynb @@ -46,7 +46,7 @@ "source": [ "# imports:\n", "from cosipy import COSILike, test_data, BinnedData\n", - "from cosipy.spacecraftfile import SpacecraftFile\n", + "from cosipy.spacecraftfile import SpacecraftHistory\n", "from cosipy.response.FullDetectorResponse import FullDetectorResponse\n", "from cosipy.response import PointSourceResponse\n", "from cosipy.threeml.custom_functions import Wide_Asymm_Gaussian_on_sphere, SpecFromDat\n", @@ -770,7 +770,7 @@ "source": [ "response_file = \"SMEXv12.511keV.HEALPixO4.binnedimaging.imagingresponse.nonsparse_nside16.area.h5\"\n", "response = FullDetectorResponse.open(response_file)\n", - "ori = SpacecraftFile.parse_from_file(\"20280301_3_month_with_orbital_info.ori\")\n", + "ori = SpacecraftHistory.parse_from_file(\"20280301_3_month_with_orbital_info.ori\")\n", "psr_file = \"psr_gal_511_DC2.h5\"" ] }, @@ -3159,7 +3159,7 @@ "# if not previously loaded in example 1, load the response, ori, and psr: \n", "response_file = \"SMEXv12.511keV.HEALPixO4.binnedimaging.imagingresponse.nonsparse_nside16.area.h5\"\n", "response = FullDetectorResponse.open(response_file)\n", - "ori = SpacecraftFile.parse_from_file(\"20280301_3_month_with_orbital_info.ori\")\n", + "ori = SpacecraftHistory.parse_from_file(\"20280301_3_month_with_orbital_info.ori\")\n", "psr_file = \"psr_gal_511_DC2.h5\"" ] }, diff --git a/docs/tutorials/ts_map/Parallel_TS_map_computation.ipynb b/docs/tutorials/ts_map/Parallel_TS_map_computation.ipynb index fb29de59..69da41b5 100644 --- a/docs/tutorials/ts_map/Parallel_TS_map_computation.ipynb +++ b/docs/tutorials/ts_map/Parallel_TS_map_computation.ipynb @@ -215,7 +215,7 @@ "%%capture\n", "# import necessary modules\n", "from threeML import Powerlaw\n", - "from cosipy import FastTSMap, SpacecraftFile\n", + "from cosipy import FastTSMap, SpacecraftHistory\n", "from cosipy.response import FullDetectorResponse\n", "import astropy.units as u\n", "from histpy import Histogram\n", @@ -514,7 +514,7 @@ "outputs": [], "source": [ "# read the full oritation but only get the interval for the GRB\n", - "ori_full = 
SpacecraftFile.parse_from_file(orientation_path)\n", + "ori_full = SpacecraftHistory.parse_from_file(orientation_path)\n", "grb_ori = ori_full.source_interval(Time(grb_tmin, format = \"unix\"), Time(grb_tmax, format = \"unix\"))\n", "\n", "# clear redundant data from RAM\n", @@ -788,7 +788,7 @@ "ax.set_ylabel(\"Counts\")\n", "\n", "# read the full oritation but only get the interval for the GRB\n", - "ori_full = SpacecraftFile.parse_from_file(orientation_path)\n", + "ori_full = SpacecraftHistory.parse_from_file(orientation_path)\n", "grb_ori = ori_full.source_interval(Time(grb_tmin, format = \"unix\"), Time(grb_tmax, format = \"unix\"))\n", "\n", "# clear redundant data from RAM\n", diff --git a/tests/image_deconvolution/test_coordsys_conversion_matrix.py b/tests/image_deconvolution/test_coordsys_conversion_matrix.py index 0858cd27..afc6ab1e 100644 --- a/tests/image_deconvolution/test_coordsys_conversion_matrix.py +++ b/tests/image_deconvolution/test_coordsys_conversion_matrix.py @@ -3,7 +3,7 @@ from cosipy import test_data from cosipy.response import FullDetectorResponse -from cosipy.spacecraftfile import SpacecraftFile +from cosipy.spacecraftfile import SpacecraftHistory from cosipy.image_deconvolution import SpacecraftAttitudeExposureTable from cosipy.image_deconvolution import CoordsysConversionMatrix @@ -12,7 +12,7 @@ def test_coordsys_conversion_matrix_time(tmp_path): full_detector_response = FullDetectorResponse.open(test_data.path / "test_full_detector_response.h5") - ori = SpacecraftFile.open(test_data.path / "20280301_first_10sec.ori") + ori = SpacecraftHistory.open(test_data.path / "20280301_first_10sec.ori") ccm = CoordsysConversionMatrix.time_binning_ccm(full_detector_response, ori, [ori.get_time()[0].value, ori.get_time()[-1].value] * u.s) diff --git a/tests/image_deconvolution/test_exposure_table.py b/tests/image_deconvolution/test_exposure_table.py index 34ce34f2..c00bba83 100644 --- a/tests/image_deconvolution/test_exposure_table.py +++ b/tests/image_deconvolution/test_exposure_table.py @@ -2,13 +2,13 @@ from cosipy import test_data from cosipy.image_deconvolution import SpacecraftAttitudeExposureTable -from cosipy.spacecraftfile import SpacecraftFile +from cosipy.spacecraftfile import SpacecraftHistory def test_exposure_table(tmp_path): nside = 1 - ori = SpacecraftFile.open(test_data.path / "20280301_first_10sec.ori") + ori = SpacecraftHistory.open(test_data.path / "20280301_first_10sec.ori") assert SpacecraftAttitudeExposureTable.analyze_orientation(ori, nside=nside, start=None, stop=ori.get_time()[-1], min_exposure=0, min_num_pointings=1) == None diff --git a/tests/polarization/test_polarization_asad.py b/tests/polarization/test_polarization_asad.py index 4d8944fb..4b63bb80 100644 --- a/tests/polarization/test_polarization_asad.py +++ b/tests/polarization/test_polarization_asad.py @@ -5,7 +5,7 @@ from cosipy.polarization import PolarizationASAD from cosipy.polarization.conventions import IAUPolarizationConvention, MEGAlibRelativeZ -from cosipy.spacecraftfile import SpacecraftFile +from cosipy.spacecraftfile import SpacecraftHistory from cosipy import UnBinnedData from cosipy.threeml.custom_functions import Band_Eflux from cosipy import test_data @@ -13,7 +13,7 @@ analysis = UnBinnedData(test_data.path / 'polarization_data.yaml') data = analysis.get_dict_from_hdf5(test_data.path / 'polarization_data.hdf5') response_path = test_data.path / 'test_polarization_response_dense.h5' -sc_orientation = SpacecraftFile.open(test_data.path / 'polarization_ori.ori') +sc_orientation = 
SpacecraftHistory.open(test_data.path / 'polarization_ori.ori') attitude = sc_orientation.get_attitude()[0] a = 10. * u.keV diff --git a/tests/response/test_full_detector_response.py b/tests/response/test_full_detector_response.py index f58bf8e5..688e585a 100644 --- a/tests/response/test_full_detector_response.py +++ b/tests/response/test_full_detector_response.py @@ -9,7 +9,7 @@ from cosipy import test_data from cosipy.response import FullDetectorResponse -from cosipy.spacecraftfile import SpacecraftFile +from cosipy.spacecraftfile import SpacecraftHistory response_path = test_data.path / "test_full_detector_response_dense.h5" orientation_path = test_data.path / "20280301_first_10sec.ori" @@ -79,7 +79,7 @@ def test_get_interp_response(): def test_get_extended_source_response(): - orientation = SpacecraftFile.open(orientation_path) + orientation = SpacecraftHistory.open(orientation_path) with FullDetectorResponse.open(response_path) as response: @@ -98,7 +98,7 @@ def test_get_extended_source_response(): def test_merge_psr_to_extended_source_response(tmp_path): - orientation = SpacecraftFile.open(orientation_path) + orientation = SpacecraftHistory.open(orientation_path) with FullDetectorResponse.open(response_path) as response: diff --git a/tests/source_injector/test_source_injector.py b/tests/source_injector/test_source_injector.py index 13af2e5b..dc0f59d0 100644 --- a/tests/source_injector/test_source_injector.py +++ b/tests/source_injector/test_source_injector.py @@ -1,4 +1,4 @@ -from cosipy import SpacecraftFile, SourceInjector +from cosipy import SpacecraftHistory, SourceInjector from astropy.coordinates import SkyCoord from threeML import Powerlaw from pathlib import Path @@ -15,7 +15,7 @@ def test_inject_point_source(): # defind the response and orientation response_path = test_data.path / "test_full_detector_response_dense.h5" orientation_path = test_data.path / "20280301_2s.ori" - ori = SpacecraftFile.open(orientation_path) + ori = SpacecraftHistory.open(orientation_path) # powerlaw model index = -2.2 diff --git a/tests/spacecraftfile/test_arf_rmf_converter.py b/tests/spacecraftfile/test_arf_rmf_converter.py index ed4abadd..ada23afb 100644 --- a/tests/spacecraftfile/test_arf_rmf_converter.py +++ b/tests/spacecraftfile/test_arf_rmf_converter.py @@ -4,7 +4,7 @@ import numpy as np from astropy.coordinates import SkyCoord from astropy.io import fits -from cosipy import test_data, SpacecraftFile +from cosipy import test_data, SpacecraftHistory from cosipy.response import FullDetectorResponse from cosipy.spacecraftfile import RspArfRmfConverter @@ -14,7 +14,7 @@ def test_get_psr_rsp(): response_path = test_data.path / "test_full_detector_response.h5" response = FullDetectorResponse.open(response_path) ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.open(ori_path) + ori = SpacecraftHistory.open(ori_path) target_coord = SkyCoord(l=184.5551, b=-05.7877, unit=(u.deg, u.deg), frame="galactic") converter = RspArfRmfConverter(response, ori, target_coord) @@ -80,7 +80,7 @@ def test_get_arf(): response_path = test_data.path / "test_full_detector_response.h5" response = FullDetectorResponse.open(response_path) ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.open(ori_path) + ori = SpacecraftHistory.open(ori_path) target_coord = SkyCoord(l=184.5551, b=-05.7877, unit=(u.deg, u.deg), frame="galactic") converter = RspArfRmfConverter(response, ori, target_coord) @@ -107,7 +107,7 @@ def test_get_rmf(): response_path = test_data.path / 
"test_full_detector_response.h5" response = FullDetectorResponse.open(response_path) ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.open(ori_path) + ori = SpacecraftHistory.open(ori_path) target_coord = SkyCoord(l=184.5551, b=-05.7877, unit=(u.deg, u.deg), frame="galactic") converter = RspArfRmfConverter(response, ori, target_coord) @@ -203,7 +203,7 @@ def test_get_pha(): response_path = test_data.path / "test_full_detector_response.h5" response = FullDetectorResponse.open(response_path) ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.open(ori_path) + ori = SpacecraftHistory.open(ori_path) target_coord = SkyCoord(l=184.5551, b=-05.7877, unit=(u.deg, u.deg), frame="galactic") converter = RspArfRmfConverter(response, ori, target_coord) @@ -238,7 +238,7 @@ def test_plot_arf(): response_path = test_data.path / "test_full_detector_response.h5" response = FullDetectorResponse.open(response_path) ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.open(ori_path) + ori = SpacecraftHistory.open(ori_path) target_coord = SkyCoord(l=184.5551, b=-05.7877, unit=(u.deg, u.deg), frame="galactic") converter = RspArfRmfConverter(response, ori, target_coord) @@ -257,7 +257,7 @@ def test_plot_rmf(): response_path = test_data.path / "test_full_detector_response.h5" response = FullDetectorResponse.open(response_path) ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.open(ori_path) + ori = SpacecraftHistory.open(ori_path) target_coord = SkyCoord(l=184.5551, b=-05.7877, unit=(u.deg, u.deg), frame="galactic") converter = RspArfRmfConverter(response, ori, target_coord) diff --git a/tests/spacecraftfile/test_spacecraftfile.py b/tests/spacecraftfile/test_spacecraftfile.py index 19e24efb..e0114576 100644 --- a/tests/spacecraftfile/test_spacecraftfile.py +++ b/tests/spacecraftfile/test_spacecraftfile.py @@ -1,6 +1,6 @@ from cosipy.response import FullDetectorResponse from cosipy import test_data -from cosipy import SpacecraftFile +from cosipy import SpacecraftHistory import numpy as np import astropy.units as u from astropy.coordinates import SkyCoord @@ -13,7 +13,7 @@ def test_get_time(): ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.open(ori_path) + ori = SpacecraftHistory.open(ori_path) assert np.allclose(ori.obstime.unix, [1835478000.0, 1835478001.0, 1835478002.0, @@ -26,20 +26,20 @@ def test_read_only_selected_range(): ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.open(ori_path, - tstart=Time(1835478002.0, format = 'unix'), - tstop = Time(1835478008.0, format='unix') - ) + ori = SpacecraftHistory.open(ori_path, + tstart=Time(1835478002.0, format = 'unix'), + tstop = Time(1835478008.0, format='unix') + ) assert np.allclose(ori.obstime.unix, [1835478002.0, 1835478003.0, 1835478004.0, 1835478005.0, 1835478006.0, 1835478007.0, 1835478008.0, 1835478009.0]) - ori = SpacecraftFile.open(ori_path, - tstart=Time(1835478002.5, format = 'unix'), - tstop = Time(1835478007.5, format='unix') - ) + ori = SpacecraftHistory.open(ori_path, + tstart=Time(1835478002.5, format = 'unix'), + tstop = Time(1835478007.5, format='unix') + ) assert np.allclose(ori.obstime.unix, [1835478002.0, @@ -49,7 +49,7 @@ def test_read_only_selected_range(): def test_get_time_delta(): ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.open(ori_path) + ori = SpacecraftHistory.open(ori_path) time_delta = ori.intervals_duration.to_value(u.s) assert 
np.allclose(time_delta, np.array([1.000000, 1.000000, 1.000000, 1.000000, 1.000000, @@ -63,7 +63,7 @@ def test_get_time_delta(): def test_get_attitude(): ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.open(ori_path) + ori = SpacecraftHistory.open(ori_path) attitude = ori.attitude @@ -117,7 +117,7 @@ def test_get_attitude(): def test_get_target_in_sc_frame(): ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.open(ori_path) + ori = SpacecraftHistory.open(ori_path) target_coord = SkyCoord(l=184.5551, b = -05.7877, unit = (u.deg, u.deg), frame = "galactic") @@ -135,7 +135,7 @@ def test_get_target_in_sc_frame(): def test_get_dwell_map(): ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.open(ori_path) + ori = SpacecraftHistory.open(ori_path) target_coord = SkyCoord(l=184.5551, b = -05.7877, unit = (u.deg, u.deg), frame = "galactic") @@ -148,7 +148,7 @@ def test_get_dwell_map(): def test_select_interval(): ori_path = test_data.path / "20280301_first_10sec.ori" - ori = SpacecraftFile.open(ori_path) + ori = SpacecraftHistory.open(ori_path) new_ori = ori.select_interval(ori.tstart+0.1*u.s, ori.tstart+2.1*u.s) diff --git a/tests/threeml/test_spectral_fitting.py b/tests/threeml/test_spectral_fitting.py index a200648f..35566248 100644 --- a/tests/threeml/test_spectral_fitting.py +++ b/tests/threeml/test_spectral_fitting.py @@ -1,5 +1,5 @@ from cosipy import COSILike, test_data, BinnedData -from cosipy.spacecraftfile import SpacecraftFile +from cosipy.spacecraftfile import SpacecraftHistory import astropy.units as u import numpy as np from threeML import Band, PointSource, Model, JointLikelihood, DataList @@ -8,7 +8,7 @@ data_path = test_data.path -sc_orientation = SpacecraftFile.open(data_path / "20280301_2s.ori") +sc_orientation = SpacecraftHistory.open(data_path / "20280301_2s.ori") dr = str(data_path / "test_full_detector_response.h5") # path to detector response data = BinnedData(data_path / "test_spectral_fit.yaml") diff --git a/tests/ts_map/test_fast_ts_map.py b/tests/ts_map/test_fast_ts_map.py index 5742a8f3..c1d16409 100644 --- a/tests/ts_map/test_fast_ts_map.py +++ b/tests/ts_map/test_fast_ts_map.py @@ -1,7 +1,7 @@ from cosipy import test_data from pytest import approx from threeML import Powerlaw -from cosipy import FastTSMap, SpacecraftFile +from cosipy import FastTSMap, SpacecraftHistory from histpy import Histogram import numpy as np from astropy.coordinates import SkyCoord @@ -16,7 +16,7 @@ def test_parallel_ts_fit(): response_path = test_data.path / "test_full_detector_response.h5" orientation_path = test_data.path / "20280301_2s.ori" - ori = SpacecraftFile.open(orientation_path) + ori = SpacecraftHistory.open(orientation_path) src_bkg = Histogram.open(src_bkg_path).project(['Em', 'PsiChi', 'Phi']) bkg = Histogram.open(bkg_path).project(['Em', 'PsiChi', 'Phi']) @@ -121,7 +121,7 @@ def test_get_ei_cds_array_detector(): response_path = test_data.path / "test_full_detector_response.h5" orientation_path = test_data.path / "20280301_2s.ori" - ori = SpacecraftFile.open(orientation_path) + ori = SpacecraftHistory.open(orientation_path) index = -2.2 K = 10 / u.cm / u.cm / u.s / u.keV @@ -154,7 +154,7 @@ def test_fast_ts_fit(): bkg_path = test_data.path / "ts_map_bkg.h5" orientation_path = test_data.path / "20280301_2s.ori" - ori = SpacecraftFile.open(orientation_path) + ori = SpacecraftHistory.open(orientation_path) src_bkg = Histogram.open(src_bkg_path).project(['Em', 'PsiChi', 'Phi']) bkg = 
Histogram.open(bkg_path).project(['Em', 'PsiChi', 'Phi']) From 5bc9423f45d716bc81388c8481dbc72979d30bf1 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Tue, 6 May 2025 14:20:20 -0400 Subject: [PATCH 046/133] Move PolarizationASAD to different module to avoid circular import Signed-off-by: Israel Martinez --- cosipy/polarization/__init__.py | 1 - cosipy/polarization_fitting/__init__.py | 1 + .../{polarization => polarization_fitting}/polarization_asad.py | 0 3 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 cosipy/polarization_fitting/__init__.py rename cosipy/{polarization => polarization_fitting}/polarization_asad.py (100%) diff --git a/cosipy/polarization/__init__.py b/cosipy/polarization/__init__.py index 08187a3b..eadc83f2 100644 --- a/cosipy/polarization/__init__.py +++ b/cosipy/polarization/__init__.py @@ -1,3 +1,2 @@ -from .polarization_asad import PolarizationASAD from .conventions import PolarizationConvention, OrthographicConvention, StereographicConvention, IAUPolarizationConvention from .polarization_angle import PolarizationAngle diff --git a/cosipy/polarization_fitting/__init__.py b/cosipy/polarization_fitting/__init__.py new file mode 100644 index 00000000..eba26341 --- /dev/null +++ b/cosipy/polarization_fitting/__init__.py @@ -0,0 +1 @@ +from .polarization_asad import PolarizationASAD \ No newline at end of file diff --git a/cosipy/polarization/polarization_asad.py b/cosipy/polarization_fitting/polarization_asad.py similarity index 100% rename from cosipy/polarization/polarization_asad.py rename to cosipy/polarization_fitting/polarization_asad.py From c39d23d0ebf372c3623f71552aeffc0f1341134e Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Tue, 6 May 2025 14:23:01 -0400 Subject: [PATCH 047/133] Move RspArfRmfConverter to different module to avoid circular import Signed-off-by: Israel Martinez --- cosipy/response/__init__.py | 2 ++ cosipy/{spacecraftfile => response}/rsp_to_arf_rmf.py | 2 +- cosipy/spacecraftfile/__init__.py | 1 - 3 files changed, 3 insertions(+), 2 deletions(-) rename cosipy/{spacecraftfile => response}/rsp_to_arf_rmf.py (99%) diff --git a/cosipy/response/__init__.py b/cosipy/response/__init__.py index 57b3788c..6e23bdc8 100644 --- a/cosipy/response/__init__.py +++ b/cosipy/response/__init__.py @@ -4,3 +4,5 @@ from .ExtendedSourceResponse import ExtendedSourceResponse from .threeml_response import * from .threeml_point_source_response import * + +from .rsp_to_arf_rmf import RspArfRmfConverter \ No newline at end of file diff --git a/cosipy/spacecraftfile/rsp_to_arf_rmf.py b/cosipy/response/rsp_to_arf_rmf.py similarity index 99% rename from cosipy/spacecraftfile/rsp_to_arf_rmf.py rename to cosipy/response/rsp_to_arf_rmf.py index 5593c080..afdef1a0 100644 --- a/cosipy/spacecraftfile/rsp_to_arf_rmf.py +++ b/cosipy/response/rsp_to_arf_rmf.py @@ -1,7 +1,7 @@ import logging logger = logging.getLogger(__name__) -from .spacecraft_file import SpacecraftHistory +from cosipy.spacecraftfile.spacecraft_file import SpacecraftHistory import numpy as np import astropy.units as u diff --git a/cosipy/spacecraftfile/__init__.py b/cosipy/spacecraftfile/__init__.py index 4f12a593..9eb8444d 100644 --- a/cosipy/spacecraftfile/__init__.py +++ b/cosipy/spacecraftfile/__init__.py @@ -1,3 +1,2 @@ from .spacecraft_file import * -from .rsp_to_arf_rmf import RspArfRmfConverter from .scatt_map import SpacecraftAttitudeMap From b9b34f605be0b2dfa37d10b4f53835da3607c381 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Wed, 28 May 2025 11:10:08 -0400 Subject: 
[PATCH 048/133] Histpy polarization axis (draft) Signed-off-by: Israel Martinez --- cosipy/polarization/polarization_axis.py | 162 +++++++++++++++++++++++ 1 file changed, 162 insertions(+) create mode 100644 cosipy/polarization/polarization_axis.py diff --git a/cosipy/polarization/polarization_axis.py b/cosipy/polarization/polarization_axis.py new file mode 100644 index 00000000..5a59e98d --- /dev/null +++ b/cosipy/polarization/polarization_axis.py @@ -0,0 +1,162 @@ +import numpy as np +from .conventions import PolarizationConvention +from astropy import units as u + +from .polarization_angle import PolarizationAngle + +from histpy import Axis + + +class PolarizationAxis(Axis): + """ + Defines a polarization axis compatible with PolarizationAngle. + + Parameters: + edges (array-like): + Bin edges. Can be a Quantity array or PolarizationAngle + convention : PolarizationConvention + Convention defining the polarization basis in + the polarization plane (for which the source direction is normal). + Overrides the convention of "edges", if a PolarizationAngle object + was provided + label (str): Label for axis. If edges is an Axis object, this will + override its label + unit (unit-like): Unit for axis (will override unit of edges) + copy (bool): True if edge array should be distinct from passed-in + edges; if False, will use same edge array if possible + *args, **kwargs + Passed to convention class. + """ + + def __init__(self, + edges, + convention = 'iau', + label = None, + unit = None, + copy=True): + + if isinstance(edges, PolarizationAngle): + convention = edges.convention if convention is None else convention + edges = edges.angle + + super().__init__(edges, label = label, scale='linear', unit=unit, copy=copy) + + if self.unit is None: + raise ValueError("PolarizationAxis needs edges with units") + + self._convention = PolarizationConvention.get_convention(convention) + + @property + def convention(self): + return self._convention + + def _copy(self, edges=None, copy_edges=True): + """Make a deep copy of a HealpixAxis, optionally + replacing edge array. (The superclass's _copy + method handles edge replacement.) + """ + + new = super()._copy(edges, copy_edges) + + # self._convention is not copied. It's safe to share it. 
+ + return new + + def _standardize_value(self, value): + if isinstance(value, PolarizationAngle): + # Transform to axis' convention + return value.transform_to(self.convention).angle + else: + return value + + def find_bin(self, value, right = False): + return super().find_bin(self._standardize_value(value), right = right) + + def interp_weights(self, values): + return super().interp_weights(self._standardize_value(values)) + + def interp_weights_edges(self, values): + return super().interp_weights_edges(self._standardize_value(values)) + + @property + def lower_bounds(self): + return PolarizationAngle(super().lower_bounds, convention=self.convention) + + @property + def upper_bounds(self): + return PolarizationAngle(super().upper_bounds, convention=self.convention) + + @property + def bounds(self): + return PolarizationAngle(super().bounds, convention=self.convention) + + @property + def lo_lim(self): + return PolarizationAngle(super().lo_lim, convention=self.convention) + + @property + def hi_lim(self): + return PolarizationAngle(super().hi_lim, convention=self.convention) + + @property + def edges(self): + return PolarizationAngle(super().edges, convention=self.convention) + + @property + def centers(self): + return PolarizationAngle(super().centers, convention=self.convention) + + def _write_metadata(self, axis_set): + """ + Save extra metadata to existing dataset + """ + + super()._write_metadata(axis_set) + + convention = PolarizationConvention.get_convention_registered_name(self._convention) + + if convention is None: + raise RuntimeError(f"Only PolarizationAxis object with a registered named convention " + "can be saved disk") + + axis_set.attrs['convention'] = convention + + @classmethod + def _open(cls, dataset): + """ + Create Axis from HDF5 dataset + Written as a virtual constructor so that + subclasses may override + """ + + + edges = np.asarray(dataset) + + metadata = cls._open_metadata(dataset) + + new = cls.__new__(cls) + PolarizationAxis.__init__(new, + edges = edges, + unit = metadata['unit'], + convention = metadata['convention'], + label = metadata['label'], + copy = False) + + return new + + @classmethod + def _open_metadata(cls, dataset): + """ + Returns unit, label and scale as a dictionary + """ + + metadata = super()._open_metadata(dataset) + + metadata['convention'] = dataset['convention'] + + return metadata + + + + + From a5b67bf86b63c21a2671c5902275d2ca98536a7e Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Tue, 3 Jun 2025 16:39:41 -0400 Subject: [PATCH 049/133] Generate PSR from a generic BinnedInstrumentResponseInterface. - This reproduces the v0.3 GRB fit. - Only local coordinates for now. - No polarization yet. 
Signed-off-by: Israel Martinez --- .../free_norm_threeml_binned_bkg.py | 8 +- cosipy/data_io/BinnedData.py | 40 ++++++- cosipy/data_io/__init__.py | 2 +- cosipy/interfaces/__init__.py | 2 +- cosipy/interfaces/data_interface.py | 7 +- cosipy/interfaces/expectation_interface.py | 5 +- .../instrument_response_interface.py | 32 +++++ cosipy/interfaces/likelihood_interface.py | 40 ++++--- .../interfaces/source_response_interface.py | 15 +-- cosipy/polarization/__init__.py | 1 + cosipy/polarization/conventions.py | 16 +++ cosipy/polarization/polarization_angle.py | 15 ++- cosipy/response/FullDetectorResponse.py | 112 ++++++++++-------- cosipy/response/PointSourceResponse.py | 82 +++++++------ cosipy/response/__init__.py | 2 +- cosipy/response/instrument_response.py | 67 +++++++++++ .../response/threeml_point_source_response.py | 92 +++++++++----- cosipy/response/threeml_response.py | 13 +- cosipy/statistics/likelihood_functions.py | 35 ++++-- cosipy/threeml/util.py | 34 ++++++ ...ample_grb_fit_threeml_plugin_interfaces.py | 56 ++++++--- docs/api/interfaces/toy_interfaces_example.py | 44 +++++-- 22 files changed, 522 insertions(+), 198 deletions(-) create mode 100644 cosipy/interfaces/instrument_response_interface.py create mode 100644 cosipy/response/instrument_response.py create mode 100644 cosipy/threeml/util.py diff --git a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py index 3c975a82..2671b8fd 100644 --- a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py +++ b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py @@ -7,7 +7,7 @@ from astropy import units as u -from cosipy.interfaces import BinnedBackgroundInterface +from cosipy.interfaces import BinnedBackgroundInterface, BinnedDataInterface __all__ = ["FreeNormBinnedBackground"] @@ -116,12 +116,12 @@ def set_parameters(self, **parameters:Dict[str, u.Quantity]) -> None: def parameters(self) -> Dict[str, u.Quantity]: return {l:u.Quantity(n) for l,n in self.norms.items()} - def expectation(self, axes:Axes, copy:bool)->Histogram: + def expectation(self, data:BinnedDataInterface, copy:bool)->Histogram: """ Parameters ---------- - axes + data copy: If True, it will return an array that the user if free to modify. 
Otherwise, it will result a reference, possible to the cache, that @@ -132,7 +132,7 @@ def expectation(self, axes:Axes, copy:bool)->Histogram: """ - if axes != self.meausured_axes: + if data.axes != self.meausured_axes: raise ValueError("Requested axes do not match the background component axes") # Check if we can use the cache diff --git a/cosipy/data_io/BinnedData.py b/cosipy/data_io/BinnedData.py index 034307a9..8249c4a0 100644 --- a/cosipy/data_io/BinnedData.py +++ b/cosipy/data_io/BinnedData.py @@ -2,7 +2,7 @@ import sys import numpy as np import h5py -from histpy import Histogram, HealpixAxis, Axis +from histpy import Histogram, HealpixAxis, Axis, Axes from scoords import SpacecraftFrame, Attitude from mhealpy import HealpixMap, HealpixBase import healpy as hp @@ -12,6 +12,9 @@ import logging import astropy.units as u from astropy.coordinates import SkyCoord + +from cosipy.interfaces import BinnedDataInterface + logger = logging.getLogger(__name__) @@ -497,3 +500,38 @@ def get_raw_lightcurve(self, binned_data=None, output_name=None, show_plots=Fals df.to_csv("%s.dat" %output_name,index=False,sep="\t",columns=["Time[UTC]","Rate[ct/s]"]) return + + def get_em_cds(self): + return EmCDSBinnedData(self.binned_data.project('Em', 'Phi', 'PsiChi')) + +class EmCDSBinnedData(BinnedDataInterface): + """ + Measured energy (Em), Compton polar scattering angle (Phi), and the scattering direction (PsiChi). + Phi and PsiChi are the Compton Data Space (CDS). No time dependence + """ + def __init__(self, data:Histogram): + + # Checks + if set(data.axes.labels) != {'Em', 'Phi', 'PsiChi'}: + raise ValueError(f"Wrong axes. 'Em', 'Psi', 'PsiChi' expected.") + + if not data.axes['Em'].unit.is_equivalent(u.keV): + raise ValueError(f"Em axis should have units of energy") + + if not data.axes['Phi'].unit.is_equivalent(u.deg): + raise ValueError(f"Psi axis should have angle units") + + if not isinstance(data.axes['PsiChi'],HealpixAxis): + raise ValueError(f"PsiChi must be of type {HealpixAxis}.") + + if data.axes['PsiChi'].coordsys is None: + raise ValueError(f"PsiChi axes must have a coordinate system.") + + self._data = data + + @property + def data(self) -> Histogram: + return self._data + @property + def axes(self) -> Axes: + return self._data.axes \ No newline at end of file diff --git a/cosipy/data_io/__init__.py b/cosipy/data_io/__init__.py index 9e6394fe..8401a612 100644 --- a/cosipy/data_io/__init__.py +++ b/cosipy/data_io/__init__.py @@ -1,4 +1,4 @@ from .DataIO import DataIO from .UnBinnedData import UnBinnedData -from .BinnedData import BinnedData +from .BinnedData import BinnedData, EmCDSBinnedData from .ReadTraTest import ReadTraTest diff --git a/cosipy/interfaces/__init__.py b/cosipy/interfaces/__init__.py index 2b0e919e..b272f498 100644 --- a/cosipy/interfaces/__init__.py +++ b/cosipy/interfaces/__init__.py @@ -5,5 +5,5 @@ from .expectation_interface import * from .source_response_interface import * from .threeml_plugin_interface import * - +from .instrument_response_interface import * diff --git a/cosipy/interfaces/data_interface.py b/cosipy/interfaces/data_interface.py index 5c8b6704..d32225f4 100644 --- a/cosipy/interfaces/data_interface.py +++ b/cosipy/interfaces/data_interface.py @@ -1,5 +1,7 @@ from typing import Protocol, runtime_checkable, Dict, Type, Any +from histpy import Histogram, Axes + from .measurements import Measurements import histpy @@ -22,8 +24,9 @@ def measurements(self) -> Measurements:... 
@runtime_checkable class BinnedDataInterface(DataInterface, Protocol): @property - def data(self) -> histpy.Histogram:... - + def data(self) -> Histogram:... + @property + def axes(self) -> Axes:... diff --git a/cosipy/interfaces/expectation_interface.py b/cosipy/interfaces/expectation_interface.py index 562bdd0c..635c2c03 100644 --- a/cosipy/interfaces/expectation_interface.py +++ b/cosipy/interfaces/expectation_interface.py @@ -2,6 +2,7 @@ import histpy import numpy as np +from cosipy.interfaces import BinnedDataInterface from .measurements import Measurements @@ -15,12 +16,12 @@ class ExpectationInterface(Protocol):... @runtime_checkable class BinnedExpectationInterface(ExpectationInterface, Protocol): - def expectation(self, axes:histpy.Axes, copy:bool)->histpy.Histogram: + def expectation(self, data:BinnedDataInterface, copy:bool)->histpy.Histogram: """ Parameters ---------- - axes + data copy: If True, it will return an array that the user if free to modify. Otherwise, it will result a reference, possible to the cache, that diff --git a/cosipy/interfaces/instrument_response_interface.py b/cosipy/interfaces/instrument_response_interface.py new file mode 100644 index 00000000..2799ddf7 --- /dev/null +++ b/cosipy/interfaces/instrument_response_interface.py @@ -0,0 +1,32 @@ +from typing import Protocol + +from astropy.coordinates import SkyCoord +from histpy import Axes, Histogram + +from astropy import units as u + +from cosipy.polarization import PolarizationAngle + +__all__ = ["BinnedInstrumentResponseInterface"] + +class BinnedInstrumentResponseInterface(Protocol): + + def differential_effective_area(self, axes:Axes, direction: SkyCoord, energy:u.Quantity, polarization:PolarizationAngle) -> Histogram: + """ + + Parameters + ---------- + axes: + Measured axes + direction: + Photon incoming direction in SC coordinates + energy: + Photon energy + polarization + Photon polarization angle + + Returns + ------- + The effective area times the event measurement probability distribution integrated on each of the bins + of the provided axes + """ diff --git a/cosipy/interfaces/likelihood_interface.py b/cosipy/interfaces/likelihood_interface.py index 5bd14f64..7bd86ab0 100644 --- a/cosipy/interfaces/likelihood_interface.py +++ b/cosipy/interfaces/likelihood_interface.py @@ -10,15 +10,13 @@ @runtime_checkable class LikelihoodInterface(Protocol): - def __init__(self, - data: DataInterface, - response: ExpectationInterface, - bkg: BackgroundInterface, - *args, **kwargs):... def get_log_like(self) -> float:... @property def nobservations(self) -> int: """For BIC and other statistics""" + def set_data(self, data: DataInterface):... + def set_response(self, response: ExpectationInterface): ... + def set_background(self, bkg: BackgroundInterface): ... @property def data (self) -> DataInterface: ... @property @@ -31,20 +29,32 @@ class BinnedLikelihoodInterface(LikelihoodInterface, Protocol): """ Needs to check that data, response and bkg are compatible """ - def __init__(self, - data: BinnedDataInterface, - response: BinnedExpectationInterface, - bkg: BinnedBackgroundInterface, - *args, **kwargs):... 
+ def set_data(self, data: DataInterface): + if not isinstance(data, BinnedDataInterface): + raise TypeError("Incorrect data type for binned likelihood.") + + def set_response(self, response: ExpectationInterface): + if not isinstance(response, BinnedExpectationInterface): + raise TypeError("Incorrect data type for binned likelihood.") + + def set_background(self, bkg: BackgroundInterface): + if not isinstance(bkg, BinnedBackgroundInterface): + raise TypeError("Incorrect background type for binned likelihood.") @runtime_checkable class UnbinnedLikelihoodInterface(LikelihoodInterface, Protocol): """ Needs to check that data, response and bkg are compatible """ - def __init__(self, - data: UnbinnedDataInterface, - response: UnbinnedExpectationInterface, - bkg: UnbinnedBackgroundInterface, - *args, **kwargs):... + def set_data(self, data: DataInterface): + if not isinstance(data, UnbinnedDataInterface): + raise TypeError("Incorrect data type for unbinned likelihood.") + + def set_response(self, response: ExpectationInterface): + if not isinstance(response, UnbinnedExpectationInterface): + raise TypeError("Incorrect data type for unbinned likelihood.") + + def set_background(self, bkg: BackgroundInterface): + if not isinstance(bkg, UnbinnedBackgroundInterface): + raise TypeError("Incorrect background type for unbinned likelihood.") diff --git a/cosipy/interfaces/source_response_interface.py b/cosipy/interfaces/source_response_interface.py index fff27da0..c6528d36 100644 --- a/cosipy/interfaces/source_response_interface.py +++ b/cosipy/interfaces/source_response_interface.py @@ -4,8 +4,9 @@ from .expectation_interface import BinnedExpectationInterface, UnbinnedExpectationInterface -__all__ = ["SourceResponseInterface", - "ThreeMLModelResponseInterface", +from cosipy.spacecraftfile import SpacecraftHistory + +__all__ = ["ThreeMLModelResponseInterface", "UnbinnedThreeMLModelResponseInterface", "BinnedThreeMLModelResponseInterface", "ThreeMLSourceResponseInterface", @@ -13,12 +14,7 @@ "BinnedThreeMLSourceResponseInterface"] @runtime_checkable -class SourceResponseInterface(Protocol): - ... 
- def set_spacecraft_history(self, sc_history): - -@runtime_checkable -class ThreeMLModelResponseInterface(SourceResponseInterface, Protocol): +class ThreeMLModelResponseInterface(Protocol): def set_model(self, model: Model): """ The model is passed as a reference and it's parameters @@ -39,7 +35,8 @@ class BinnedThreeMLModelResponseInterface(ThreeMLModelResponseInterface, BinnedE """ @runtime_checkable -class ThreeMLSourceResponseInterface(SourceResponseInterface, Protocol): +class ThreeMLSourceResponseInterface(Protocol): + def set_source(self, source: Source): """ The source is passed as a reference and it's parameters diff --git a/cosipy/polarization/__init__.py b/cosipy/polarization/__init__.py index eadc83f2..5b8a163a 100644 --- a/cosipy/polarization/__init__.py +++ b/cosipy/polarization/__init__.py @@ -1,2 +1,3 @@ from .conventions import PolarizationConvention, OrthographicConvention, StereographicConvention, IAUPolarizationConvention from .polarization_angle import PolarizationAngle +from .polarization_axis import PolarizationAxis diff --git a/cosipy/polarization/conventions.py b/cosipy/polarization/conventions.py index a44eea01..68adbca8 100644 --- a/cosipy/polarization/conventions.py +++ b/cosipy/polarization/conventions.py @@ -46,6 +46,22 @@ def get_convention(cls, name, *args, **kwargs): except KeyError as e: raise Exception(f"No polarization convention by name '{name}'") from e + def get_convention_registered_name(cls, convention_class): + """ + Opposite of get_convention. Returns None if not found. + """ + + if isinstance(convention_class, PolarizationConvention): + # If the user passed the instant instead of the class + convention_class = type(convention_class) + + for conv_name, conv_class in cls._registered_conventions.items(): + if conv_class is convention_class: + return conv_name + + # If not found + return None + @property def frame(self): """ diff --git a/cosipy/polarization/polarization_angle.py b/cosipy/polarization/polarization_angle.py index b39073d5..e224da78 100644 --- a/cosipy/polarization/polarization_angle.py +++ b/cosipy/polarization/polarization_angle.py @@ -7,7 +7,8 @@ class PolarizationAngle: - def __init__(self, angle, skycoord , + def __init__(self, angle, + skycoord: SkyCoord = None, convention = 'iau', *args, **kwargs): """ @@ -18,7 +19,7 @@ def __init__(self, angle, skycoord , angle : :py:class:`astropy.coordinates.Angle Polarization angle skycoord : :py:class:`astropy.coordinates.SkyCoord` - Source direction + Source direction. 
Optional, but needed to use vector() and transform_to() convention : PolarizationConvention Convention the defined the polarization basis and direction in the polarization plane (for which the source direction is normal) @@ -49,12 +50,19 @@ def convention(self): def skycoord(self): return self._skycoord + @skycoord.setter + def skycoord(self, coord: SkyCoord): + self._skycoord = coord + @property def vector(self): """ Direction of the electric field vector """ + if self.skycoord is None: + raise RuntimeError("Set skycoord first") + # Get the projection vectors for the source direction in the current convention px, py = self.convention.get_basis(self.skycoord) @@ -74,6 +82,9 @@ def vector(self): def transform_to(self, convention, *args, **kwargs): + if self.skycoord is None: + raise RuntimeError("Set skycoord first") + # Standarize convention 2 convention2 = PolarizationConvention.get_convention(convention, *args, **kwargs) diff --git a/cosipy/response/FullDetectorResponse.py b/cosipy/response/FullDetectorResponse.py index 2afdcedb..af5ca004 100644 --- a/cosipy/response/FullDetectorResponse.py +++ b/cosipy/response/FullDetectorResponse.py @@ -1,3 +1,5 @@ +from cosipy.polarization.polarization_axis import PolarizationAxis + from .PointSourceResponse import PointSourceResponse from .DetectorResponse import DetectorResponse from .ExtendedSourceResponse import ExtendedSourceResponse @@ -575,8 +577,11 @@ def _open_rsp(cls, filename, Spectrumfile=None,norm="Linear" ,single_pixel = Fal coordsys=SpacecraftFrame()) new.pa_convention = pa_convention - if 'Pol' in new._axes.labels and not (pa_convention == 'RelativeX' or pa_convention == 'RelativeY' or pa_convention == 'RelativeZ'): - raise RuntimeError("Polarization angle convention of response ('RelativeX', 'RelativeY', or 'RelativeZ') must be provided") + if 'Pol' in new._axes.labels: + if not (pa_convention == 'RelativeX' or pa_convention == 'RelativeY' or pa_convention == 'RelativeZ'): + raise RuntimeError("Polarization angle convention of response ('RelativeX', 'RelativeY', or 'RelativeZ') must be provided") + + new._axes['Pol'] = PolarizationAxis(new._axes['Pol'].edges, convention=pa_convention) return new @@ -867,81 +872,84 @@ def get_point_source_response(self, # TODO: deprecate exposure_map in favor of coords + scatt map for both local # and interntial coords - if Earth_occ == True: - if coord != None: - if coord.size > 1: - raise ValueError("For Earth occultation you must use the same coordinate as was used for the scatt map!") - if exposure_map is not None: - if not self.conformable(exposure_map): - raise ValueError( - "Exposure map has a different grid than the detector response") + return self.get_local_point_source_response(exposure_map) + else: + return self.get_inertial_point_source_response(coord, scatt_map, Earth_occ) - psr = PointSourceResponse(self.axes[1:], - sparse=self._sparse, - unit=u.cm*u.cm*u.s) + def get_local_point_source_response(self, exposure_map): - for p in range(self.npix): + if not self.conformable(exposure_map): + raise ValueError( + "Exposure map has a different grid than the detector response") - if exposure_map[p] != 0: - psr += self[p]*exposure_map[p] + polarization_axis = None + if 'Pol' in self.axes.labels: + polarization_axis = self.axes['Pol'] - return psr + return PointSourceResponse.from_dwell_time_map(self.axes[('Em','Phi','PsiChi')], exposure_map, self.axes['Ei'], polarization_axis) - else: + def get_inertial_point_source_response(self, + coord = None, + scatt_map = None, + earth_occ = True): - # 
Rotate to inertial coordinates + # Rotate to inertial coordinates + if earth_occ: + if coord != None: + if coord.size > 1: + raise ValueError( + "For Earth occultation you must use the same coordinate as was used for the scatt map!") - if coord is None or scatt_map is None: - raise ValueError("Provide either exposure map or coord + scatt_map") + if coord is None or scatt_map is None: + raise ValueError("Provide either exposure map or coord + scatt_map") - if isinstance(coord.frame, SpacecraftFrame): - raise ValueError("Local coordinate + scatt_map not currently supported") + if isinstance(coord.frame, SpacecraftFrame): + raise ValueError("Local coordinate + scatt_map not currently supported") - if self.is_sparse: - raise ValueError("Coord + scatt_map currently only supported for dense responses") + if self.is_sparse: + raise ValueError("Coord + scatt_map currently only supported for dense responses") - axis = "PsiChi" + axis = "PsiChi" - coords_axis = Axis(np.arange(coord.size+1), label = 'coords') + coords_axis = Axis(np.arange(coord.size + 1), label='coords') - psr = Histogram([coords_axis] + list(deepcopy(self.axes[1:])), - unit = self.unit * scatt_map.unit) + psr = Histogram([coords_axis] + list(deepcopy(self.axes[1:])), + unit=self.unit * scatt_map.unit) - psr.axes[axis].coordsys = coord.frame + psr.axes[axis].coordsys = coord.frame - for i,(pixels, exposure) in \ + for i, (pixels, exposure) in \ enumerate(zip(scatt_map.contents.coords.transpose(), scatt_map.contents.data * scatt_map.unit)): + # gc.collect() # HDF5 cache issues - #gc.collect() # HDF5 cache issues + att = Attitude.from_axes(x=scatt_map.axes['x'].pix2skycoord(pixels[0]), + y=scatt_map.axes['y'].pix2skycoord(pixels[1])) - att = Attitude.from_axes(x = scatt_map.axes['x'].pix2skycoord(pixels[0]), - y = scatt_map.axes['y'].pix2skycoord(pixels[1])) + coord.attitude = att - coord.attitude = att + # TODO: Change this to interpolation + loc_nulambda_pixels = np.array(self.axes['NuLambda'].find_bin(coord), + ndmin=1) - #TODO: Change this to interpolation - loc_nulambda_pixels = np.array(self.axes['NuLambda'].find_bin(coord), - ndmin = 1) + dr_pix = Histogram.concatenate(coords_axis, [self[i] for i in loc_nulambda_pixels]) - dr_pix = Histogram.concatenate(coords_axis, [self[i] for i in loc_nulambda_pixels]) + dr_pix.axes['PsiChi'].coordsys = SpacecraftFrame(attitude=att) - dr_pix.axes['PsiChi'].coordsys = SpacecraftFrame(attitude = att) + self._sum_rot_hist(dr_pix, psr, exposure, coord, self.pa_convention) - self._sum_rot_hist(dr_pix, psr, exposure, coord, self.pa_convention) + # Convert to PSR + psr = tuple([PointSourceResponse(psr.axes[1:], + contents=data, + sparse=psr.is_sparse, + unit=psr.unit) + for data in psr[:]]) - # Convert to PSR - psr = tuple([PointSourceResponse(psr.axes[1:], - contents = data, - sparse = psr.is_sparse, - unit = psr.unit) - for data in psr[:]]) - - if coord.size == 1: - return psr[0] - else: - return psr + if coord.size == 1: + return psr[0] + else: + return psr def _setup_extended_source_response_params(self, coordsys, nside_image, nside_scatt_map): """ diff --git a/cosipy/response/PointSourceResponse.py b/cosipy/response/PointSourceResponse.py index d02613fe..d87b2a8b 100644 --- a/cosipy/response/PointSourceResponse.py +++ b/cosipy/response/PointSourceResponse.py @@ -1,11 +1,14 @@ -from histpy import Histogram#, Axes, Axis +from cosipy.polarization.polarization_axis import PolarizationAxis +from cosipy.threeml.util import to_linear_polarization +from mhealpy import HealpixMap +from 
cosipy.interfaces import BinnedInstrumentResponseInterface +from cosipy.polarization import PolarizationAngle +from histpy import Histogram, Axis, Axes # , Axes, Axis import numpy as np import astropy.units as u from scoords import SpacecraftFrame, Attitude -from astromodels.core.polarization import Polarization, LinearPolarization, StokesPolarization - from .functions import get_integrated_spectral_model import logging @@ -67,38 +70,7 @@ def get_expectation(self, spectrum, polarization=None): Histogram with the expected counts on each analysis bin """ - # FIXME: the logic of this code block should be moved to 3ML. - # We want to see if the source is polarized, and if so, confirm - # transform to linear polarization. - # https://github.com/threeML/astromodels/blob/master/astromodels/core/polarization.py - if polarization is not None: - - if type(polarization) == Polarization: - # FIXME: Polarization is the base class, but a 3ML source - # with no polarization default to the base class. - # The base class shouldn't be able to be instantiated, - # and we should have a NullPolarization subclass or None - polarization = None - - elif isinstance(polarization, LinearPolarization): - - if polarization.degree.value is 0: - polarization = None - - elif isinstance(polarization, StokesPolarization): - - # FIXME: Here we should convert the any Stokes parameters to Linear - # The circular component looks like unpolarized to us. - # This conversion is not yet implemented in Astromodels - raise ValueError("Fix me. I can't handle StokesPolarization yet") - - else: - - if isinstance(polarization, Polarization): - raise TypeError(f"Fix me. I don't know how to handle this polarization type") - else: - raise TypeError(f"Polarization must be a Polarization subclass") - + polarization = to_linear_polarization(polarization) if polarization is None: @@ -147,3 +119,43 @@ def get_expectation(self, spectrum, polarization=None): raise RuntimeError("Expectation should be dimensionless, but has units of " + str(hist.unit) + ".") return hist + + @classmethod + def from_dwell_time_map(cls, + measured_axes:Axes, + response: BinnedInstrumentResponseInterface, + exposure_map: HealpixMap, + energy_axis: Axis, + polarization_axis: PolarizationAxis = None + ): + + axes = [energy_axis] + + polarization_centers = None + if polarization_axis is not None: + axes += [polarization_axis] + polarization_centers = polarization_axis.centers + + axes += list(measured_axes) + + psr = PointSourceResponse(axes, unit=u.cm * u.cm * u.s) + + for p in range(exposure_map.npix): + + coord = exposure_map.pix2skycoord(p) + + if exposure_map[p] != 0: + psr += response.differential_effective_area(measured_axes, coord, energy_axis.centers, polarization_centers) * exposure_map[p] + + return psr + + @classmethod + def from_scatt_map(cls, + measured_axes:Axes, + response: BinnedInstrumentResponseInterface, + exposure_map: HealpixMap, + energy_axis: Axis, + polarization_axis: PolarizationAxis = None + ): + pass + diff --git a/cosipy/response/__init__.py b/cosipy/response/__init__.py index 6e23bdc8..9a76f206 100644 --- a/cosipy/response/__init__.py +++ b/cosipy/response/__init__.py @@ -4,5 +4,5 @@ from .ExtendedSourceResponse import ExtendedSourceResponse from .threeml_response import * from .threeml_point_source_response import * - +from .instrument_response import * from .rsp_to_arf_rmf import RspArfRmfConverter \ No newline at end of file diff --git a/cosipy/response/instrument_response.py b/cosipy/response/instrument_response.py new file mode 100644 
index 00000000..9f0f055e --- /dev/null +++ b/cosipy/response/instrument_response.py @@ -0,0 +1,67 @@ +import numpy as np +from astropy.coordinates import SkyCoord +import astropy.units as u + +from cosipy.interfaces.instrument_response_interface import BinnedInstrumentResponseInterface + +from cosipy.polarization import PolarizationAngle +from cosipy.response import FullDetectorResponse + +from histpy import Axes, Histogram + + +__all__ = ["BinnedInstrumentResponse"] + +class BinnedInstrumentResponse(BinnedInstrumentResponseInterface): + + def __init__(self, response:FullDetectorResponse): + + self._dr = response + + def differential_effective_area(self, axes:Axes, direction: SkyCoord, energy:u.Quantity, polarization:PolarizationAngle = None) -> Histogram: + """ + + Parameters + ---------- + axes: + Measured axes + direction: + Photon incoming direction in SC coordinates + energy: + Photon energy + polarization + Photon polarization angle + + Returns + ------- + The effective area times the event measurement probability distribution integrated on each of the bins + of the provided axes + """ + + # Check if we can use these axes + if 'PsiChi' not in axes.labels: + raise ValueError("PsiChi axis not present") + + if axes["PsiChi"].coordsys is None: + raise ValueError("PsiChi axes doesn't have a coordinate system") + + if polarization is not None: + if 'Pol' not in axes.labels: + raise RuntimeError("The FullDetectorResponse does not contain polarization information") + elif not np.array_equal(polarization, self._dr.axes['Pol'].centers): + # Matches the v0.3 behaviour + raise RuntimeError( + "Currently, the probed polarization angles need to match the underlying response matrix centers.") + + if not np.array_equal(energy, self._dr.axes['Ei'].centers): + # Matches the v0.3 behaviour + raise RuntimeError("Currently, the probed energy values need to match the underlying response matrix centers.") + + # Get the pixel as is since we already checked that the requested + # energy and polarization points match the underlying response centers + # Matches the v0.3 behaviour + pix = self._dr.ang2pix(direction) + + return self._dr[pix] + + diff --git a/cosipy/response/threeml_point_source_response.py b/cosipy/response/threeml_point_source_response.py index 39ce796a..a4c6813c 100644 --- a/cosipy/response/threeml_point_source_response.py +++ b/cosipy/response/threeml_point_source_response.py @@ -1,43 +1,60 @@ import logging +from pathlib import Path +from typing import Union + +from mhealpy import HealpixBase + +from cosipy.data_io import EmCDSBinnedData +from cosipy.interfaces.instrument_response_interface import BinnedInstrumentResponseInterface +from cosipy.polarization.polarization_axis import PolarizationAxis +from cosipy.threeml.util import to_linear_polarization + logger = logging.getLogger(__name__) import copy from astromodels.sources import Source, PointSource from scoords import SpacecraftFrame -from histpy import Axes, Histogram -from cosipy.interfaces import BinnedThreeMLSourceResponseInterface +from histpy import Axes, Histogram, Axis +from cosipy.interfaces import BinnedThreeMLSourceResponseInterface, BinnedDataInterface -from cosipy.response import FullDetectorResponse +from cosipy.response import FullDetectorResponse, PointSourceResponse from cosipy.spacecraftfile import SpacecraftHistory, SpacecraftAttitudeMap from mhealpy import HealpixMap -__all__ = ["BinnedThreeMlPointSourceResponse"] +__all__ = ["BinnedThreeMLPointSourceResponseLocal"] -class 
BinnedThreeMlPointSourceResponse(BinnedThreeMLSourceResponseInterface): +class BinnedThreeMLPointSourceResponseLocal(BinnedThreeMLSourceResponseInterface): """ COSI 3ML plugin. Parameters ---------- dr: - Full detector response handle (**not** the file path) - sc_orientation: + Full detector response handle, or the file path + sc_history: Contains the information of the orientation: timestamps (astropy.Time) and attitudes (scoord.Attitude) that describe the spacecraft for the duration of the data included in the analysis """ def __init__(self, - dr: FullDetectorResponse, - sc_orientation: SpacecraftHistory, - ): + instrument_response: BinnedInstrumentResponseInterface, + sc_history: SpacecraftHistory, + dwell_time_map_base: HealpixBase, + energy_axis:Axis, + polarization_axis:PolarizationAxis = None): # TODO: FullDetectorResponse -> BinnedInstrumentResponseInterface - self._dr = dr - self._sc_ori = sc_orientation + self._sc_ori = sc_history + + if not isinstance(dwell_time_map_base.coordsys, SpacecraftFrame): + raise ValueError("The dwell_time_map_base must have a SpacecraftFrame coordinate system.") + self._dwell_time_map_base = dwell_time_map_base + + # Use setters for these self._source = None # Prevent unnecessary calculations and new memory allocations @@ -56,6 +73,10 @@ def __init__(self, self._psr = None + self._response = instrument_response + self._energy_axis = energy_axis + self._polarization_axis = polarization_axis + def clear_cache(self): self._source = None @@ -84,25 +105,31 @@ def set_source(self, source: Source): if not isinstance(source, PointSource): raise TypeError("I only know how to handle point sources!") + if (to_linear_polarization(source.spectrum.main.polarization) is not None and + self._polarization_axis is None): + raise RuntimeError("This response can't handle a polarized source.") + self._source = source - def expectation(self, axes:Axes, copy = True)-> Histogram: + def expectation(self, data:BinnedDataInterface, copy = True)-> Histogram: # TODO: check coordsys from axis # TODO: Earth occ always true in this case + if not isinstance(data, EmCDSBinnedData): + raise TypeError(f"Wrong data type '{type(data)}', expected {EmCDSBinnedData}.") + + if self._source is None: + raise RuntimeError("Call set_source() first.") + + if self._sc_ori is None: + raise RuntimeError("Call set_spacecraft_history() first.") + # See this issue for the caveats of comparing models # https://github.com/threeML/threeML/issues/645 source_dict = self._source.to_dict() coord = self._source.position.sky_coord - # Check if we can use these axes - if 'PsiChi' not in axes.labels: - raise ValueError("PsiChi axes not present") - - if axes["PsiChi"].coordsys is None: - raise ValueError("PsiChi axes doesn't have a coordinate system") - # Use cached expectation if nothing has changed if self._expectation is not None and self._last_convolved_source_dict == source_dict: if copy: @@ -116,30 +143,31 @@ def expectation(self, axes:Axes, copy = True)-> Histogram: # are expensive if self._psr is None or coord != self._last_convolved_source_skycoord: - coordsys = axes["PsiChi"].coordsys + coordsys = data.axes["PsiChi"].coordsys logger.info("... 
Calculating point source response ...") - if isinstance(coordsys, SpacecraftFrame): - dwell_time_map = self._sc_ori.get_dwell_map(coord, base = self._dr) - self._psr = self._dr.get_point_source_response(exposure_map=dwell_time_map) - else: - scatt_map = self._sc_ori.get_scatt_map(nside=self._dr.nside * 2, - target_coord=coord, - coordsys=coordsys, - earth_occ = True) - self._psr = self._dr.get_point_source_response(coord=coord, scatt_map=scatt_map) + dwell_time_map = self._sc_ori.get_dwell_map(coord, base=self._dwell_time_map_base) - logger.info(f"--> done (source name : {self._source.name})") + self._psr = PointSourceResponse.from_dwell_time_map(data.axes, self._response, + dwell_time_map, self._energy_axis, + self._polarization_axis) + # TODO: Move these lines to inertial version. + # scatt_map = self._sc_ori.get_scatt_map(nside=self._dr.nside * 2, + # target_coord=coord, + # coordsys=coordsys, + # earth_occ = True) + # self._psr = self._dr.get_point_source_response(coord=coord, scatt_map=scatt_map) + logger.info(f"--> done (source name : {self._source.name})") # Convolve with spectrum self._expectation = self._psr.get_expectation(self._source.spectrum.main.shape, self._source.spectrum.main.polarization) # Check if axes match - if axes != self._expectation.axes: + if data.axes != self._expectation.axes: raise ValueError( "Currently, the expectation axes must exactly match the detector response measurement axes") diff --git a/cosipy/response/threeml_response.py b/cosipy/response/threeml_response.py index 05da26a3..e11ed276 100644 --- a/cosipy/response/threeml_response.py +++ b/cosipy/response/threeml_response.py @@ -1,6 +1,7 @@ import copy -from cosipy.interfaces import BinnedThreeMLModelResponseInterface, BinnedThreeMLSourceResponseInterface +from cosipy.interfaces import BinnedThreeMLModelResponseInterface, BinnedThreeMLSourceResponseInterface, \ + BinnedDataInterface from astromodels import Model from astromodels.sources import PointSource, ExtendedSource @@ -82,12 +83,12 @@ def set_model(self, model: Model): self._model = model - def expectation(self, axes:Axes, copy:bool = True)->Histogram: + def expectation(self, data:BinnedDataInterface, copy:bool = True)->Histogram: """ Parameters ---------- - axes + data copy Returns @@ -98,9 +99,9 @@ def expectation(self, axes:Axes, copy:bool = True)->Histogram: # https://github.com/threeML/threeML/issues/645 current_model_dict = self._model.to_dict() - if self._expectation is None or self._expectation.axes != axes: + if self._expectation is None or self._expectation.axes != data.axes: # Needs new memory allocation, and recompute everything - self._expectation = Histogram(axes) + self._expectation = Histogram(data.axes) else: # If nothing has changed in the model, we can use the cached expectation # as is. 
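For readers following the interface change in the hunk above: expectation() now receives a BinnedDataInterface object and reads the binning through data.axes instead of taking a bare histpy Axes. Below is a minimal sketch of what such a data object can look like; MinimalBinnedData and binned_hist are hypothetical names used only for illustration, and concrete implementations in this series (e.g. EmCDSBinnedData, or the ToyData class further down) may additionally enforce their own isinstance checks.

from histpy import Axes, Histogram

class MinimalBinnedData:
    # Hypothetical stand-in that satisfies BinnedDataInterface by simply
    # exposing a binned histogram and its axes.
    def __init__(self, hist: Histogram):
        self._hist = hist

    @property
    def data(self) -> Histogram:
        return self._hist

    @property
    def axes(self) -> Axes:
        return self._hist.axes

# expectation = response.expectation(MinimalBinnedData(binned_hist), copy=True)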
@@ -120,7 +121,7 @@ def expectation(self, axes:Axes, copy:bool = True)->Histogram: # Convolve all sources with the response for source_name,psr in self._source_responses.items(): - self._expectation += psr.expectation(axes) + self._expectation += psr.expectation(data) # See this issue for the caveats of comparing models # https://github.com/threeML/threeML/issues/645 diff --git a/cosipy/statistics/likelihood_functions.py b/cosipy/statistics/likelihood_functions.py index 4edc4a82..db9ef6e2 100644 --- a/cosipy/statistics/likelihood_functions.py +++ b/cosipy/statistics/likelihood_functions.py @@ -1,12 +1,14 @@ import logging +from cosipy.interfaces.expectation_interface import ExpectationInterface + logger = logging.getLogger(__name__) from cosipy.interfaces import (BinnedLikelihoodInterface, UnbinnedLikelihoodInterface, BinnedDataInterface, BinnedExpectationInterface, - BinnedBackgroundInterface, + BinnedBackgroundInterface, DataInterface, BackgroundInterface, ) import numpy as np @@ -18,16 +20,24 @@ class UnbinnedLikelihood(UnbinnedLikelihoodInterface): ... class PoissonLikelihood(BinnedLikelihoodInterface): - def __init__(self, - data: BinnedDataInterface, - response: BinnedExpectationInterface, - bkg: BinnedBackgroundInterface, - *args, **kwargs): + def __init__(self): + + self._data = None + self._bkg = None + self._response = None + def set_data(self, data: DataInterface): + super().set_data(data) # Checks type self._data = data - self._bkg = bkg + + def set_response(self, response: ExpectationInterface): + super().set_response(response) # Checks type self._response = response + def set_background(self, bkg: BackgroundInterface): + super().set_background(bkg) # Checks type + self._bkg = bkg + @property def data (self) -> BinnedDataInterface: return self._data @property @@ -41,14 +51,17 @@ def has_bkg(self): def get_log_like(self) -> float: + if self._data is None or self._response is None: + raise RuntimeError("Set data and response before calling this function.") + # Compute expectation including background # If we don't have background, we won't modify the expectation, so # it's safe to use the internal cache. - expectation = self._response.expectation(self._data.data.axes, copy = self.has_bkg) + expectation = self._response.expectation(self._data, copy = self.has_bkg) if self.has_bkg: # We won't modify the bkg expectation, so it's safe to use the internal cache - expectation += self._bkg.expectation(self._data.data.axes, copy = False) + expectation += self._bkg.expectation(self._data, copy = False) # Get the arrays expectation = expectation.contents @@ -61,6 +74,10 @@ def get_log_like(self) -> float: @property def nobservations(self) -> int: + + if self._data is None: + raise RuntimeError("Set the data before calling this function.") + return self._data.data.contents.size diff --git a/cosipy/threeml/util.py b/cosipy/threeml/util.py new file mode 100644 index 00000000..406a3f6b --- /dev/null +++ b/cosipy/threeml/util.py @@ -0,0 +1,34 @@ +from astromodels.core.polarization import Polarization, LinearPolarization, StokesPolarization + +def to_linear_polarization(polarization: Polarization): + # FIXME: the logic of this code block should be moved to 3ML. + # We want to see if the source is polarized, and if so, confirm + # transform to linear polarization. 
+ # https://github.com/threeML/astromodels/blob/master/astromodels/core/polarization.py + if polarization is not None: + + if type(polarization) == Polarization: + # FIXME: Polarization is the base class, but a 3ML source + # with no polarization default to the base class. + # The base class shouldn't be able to be instantiated, + # and we should have a NullPolarization subclass or None + polarization = None + + elif isinstance(polarization, LinearPolarization): + + if polarization.degree.value is 0: + polarization = None + + elif isinstance(polarization, StokesPolarization): + + # FIXME: Here we should convert the any Stokes parameters to Linear + # The circular component looks like unpolarized to us. + # This conversion is not yet implemented in Astromodels + raise ValueError("Fix me. I can't handle StokesPolarization yet") + + else: + + if isinstance(polarization, Polarization): + raise TypeError(f"Fix me. I don't know how to handle this polarization type") + else: + raise TypeError(f"Polarization must be a Polarization subclass") \ No newline at end of file diff --git a/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py index 66793e1d..808a24e9 100644 --- a/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py @@ -1,16 +1,20 @@ import sys +from mhealpy import HealpixBase + from cosipy.statistics import PoissonLikelihood from histpy import Histogram from cosipy.background_estimation import FreeNormBinnedBackground from cosipy.interfaces import ThreeMLPluginInterface -from cosipy.response import BinnedThreeMLResponse, BinnedThreeMlPointSourceResponse +from cosipy.response import BinnedThreeMLResponse, BinnedInstrumentResponse, \ + BinnedThreeMLPointSourceResponseLocal from cosipy import BinnedData from cosipy.spacecraftfile import SpacecraftHistory from cosipy.response.FullDetectorResponse import FullDetectorResponse from cosipy.util import fetch_wasabi_file +from cosipy.polarization import PolarizationAxis from scoords import SpacecraftFrame @@ -67,13 +71,12 @@ def main(): # Data preparation grb_bkg = BinnedData(data_path / "grb.yaml") + grb_bkg.load_binned_data_from_hdf5(binned_data=data_path / "grb_bkg_binned_data.hdf5") + bkg = BinnedData(data_path / "background.yaml") - grb_bkg.load_binned_data_from_hdf5(binned_data=data_path / "grb_bkg_binned_data.hdf5") bkg.load_binned_data_from_hdf5(binned_data=data_path / "bkg_binned_data_1s_local.hdf5") - data = grb_bkg.binned_data.project('Em', 'Phi', 'PsiChi') - bkg_tmin = 1842597310.0 bkg_tmax = 1842597550.0 bkg_min = np.where(bkg.binned_data.axes['Time'].edges.value == bkg_tmin)[0][0] @@ -86,8 +89,8 @@ def main(): ori = SpacecraftHistory.open(data_path / "20280301_3_month_with_orbital_info.ori", tmin, tmax) ori = ori.select_interval(tmin, tmax) # Function changed name during refactoring - dr = FullDetectorResponse.open( - data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5") + dr_path = data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5" + dr = FullDetectorResponse.open(dr_path) # Workaround to avoid inf values. Out bkg should be smooth, but currently it's not. # Reproduces results before refactoring. 
It's not _exactly_ the same, since this fudge value was 1e-12, and @@ -95,23 +98,44 @@ def main(): bkg_dist += sys.float_info.min # ============ Interfaces ============== + data = grb_bkg.get_em_cds() + bkg = FreeNormBinnedBackground(bkg_dist) - psr = BinnedThreeMlPointSourceResponse(dr, ori) + instrument_response = BinnedInstrumentResponse(dr) + + if isinstance(data.axes['PsiChi'].coordsys, SpacecraftFrame): + local_coord_fit = True + else: + local_coord_fit = False + + if local_coord_fit: + pass + # polarization_axis = dr.axes + # PolarizationAxis(pola)) + + if local_coord_fit: + # Currently using the same NnuLambda, Ei and Pol axes as the underlying FullDetectorResponse, + # matching the behavior of v0.3. This is all the current BinnedInstrumentResponse can do. + # In principle, this can be decoupled, and a BinnedInstrumentResponseInterface implementation + # can provide the response for an arbitrary directions, Ei and Pol values. + psr = BinnedThreeMLPointSourceResponseLocal(instrument_response, + sc_history=ori, + dwell_time_map_base = HealpixBase(nside = dr.nside, scheme = dr.scheme, coordsys=SpacecraftFrame()), + energy_axis=dr.axes['Ei'], + polarization_axis=dr.axes['Pol'] if 'Pol' in dr.axes.labels else None) + else: + psr = BinnedThreeMLPointSourceResponse() + response = BinnedThreeMLResponse(point_source_response = psr) - class BinnedDataAux: - # We can move this to BinnedData later, but this shows the flexibility of using Protocols over abstract classes - # BinnedDataAux is a "BinnedDataInterface" since it implements the data() method, even if it doesn't - # explicitly derive from BinnedDataInterface - @property - def data(self) -> Histogram: - return data - data_aux = BinnedDataAux() - like_fun = PoissonLikelihood(data_aux, response, bkg) + like_fun = PoissonLikelihood() + like_fun.set_data(data) + like_fun.set_response(response) + like_fun.set_background(bkg) cosi = ThreeMLPluginInterface('cosi', like_fun) diff --git a/docs/api/interfaces/toy_interfaces_example.py b/docs/api/interfaces/toy_interfaces_example.py index 00dae4b6..df0761c4 100644 --- a/docs/api/interfaces/toy_interfaces_example.py +++ b/docs/api/interfaces/toy_interfaces_example.py @@ -4,6 +4,7 @@ from astromodels import LinearPolarization, SpectralComponent, Parameter from astromodels.core.polarization import Polarization import astropy.units as u +from cosipy import SpacecraftHistory from cosipy.statistics import PoissonLikelihood @@ -61,6 +62,10 @@ def __init__(self): def data(self) -> Histogram: return self._data + @property + def axes(self) -> Axes: + return self._data.axes + class ToyBkg(BinnedBackgroundInterface): """ @@ -79,9 +84,12 @@ def set_parameters(self, **parameters:u.Quantity) -> None: def parameters(self) -> Dict[str, u.Quantity]: return {'norm': u.Quantity(self._norm)} - def expectation(self, axes: Axes, copy = True) -> Histogram: + def expectation(self, data: BinnedDataInterface, copy = True) -> Histogram: + + if not isinstance(data, ToyData): + raise TypeError(f"Wrong data type '{type(data)}', expected {ToyData}.") - if axes != self._unit_expectation.axes: + if data.axes != self._unit_expectation.axes: raise ValueError("Wrong axes. 
I have fixed axes.") # Always a copy @@ -105,8 +113,12 @@ def set_source(self, source: Source): self._source = source - def expectation(self, axes: Axes, copy = True) -> Histogram: - if axes != self._unit_expectation.axes: + def expectation(self, data: BinnedDataInterface, copy = True) -> Histogram: + + if not isinstance(data, ToyData): + raise TypeError(f"Wrong data type '{type(data)}', expected {ToyData}.") + + if data.axes != self._unit_expectation.axes: raise ValueError("Wrong axes. I have fixed axes.") if self._source is None: @@ -127,6 +139,10 @@ def copy(self) -> "ToyPointSourceResponse": class ToyModelResponse(BinnedThreeMLModelResponseInterface): def __init__(self, psr: BinnedThreeMLSourceResponseInterface): + + if not isinstance(psr, ToyPointSourceResponse): + raise TypeError(f"Wrong psr type '{type(psr)}', expected {ToyPointSourceResponse}.") + self._psr = psr self._psr_copies = {} @@ -138,11 +154,15 @@ def set_model(self, model: Model): psr_copy.set_source(source) self._psr_copies[name] = psr_copy - def expectation(self, axes: Axes, copy = True) -> Histogram: - expectation = Histogram(axes) + def expectation(self, data: BinnedDataInterface, copy = True) -> Histogram: + + if not isinstance(data, ToyData): + raise TypeError(f"Wrong data type '{type(data)}', expected {ToyData}.") + + expectation = Histogram(data.axes) for source_name,psr in self._psr_copies.items(): - expectation += psr.expectation(axes, copy = False) + expectation += psr.expectation(data, copy = False) # Always a copy return expectation @@ -184,7 +204,11 @@ def expectation(self, axes: Axes, copy = True) -> Histogram: #model = Model() # Uncomment for bkg-only hypothesis # Fit -cosi = ThreeMLPluginInterface('cosi', PoissonLikelihood(data, response, bkg)) +like_fun = PoissonLikelihood() +like_fun.set_data(data) +like_fun.set_response(response) +like_fun.set_background(bkg) +cosi = ThreeMLPluginInterface('cosi', like_fun) plugins = DataList(cosi) like = JointLikelihood(model, plugins) @@ -200,8 +224,8 @@ def expectation(self, axes: Axes, copy = True) -> Histogram: # Plot results fig, ax = plt.subplots() data.data.plot(ax) -expectation = response.expectation(data.data.axes) +expectation = response.expectation(data) if bkg is not None: - expectation = expectation + bkg.expectation(data.data.axes) + expectation = expectation + bkg.expectation(data) expectation.plot(ax) plt.show() From 87975c8508670507459a475d32dcab1b60be629f Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Fri, 27 Jun 2025 14:18:09 -0400 Subject: [PATCH 050/133] Small type annotation Signed-off-by: Israel Martinez --- cosipy/response/FullDetectorResponse.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cosipy/response/FullDetectorResponse.py b/cosipy/response/FullDetectorResponse.py index af5ca004..be1a7518 100644 --- a/cosipy/response/FullDetectorResponse.py +++ b/cosipy/response/FullDetectorResponse.py @@ -729,7 +729,7 @@ def ndim(self): return self.axes.ndim @property - def axes(self): + def axes(self) -> Axes: """ List of axes. 
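As a reference point at this stage of the series, the pieces introduced so far wire together roughly as in the condensed sketch below, which follows docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py as it stands here. The variables dr, ori, data and bkg_dist stand for the detector response, spacecraft history, binned EmCDS data and background distribution prepared earlier in that script; later commits rename some of these classes (BinnedThreeMLPointSourceResponseLocal and BinnedThreeMLResponse in particular).

from mhealpy import HealpixBase
from scoords import SpacecraftFrame

from cosipy.statistics import PoissonLikelihood
from cosipy.background_estimation import FreeNormBinnedBackground
from cosipy.interfaces import ThreeMLPluginInterface
from cosipy.response import BinnedThreeMLResponse, BinnedInstrumentResponse, \
    BinnedThreeMLPointSourceResponseLocal

# Wrap the FullDetectorResponse behind the new instrument response interface
instrument_response = BinnedInstrumentResponse(dr)

# Point source response in local (spacecraft) coordinates
psr = BinnedThreeMLPointSourceResponseLocal(
    instrument_response,
    sc_history=ori,
    dwell_time_map_base=HealpixBase(nside=dr.nside, scheme=dr.scheme,
                                    coordsys=SpacecraftFrame()),
    energy_axis=dr.axes['Ei'],
    polarization_axis=dr.axes['Pol'] if 'Pol' in dr.axes.labels else None)

response = BinnedThreeMLResponse(point_source_response=psr)
bkg = FreeNormBinnedBackground(bkg_dist)

# The likelihood is now assembled through setters rather than constructor arguments
like_fun = PoissonLikelihood()
like_fun.set_data(data)
like_fun.set_response(response)
like_fun.set_background(bkg)

cosi = ThreeMLPluginInterface('cosi', like_fun)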
From 3c0eb88887a725d3c382a13b9b9e9989a024b76c Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Fri, 27 Jun 2025 14:19:33 -0400 Subject: [PATCH 051/133] Axes check Signed-off-by: Israel Martinez --- cosipy/response/instrument_response.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/cosipy/response/instrument_response.py b/cosipy/response/instrument_response.py index 9f0f055e..d0d8f9a7 100644 --- a/cosipy/response/instrument_response.py +++ b/cosipy/response/instrument_response.py @@ -39,6 +39,10 @@ def differential_effective_area(self, axes:Axes, direction: SkyCoord, energy:u.Q """ # Check if we can use these axes + + if self._dr.measurement_axes != axes: + raise ValueError("This implementation can only handle a fixed set of measurement axes equal to the underlying response file.") + if 'PsiChi' not in axes.labels: raise ValueError("PsiChi axis not present") @@ -46,7 +50,7 @@ def differential_effective_area(self, axes:Axes, direction: SkyCoord, energy:u.Q raise ValueError("PsiChi axes doesn't have a coordinate system") if polarization is not None: - if 'Pol' not in axes.labels: + if 'Pol' not in self._dr.axes.labels: raise RuntimeError("The FullDetectorResponse does not contain polarization information") elif not np.array_equal(polarization, self._dr.axes['Pol'].centers): # Matches the v0.3 behaviour From c7ec659358e1e28358e4fc6537f0d33fd351c563 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Mon, 30 Jun 2025 14:32:52 -0400 Subject: [PATCH 052/133] Remove BinnedThreeMLPointSourceResponseLocal in favor of a general one that automatically handles local or inertial frames. - The user now specifies the photon direction, energy and polarization axes, which can, in principle, be arbitrary. Signed-off-by: Israel Martinez --- cosipy/response/FullDetectorResponse.py | 1 + cosipy/response/PointSourceResponse.py | 6 ++- .../response/threeml_point_source_response.py | 53 +++++++++++-------- ...ample_grb_fit_threeml_plugin_interfaces.py | 44 ++++----------- 4 files changed, 47 insertions(+), 57 deletions(-) diff --git a/cosipy/response/FullDetectorResponse.py b/cosipy/response/FullDetectorResponse.py index be1a7518..a8a81e94 100644 --- a/cosipy/response/FullDetectorResponse.py +++ b/cosipy/response/FullDetectorResponse.py @@ -739,6 +739,7 @@ def axes(self) -> Axes: """ return self._axes + @property def measurement_axes(self): return self.axes['Em', 'Phi', 'PsiChi'] diff --git a/cosipy/response/PointSourceResponse.py b/cosipy/response/PointSourceResponse.py index d87b2a8b..f57217fa 100644 --- a/cosipy/response/PointSourceResponse.py +++ b/cosipy/response/PointSourceResponse.py @@ -153,9 +153,11 @@ def from_dwell_time_map(cls, def from_scatt_map(cls, measured_axes:Axes, response: BinnedInstrumentResponseInterface, - exposure_map: HealpixMap, + scatt_map: HealpixMap, energy_axis: Axis, polarization_axis: PolarizationAxis = None ): - pass + + raise NotImplementedError("WiP") + #self._psr = self._dr.get_point_source_response(coord=coord, scatt_map=scatt_map) diff --git a/cosipy/response/threeml_point_source_response.py b/cosipy/response/threeml_point_source_response.py index a4c6813c..d7746e4f 100644 --- a/cosipy/response/threeml_point_source_response.py +++ b/cosipy/response/threeml_point_source_response.py @@ -15,7 +15,7 @@ from astromodels.sources import Source, PointSource from scoords import SpacecraftFrame -from histpy import Axes, Histogram, Axis +from histpy import Axes, Histogram, Axis, HealpixAxis from cosipy.interfaces import BinnedThreeMLSourceResponseInterface, 
BinnedDataInterface from cosipy.response import FullDetectorResponse, PointSourceResponse @@ -23,9 +23,9 @@ from mhealpy import HealpixMap -__all__ = ["BinnedThreeMLPointSourceResponseLocal"] +__all__ = ["BinnedThreeMLPointSourceResponse"] -class BinnedThreeMLPointSourceResponseLocal(BinnedThreeMLSourceResponseInterface): +class BinnedThreeMLPointSourceResponse(BinnedThreeMLSourceResponseInterface): """ COSI 3ML plugin. @@ -41,19 +41,15 @@ class BinnedThreeMLPointSourceResponseLocal(BinnedThreeMLSourceResponseInterface def __init__(self, instrument_response: BinnedInstrumentResponseInterface, sc_history: SpacecraftHistory, - dwell_time_map_base: HealpixBase, + direction_axis: HealpixAxis, energy_axis:Axis, - polarization_axis:PolarizationAxis = None): + polarization_axis:PolarizationAxis = None, + ): # TODO: FullDetectorResponse -> BinnedInstrumentResponseInterface self._sc_ori = sc_history - if not isinstance(dwell_time_map_base.coordsys, SpacecraftFrame): - raise ValueError("The dwell_time_map_base must have a SpacecraftFrame coordinate system.") - - self._dwell_time_map_base = dwell_time_map_base - # Use setters for these self._source = None @@ -74,9 +70,14 @@ def __init__(self, self._psr = None self._response = instrument_response + self._direction_axis = direction_axis self._energy_axis = energy_axis self._polarization_axis = polarization_axis + @property + def coordsys(self): + return self._direction_axis.coordsys + def clear_cache(self): self._source = None @@ -145,20 +146,30 @@ def expectation(self, data:BinnedDataInterface, copy = True)-> Histogram: coordsys = data.axes["PsiChi"].coordsys + if coordsys != self.coordsys: + raise ValueError(f"Coordinate system mismatch. Data has {coordsys} while this class has {self.coordsys}.") + logger.info("... Calculating point source response ...") - dwell_time_map = self._sc_ori.get_dwell_map(coord, base=self._dwell_time_map_base) + if isinstance(self.coordsys, SpacecraftFrame): + # Local coordinates - self._psr = PointSourceResponse.from_dwell_time_map(data.axes, self._response, - dwell_time_map, self._energy_axis, - self._polarization_axis) + dwell_time_map = self._sc_ori.get_dwell_map(coord, base=self._direction_axis) - # TODO: Move these lines to inertial version. - # scatt_map = self._sc_ori.get_scatt_map(nside=self._dr.nside * 2, - # target_coord=coord, - # coordsys=coordsys, - # earth_occ = True) - # self._psr = self._dr.get_point_source_response(coord=coord, scatt_map=scatt_map) + self._psr = PointSourceResponse.from_dwell_time_map(data.axes, self._response, + dwell_time_map, self._energy_axis, + self._polarization_axis) + + else: + # Inertial e/.g. galactic + raise NotImplementedError("Only local coordinates are supported for now.") + + # WiP + # TODO: Move these lines to inertial version. 
+ scatt_map = self._sc_ori.get_scatt_map(nside=self._direction_axis.nside * 2, + target_coord=coord, + coordsys=self._direction_axis.coordsys, + earth_occ=True) logger.info(f"--> done (source name : {self._source.name})") @@ -182,5 +193,3 @@ def expectation(self, data:BinnedDataInterface, copy = True)-> Histogram: return self._expectation.copy() else: return self._expectation - - diff --git a/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py index 808a24e9..611f34bc 100644 --- a/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py @@ -3,26 +3,19 @@ from mhealpy import HealpixBase from cosipy.statistics import PoissonLikelihood -from histpy import Histogram from cosipy.background_estimation import FreeNormBinnedBackground from cosipy.interfaces import ThreeMLPluginInterface -from cosipy.response import BinnedThreeMLResponse, BinnedInstrumentResponse, \ - BinnedThreeMLPointSourceResponseLocal +from cosipy.response import BinnedThreeMLResponse, BinnedInstrumentResponse, BinnedThreeMLPointSourceResponse from cosipy import BinnedData from cosipy.spacecraftfile import SpacecraftHistory from cosipy.response.FullDetectorResponse import FullDetectorResponse -from cosipy.util import fetch_wasabi_file -from cosipy.polarization import PolarizationAxis - -from scoords import SpacecraftFrame from astropy.time import Time import astropy.units as u import numpy as np -import matplotlib.pyplot as plt from threeML import Band, PointSource, Model, JointLikelihood, DataList from astromodels import Parameter @@ -104,34 +97,19 @@ def main(): instrument_response = BinnedInstrumentResponse(dr) - if isinstance(data.axes['PsiChi'].coordsys, SpacecraftFrame): - local_coord_fit = True - else: - local_coord_fit = False - - if local_coord_fit: - pass - # polarization_axis = dr.axes - # PolarizationAxis(pola)) - - if local_coord_fit: - # Currently using the same NnuLambda, Ei and Pol axes as the underlying FullDetectorResponse, - # matching the behavior of v0.3. This is all the current BinnedInstrumentResponse can do. - # In principle, this can be decoupled, and a BinnedInstrumentResponseInterface implementation - # can provide the response for an arbitrary directions, Ei and Pol values. - psr = BinnedThreeMLPointSourceResponseLocal(instrument_response, - sc_history=ori, - dwell_time_map_base = HealpixBase(nside = dr.nside, scheme = dr.scheme, coordsys=SpacecraftFrame()), - energy_axis=dr.axes['Ei'], - polarization_axis=dr.axes['Pol'] if 'Pol' in dr.axes.labels else None) - else: - psr = BinnedThreeMLPointSourceResponse() - + # Currently using the same NnuLambda, Ei and Pol axes as the underlying FullDetectorResponse, + # matching the behavior of v0.3. This is all the current BinnedInstrumentResponse can do. + # In principle, this can be decoupled, and a BinnedInstrumentResponseInterface implementation + # can provide the response for an arbitrary directions, Ei and Pol values. 
+ # NOTE: this is currently only implemented for data in local coords + psr = BinnedThreeMLPointSourceResponse(instrument_response, + sc_history=ori, + direction_axis = data.axes['PsiChi'], + energy_axis = dr.axes['Ei'], + polarization_axis = dr.axes['Pol'] if 'Pol' in dr.axes.labels else None) response = BinnedThreeMLResponse(point_source_response = psr) - - like_fun = PoissonLikelihood() like_fun.set_data(data) like_fun.set_response(response) From 214f2ae9b63a574074f3cef0c25a50e07426b364 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Mon, 30 Jun 2025 14:53:49 -0400 Subject: [PATCH 053/133] Change name from model response to model folding. Signed-off-by: Israel Martinez --- cosipy/interfaces/source_response_interface.py | 12 ++++++------ cosipy/interfaces/threeml_plugin_interface.py | 4 ++-- cosipy/response/threeml_response.py | 6 +++--- .../example_grb_fit_threeml_plugin_interfaces.py | 4 ++-- docs/api/interfaces/toy_interfaces_example.py | 6 +++--- 5 files changed, 16 insertions(+), 16 deletions(-) diff --git a/cosipy/interfaces/source_response_interface.py b/cosipy/interfaces/source_response_interface.py index c6528d36..7ccbe83e 100644 --- a/cosipy/interfaces/source_response_interface.py +++ b/cosipy/interfaces/source_response_interface.py @@ -6,15 +6,15 @@ from cosipy.spacecraftfile import SpacecraftHistory -__all__ = ["ThreeMLModelResponseInterface", - "UnbinnedThreeMLModelResponseInterface", - "BinnedThreeMLModelResponseInterface", +__all__ = ["ThreeMLModelFoldingInterface", + "UnbinnedThreeMLModelFoldingInterface", + "BinnedThreeMLModelFoldingInterface", "ThreeMLSourceResponseInterface", "UnbinnedThreeMLSourceResponseInterface", "BinnedThreeMLSourceResponseInterface"] @runtime_checkable -class ThreeMLModelResponseInterface(Protocol): +class ThreeMLModelFoldingInterface(Protocol): def set_model(self, model: Model): """ The model is passed as a reference and it's parameters @@ -23,13 +23,13 @@ def set_model(self, model: Model): """ @runtime_checkable -class UnbinnedThreeMLModelResponseInterface(UnbinnedExpectationInterface, ThreeMLModelResponseInterface, Protocol): +class UnbinnedThreeMLModelFoldingInterface(UnbinnedExpectationInterface, ThreeMLModelFoldingInterface, Protocol): """ No new methods. Just the inherited ones. """ @runtime_checkable -class BinnedThreeMLModelResponseInterface(ThreeMLModelResponseInterface, BinnedExpectationInterface, Protocol): +class BinnedThreeMLModelFoldingInterface(ThreeMLModelFoldingInterface, BinnedExpectationInterface, Protocol): """ No new methods. Just the inherited ones. 
""" diff --git a/cosipy/interfaces/threeml_plugin_interface.py b/cosipy/interfaces/threeml_plugin_interface.py index 58155bd9..eb5f15cf 100644 --- a/cosipy/interfaces/threeml_plugin_interface.py +++ b/cosipy/interfaces/threeml_plugin_interface.py @@ -1,6 +1,6 @@ from typing import Dict -from cosipy.interfaces import BinnedThreeMLModelResponseInterface, ThreeMLModelResponseInterface +from cosipy.interfaces import ThreeMLModelFoldingInterface from cosipy.interfaces.likelihood_interface import LikelihoodInterface from threeML import PluginPrototype, Parameter @@ -28,7 +28,7 @@ def __init__(self, self._like = likelihood # Check we can use this likelihood - if not isinstance(self._like.response, ThreeMLModelResponseInterface): + if not isinstance(self._like.response, ThreeMLModelFoldingInterface): raise TypeError("ThreeMLPluginInterface needs a LikelihoodInterface using a response of type ThreeMLModelResponseInterface") # Currently, the only nuisance parameters are the ones for the bkg diff --git a/cosipy/response/threeml_response.py b/cosipy/response/threeml_response.py index e11ed276..2de4c0f0 100644 --- a/cosipy/response/threeml_response.py +++ b/cosipy/response/threeml_response.py @@ -1,6 +1,6 @@ import copy -from cosipy.interfaces import BinnedThreeMLModelResponseInterface, BinnedThreeMLSourceResponseInterface, \ +from cosipy.interfaces import BinnedThreeMLModelFoldingInterface, BinnedThreeMLSourceResponseInterface, \ BinnedDataInterface from astromodels import Model @@ -8,9 +8,9 @@ from histpy import Axes, Histogram -__all__ = ["BinnedThreeMLResponse"] +__all__ = ["BinnedThreeMLModelFolding"] -class BinnedThreeMLResponse(BinnedThreeMLModelResponseInterface): +class BinnedThreeMLModelFolding(BinnedThreeMLModelFoldingInterface): def __init__(self, point_source_response:BinnedThreeMLSourceResponseInterface = None, diff --git a/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py index 611f34bc..b45e755d 100644 --- a/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py @@ -6,7 +6,7 @@ from cosipy.background_estimation import FreeNormBinnedBackground from cosipy.interfaces import ThreeMLPluginInterface -from cosipy.response import BinnedThreeMLResponse, BinnedInstrumentResponse, BinnedThreeMLPointSourceResponse +from cosipy.response import BinnedThreeMLModelFolding, BinnedInstrumentResponse, BinnedThreeMLPointSourceResponse from cosipy import BinnedData from cosipy.spacecraftfile import SpacecraftHistory @@ -108,7 +108,7 @@ def main(): energy_axis = dr.axes['Ei'], polarization_axis = dr.axes['Pol'] if 'Pol' in dr.axes.labels else None) - response = BinnedThreeMLResponse(point_source_response = psr) + response = BinnedThreeMLModelFolding(point_source_response = psr) like_fun = PoissonLikelihood() like_fun.set_data(data) diff --git a/docs/api/interfaces/toy_interfaces_example.py b/docs/api/interfaces/toy_interfaces_example.py index df0761c4..99b482ab 100644 --- a/docs/api/interfaces/toy_interfaces_example.py +++ b/docs/api/interfaces/toy_interfaces_example.py @@ -10,7 +10,7 @@ from cosipy.interfaces import (BinnedDataInterface, BinnedBackgroundInterface, - BinnedThreeMLModelResponseInterface, + BinnedThreeMLModelFoldingInterface, BinnedThreeMLSourceResponseInterface, ThreeMLPluginInterface, BackgroundInterface) from histpy import Axis, Axes, Histogram @@ -136,7 +136,7 @@ def copy(self) -> "ToyPointSourceResponse": # re-initializing any 
member. return copy.copy(self) -class ToyModelResponse(BinnedThreeMLModelResponseInterface): +class ToyModelFolding(BinnedThreeMLModelFoldingInterface): def __init__(self, psr: BinnedThreeMLSourceResponseInterface): @@ -174,7 +174,7 @@ def expectation(self, data: BinnedDataInterface, copy = True) -> Histogram: # are hardcoded above withing the classes, then it's not necessary here. data = ToyData() psr = ToyPointSourceResponse() -response = ToyModelResponse(psr) +response = ToyModelFolding(psr) bkg = ToyBkg() ## Source model From 860627b49a0160cfb780e16e4fcfcd66584c6ff0 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Mon, 14 Jul 2025 16:01:55 -0400 Subject: [PATCH 054/133] BinnedInstrumentResponse can now handle data in inertial coordinates. - Not tested yet. - Checked that local coordinates still produce the same results. Signed-off-by: Israel Martinez --- .../instrument_response_interface.py | 32 ++- cosipy/response/FullDetectorResponse.py | 66 +++--- cosipy/response/PointSourceResponse.py | 55 ++++- cosipy/response/instrument_response.py | 211 ++++++++++++++++-- .../response/threeml_point_source_response.py | 51 ++++- cosipy/spacecraftfile/spacecraft_file.py | 2 +- 6 files changed, 358 insertions(+), 59 deletions(-) diff --git a/cosipy/interfaces/instrument_response_interface.py b/cosipy/interfaces/instrument_response_interface.py index 2799ddf7..a465ea01 100644 --- a/cosipy/interfaces/instrument_response_interface.py +++ b/cosipy/interfaces/instrument_response_interface.py @@ -1,9 +1,11 @@ -from typing import Protocol +from typing import Protocol, Union from astropy.coordinates import SkyCoord +from astropy.units import Quantity from histpy import Axes, Histogram from astropy import units as u +from scoords import Attitude from cosipy.polarization import PolarizationAngle @@ -11,7 +13,15 @@ class BinnedInstrumentResponseInterface(Protocol): - def differential_effective_area(self, axes:Axes, direction: SkyCoord, energy:u.Quantity, polarization:PolarizationAngle) -> Histogram: + def differential_effective_area(self, + axes: Axes, + direction: SkyCoord, + energy:u.Quantity, + polarization:PolarizationAngle, + attitude:Attitude, + weight: Union[Quantity, float], + out: Quantity, + add_inplace: bool) -> Quantity: """ Parameters @@ -19,14 +29,26 @@ def differential_effective_area(self, axes:Axes, direction: SkyCoord, energy:u.Q axes: Measured axes direction: - Photon incoming direction in SC coordinates + Photon incoming direction. If not in a SpacecraftFrame, then provide an attitude for the transformation energy: Photon energy polarization - Photon polarization angle + Photon polarization angle. If the coordinate frame of the polarization convention is not a + SpacecraftFrame, then provide an attitude for the transformation + attitude + Attitude defining the orientation of the SC in an inertial coordinate system. + weight + Optional. Weighting the result by a given weight. Providing the weight at this point as opposed to + apply it to the output can result in greater efficiency. + out + Optional. Histogram to store the output. If possible, the implementation should try to avoid allocating + new memory. + add_inplace + Optional. If True and a Histogram output was provided, the implementation should try to avoid allocating new + memory and add --not set-- the result of this operation to the output. Returns ------- The effective area times the event measurement probability distribution integrated on each of the bins - of the provided axes + of the provided axes. 
It has the shape (direction.shape, energy.shape, polarization.shape, axes.shape) """ diff --git a/cosipy/response/FullDetectorResponse.py b/cosipy/response/FullDetectorResponse.py index a8a81e94..b553372c 100644 --- a/cosipy/response/FullDetectorResponse.py +++ b/cosipy/response/FullDetectorResponse.py @@ -136,6 +136,16 @@ def _open_h5(cls, filename, pa_convention=None): scheme=axis.attrs['SCHEME'], coordsys=SpacecraftFrame())] + elif axis_label == "Pol": + + if not (pa_convention == 'RelativeX' or pa_convention == 'RelativeY' or pa_convention == 'RelativeZ'): + raise RuntimeError( + "Polarization angle convention of response ('RelativeX', 'RelativeY', or 'RelativeZ') must be provided") + + axes += [PolarizationAxis(np.array(axis) * u.Unit(axis.attrs['UNIT']), + convention = pa_convention, + label=axis_label)] + else: axes += [Axis(np.array(axis) * u.Unit(axis.attrs['UNIT']), scale=axis_type, @@ -148,12 +158,18 @@ def _open_h5(cls, filename, pa_convention=None): base=new.axes['NuLambda'], coordsys=SpacecraftFrame()) - new.pa_convention = pa_convention - if 'Pol' in new._axes.labels and not (pa_convention == 'RelativeX' or pa_convention == 'RelativeY' or pa_convention == 'RelativeZ'): - raise RuntimeError("Polarization angle convention of response ('RelativeX', 'RelativeY', or 'RelativeZ') must be provided") - return new + @property + def pa_convention(self): + """ + Return the convention of the "Pol" if it exists. Returns 'None' otherwise. + """ + if 'Pol' in self.axes.labels: + return self.axes['Pol'].convention + else: + return None + @classmethod def _open_rsp(cls, filename, Spectrumfile=None,norm="Linear" ,single_pixel = False,alpha=0,emin=90,emax=10000, pa_convention=None): """ @@ -576,12 +592,11 @@ def _open_rsp(cls, filename, Spectrumfile=None,norm="Linear" ,single_pixel = Fal base=new.axes['NuLambda'], coordsys=SpacecraftFrame()) - new.pa_convention = pa_convention if 'Pol' in new._axes.labels: if not (pa_convention == 'RelativeX' or pa_convention == 'RelativeY' or pa_convention == 'RelativeZ'): raise RuntimeError("Polarization angle convention of response ('RelativeX', 'RelativeY', or 'RelativeZ') must be provided") - new._axes['Pol'] = PolarizationAxis(new._axes['Pol'].edges, convention=pa_convention) + new._axes['Pol'] = PolarizationAxis(new._axes['Pol'].edges, label = 'Pol', convention=pa_convention) return new @@ -1122,12 +1137,25 @@ def merge_psr_to_extended_source_response(self, basename, coordsys = 'galactic', return extended_source_response @staticmethod - def _sum_rot_hist(h, h_new, exposure, coord, pa_convention, axis = "PsiChi"): + def _sum_rot_hist(h, h_new, exposure, axis = "PsiChi", pol_axis = 'Pol'): """ Rotate a histogram with HealpixAxis h into the grid of h_new, and sum it up with the weight of exposure. Meant to rotate the PsiChi of a CDS from local to galactic + Parameters + ---------- + h + h_new + exposure + axis: + Label of axis to rotate + pol_axis: + Label of axis containing polarization information (if any). This will also be rotated. 
+ + Returns + ------- + """ axis_id = h.axes.label_to_index(axis) @@ -1139,32 +1167,20 @@ def _sum_rot_hist(h, h_new, exposure, coord, pa_convention, axis = "PsiChi"): new_axis = h_new.axes[axis_id] # Convolve + # 0-th order approximation # TODO: Change this to interpolation (pixels + weights) old_pixels = old_axis.find_bin(new_axis.pix2skycoord(np.arange(new_axis.nbins))) - if 'Pol' in h.axes.labels and h_new.axes[axis].coordsys.name != 'spacecraftframe': + if pol_axis in h.axes.labels and h_new.axes[axis].coordsys.name != 'spacecraftframe': - if coord.size > 1: - raise ValueError("For polarization, only a single source coordinate is supported") - - from cosipy.polarization.polarization_angle import PolarizationAngle - from cosipy.polarization.conventions import IAUPolarizationConvention - - pol_axis_id = h.axes.label_to_index('Pol') + pol_axis_id = h.axes.label_to_index(pol_axis) old_pol_axis = h.axes[pol_axis_id] new_pol_axis = h_new.axes[pol_axis_id] old_pol_indices = [] - for i in range(h_new.axes['Pol'].nbins): - - pa = PolarizationAngle(h_new.axes['Pol'].centers.to_value(u.deg)[i] * u.deg, coord.transform_to('icrs'), convention=IAUPolarizationConvention()) - pa_old = pa.transform_to(pa_convention, attitude=coord.attitude) - - if pa_old.angle.deg == 180.: - pa_old = PolarizationAngle(0. * u.deg, coord, convention=IAUPolarizationConvention()) - - old_pol_indices.append(old_pol_axis.find_bin(pa_old.angle)) + for pa_new in new_pol_axis.centers: + old_pol_indices.append(old_pol_axis.find_bin(pa_new)) old_pol_indices = np.array(old_pol_indices) @@ -1185,7 +1201,7 @@ def _sum_rot_hist(h, h_new, exposure, coord, pa_convention, axis = "PsiChi"): #h_new[{axis:new_pix}] += exposure * h[{axis: old_pix}] # * norm_corr # The following code does the same than the code above, but is faster - if not 'Pol' in h.axes.labels: + if not pol_axis in h.axes.labels: old_index = (slice(None),)*axis_id + (old_pix,) new_index = (slice(None),)*axis_id + (new_pix,) diff --git a/cosipy/response/PointSourceResponse.py b/cosipy/response/PointSourceResponse.py index f57217fa..543511c7 100644 --- a/cosipy/response/PointSourceResponse.py +++ b/cosipy/response/PointSourceResponse.py @@ -1,17 +1,22 @@ +from astropy.coordinates import SkyCoord +from astropy.units import Quantity + from cosipy.polarization.polarization_axis import PolarizationAxis from cosipy.threeml.util import to_linear_polarization from mhealpy import HealpixMap from cosipy.interfaces import BinnedInstrumentResponseInterface -from cosipy.polarization import PolarizationAngle from histpy import Histogram, Axis, Axes # , Axes, Axis import numpy as np import astropy.units as u -from scoords import SpacecraftFrame, Attitude +from scoords import Attitude from .functions import get_integrated_spectral_model import logging + +from cosipy.spacecraftfile import SpacecraftAttitudeMap + logger = logging.getLogger(__name__) class PointSourceResponse(Histogram): @@ -151,13 +156,53 @@ def from_dwell_time_map(cls, @classmethod def from_scatt_map(cls, + coord: SkyCoord, measured_axes:Axes, response: BinnedInstrumentResponseInterface, - scatt_map: HealpixMap, + scatt_map: SpacecraftAttitudeMap, energy_axis: Axis, polarization_axis: PolarizationAxis = None ): + """ + + Parameters + ---------- + measured_axes + response + scatt_map + energy_axis + polarization_axis + + Returns + ------- + + """ + + axes = [energy_axis] + + if polarization_axis is not None: + axes += [polarization_axis] + + axes += list(measured_axes) + + psr = Quantity(np.empty(shape=axes.shape), unit = 
u.cm * u.cm * u.s) + + for i, (pixels, exposure) in \ + enumerate(zip(scatt_map.contents.coords.transpose(), + scatt_map.contents.data * scatt_map.unit)): + + att = Attitude.from_axes(x=scatt_map.axes['x'].pix2skycoord(pixels[0]), + y=scatt_map.axes['y'].pix2skycoord(pixels[1])) + + + response.differential_effective_area(measured_axes, + coord, + energy_axis.centers, + polarization_axis.centers, + attitude = att, + weight=exposure, + out=psr.contents, + add_inplace=True) - raise NotImplementedError("WiP") - #self._psr = self._dr.get_point_source_response(coord=coord, scatt_map=scatt_map) + return PointSourceResponse(axes, contents = psr) diff --git a/cosipy/response/instrument_response.py b/cosipy/response/instrument_response.py index d0d8f9a7..59271c81 100644 --- a/cosipy/response/instrument_response.py +++ b/cosipy/response/instrument_response.py @@ -1,10 +1,14 @@ +from typing import Union + import numpy as np from astropy.coordinates import SkyCoord import astropy.units as u +from astropy.units import Quantity +from scoords import Attitude, SpacecraftFrame from cosipy.interfaces.instrument_response_interface import BinnedInstrumentResponseInterface -from cosipy.polarization import PolarizationAngle +from cosipy.polarization import PolarizationAngle, PolarizationAxis from cosipy.response import FullDetectorResponse from histpy import Axes, Histogram @@ -18,8 +22,25 @@ def __init__(self, response:FullDetectorResponse): self._dr = response - def differential_effective_area(self, axes:Axes, direction: SkyCoord, energy:u.Quantity, polarization:PolarizationAngle = None) -> Histogram: + @property + def is_polarization_response(self): + return 'Pol' in self._dr.axes.labels + + def differential_effective_area(self, + axes: Axes, + direction: SkyCoord, + energy:u.Quantity, + polarization:PolarizationAngle = None, + attitude:Attitude = None, + weight:Union[Quantity, float] = None, + out:Quantity = None, + add_inplace:bool = False) -> Quantity: """ + Interpolations and bin coupling: + * The direction is always bi-linearly interpolated. + * Ei, Em and Phi always needs to match the response exactly + * If PsiChi is in local coordinates, PsiChi and polarization need to match the response exactly + * If PsiChi is in inertial coordinates, PsiChi and polarization are interpolated at 0-th order during the rotation Parameters ---------- @@ -31,6 +52,17 @@ def differential_effective_area(self, axes:Axes, direction: SkyCoord, energy:u.Q Photon energy polarization Photon polarization angle + attitude + Attitude defining the orientation of the SC in an inertial coordinate system. + weight + Optional. Weighting the result by a given weight. Providing the weight at this point as opposed to + apply it to the output can result in greater efficiency. + out: + Optional. Histogram to store the output. If possible, the implementation should try to avoid allocating + new memory. + add_inplace + If True and a Histogram output was provided, we will try to avoid allocating new + memory and add --not set-- the result of this operation to the output. Returns ------- @@ -38,34 +70,181 @@ def differential_effective_area(self, axes:Axes, direction: SkyCoord, energy:u.Q of the provided axes """ - # Check if we can use these axes + # Check if we're getting the expected axes and other limitations + if set(axes.labels) != {'Em','PsiChi','Phi'}: + raise ValueError(f"Unexpected axes labels. 
Expecting \"{{'Em','PsiChi','Phi'}}\", got {axes.labels}") + + if self._dr.measurement_axes["Em"] != axes["Em"]: + # Matches the v0.3 behaviour + raise ValueError("This implementation can only handle a fixed measured energy (Em) binning equal to the underlying response file.") + + if self._dr.measurement_axes["Phi"] != axes["Phi"]: + # Matches the v0.3 behaviour + raise ValueError("This implementation can only handle a fixed scattering angle (Phi) binning equal to the underlying response file.") + + if not np.array_equal(energy, self._dr.axes['Ei'].centers): + # Matches the v0.3 behaviour + raise RuntimeError("Currently, the probed energy values need to match the underlying response matrix Ei centers.") - if self._dr.measurement_axes != axes: - raise ValueError("This implementation can only handle a fixed set of measurement axes equal to the underlying response file.") + results_axes_labels = ['Ei'] - if 'PsiChi' not in axes.labels: - raise ValueError("PsiChi axis not present") + if polarization is not None: + if not self.is_polarization_response: + raise RuntimeError("The FullDetectorResponse does not contain polarization information") if axes["PsiChi"].coordsys is None: raise ValueError("PsiChi axes doesn't have a coordinate system") + if direction.shape != (): + raise ValueError("Currently this implementation can only deal with one direction at a time") + + # Fork for local and galactic PsiChi coordinates + if not isinstance(axes["PsiChi"].coordsys, SpacecraftFrame): + # Is inertial + if attitude is None: + raise InputError("User need to provide the attitude information in order to transform to spacecraft coordinates") + + return self._differential_effective_area_inertial(attitude, axes, direction, polarization, weight, out, add_inplace) + + # Is local + + # Check again remaining axes + if self._dr.measurement_axes["PsiChi"] != axes["PsiChi"]: + # Matches the v0.3 behaviour + raise ValueError("This implementation can only handle a fixed scattering direction (PsiChi) binning equal to the underlying response file.") + if polarization is not None: - if 'Pol' not in self._dr.axes.labels: - raise RuntimeError("The FullDetectorResponse does not contain polarization information") - elif not np.array_equal(polarization, self._dr.axes['Pol'].centers): + if not np.array_equal(polarization, self._dr.axes['Pol'].centers): # Matches the v0.3 behaviour raise RuntimeError( - "Currently, the probed polarization angles need to match the underlying response matrix centers.") - - if not np.array_equal(energy, self._dr.axes['Ei'].centers): - # Matches the v0.3 behaviour - raise RuntimeError("Currently, the probed energy values need to match the underlying response matrix centers.") + "Currently, the probed polarization angles need to match the underlying response matrix Pol centers.") # Get the pixel as is since we already checked that the requested # energy and polarization points match the underlying response centers # Matches the v0.3 behaviour pix = self._dr.ang2pix(direction) - return self._dr[pix] + # TODO: Update after Pr364. 
get_pixel(pix, weight) should make this more efficient + if weight is not None: + result = self._dr[pix] * weight + else: + result = self._dr[pix] + + # Fix order of output axes to the standard by the interface + results_axes_labels = ['Ei'] + + if polarization is not None: + results_axes_labels += ['Pol'] + + results_axes_labels += list(axes.labels) + + result = result.project(results_axes_labels) + + if polarization is None and self.is_polarization_response: + # It was implicitly converted to unpolarized response by the + # projection above, but this is still needed to get the mean + result /= self._dr.axes.nbins + + return self._fill_out_and_return(result, out, add_inplace) + + @staticmethod + def _fill_out_and_return(result:Histogram, out:Quantity, add_inplace:bool = False) -> Quantity: + + if out is None: + # Convert to base class + return result.contents + else: + + if out.shape != result.shape: + raise ValueError("The provided out argument doesn't have the right shape." + f"Expected {result.shape}, got {out.axes.shape}") + + if add_inplace: + out += result.contents + else: + out[:] = result.contents + + return out + + def _differential_effective_area_inertial(self, + attitude:Attitude, + axes:Axes, + direction: SkyCoord, + polarization:PolarizationAngle = None, + weight:Union[float, Quantity] = None, + out: Quantity = None, + add_inplace:bool = False, + ) -> Quantity: + """ + Will rotate PsiChi from local to inertial coordinates + + Parameters + ---------- + axes + direction + energy + polarization + attitude + + Returns + ------- + + """ + + # Get response in local coordinates + direction = direction.transform_to(SpacecraftFrame(attitude=attitude)) + + # TODO: Change to get_pixel(pix, weight) after PR 364 + dr_pix = self._dr[self._dr.ang2pix(direction)] + + dr_pix.axes['PsiChi'].coordsys = SpacecraftFrame(attitude=attitude) + + # Generate axes that will allow us to use _sum_rot_hist, + # and obtain the same results as in v3.x + out_axes = [self._dr.axes['Ei']] + + if self.is_polarization_response: + + # Since we're doing a 0-th order interpolation, the only thing that matter are the bin centers, + # so we're placing them at the input polarization angles + + if np.any(polarization.angle[1:] - polarization.angle[:-1] < 0): + raise ValueError("This implementation requires strictly monotonically increasing polarization angles") + + pol_edges = (polarization.angle[:-1] + polarization.angle[1:])/2 + + pol_edges = np.concatenate(pol_edges[0] - 2*(pol_edges[0] - polarization.angle[0]), pol_edges) + pol_edges = np.concatenate(pol_edges, pol_edges[-1] + 2 * (polarization.angle[-1] - pol_edges[-1])) + + out_axes += [PolarizationAxis(pol_edges, convention = polarization.convention)] + + out_axes += list(axes) + + # Either initialize a new + if out is None: + out = Histogram(out_axes, + unit = dr_pix.unit) + else: + if not add_inplace: + out.fill(0.) 
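+            # When add_inplace is True the existing contents are kept, so the
+            # rotated response computed below is added on top of them.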
+ + out = Histogram(out_axes, + contents = out, + copy_contents=False) + + if weight is None: + # Weight takes the role of the exposure in _sum_rot_hist, which is not an optional argument + weight = 1 + + FullDetectorResponse._sum_rot_hist(dr_pix, out, weight, + axis = 'Psi', + pol_axis = 'Pol') + + return out.contents + + + + + diff --git a/cosipy/response/threeml_point_source_response.py b/cosipy/response/threeml_point_source_response.py index d7746e4f..c66fece6 100644 --- a/cosipy/response/threeml_point_source_response.py +++ b/cosipy/response/threeml_point_source_response.py @@ -44,7 +44,33 @@ def __init__(self, direction_axis: HealpixAxis, energy_axis:Axis, polarization_axis:PolarizationAxis = None, + scattmap_nside = None ): + """ + + Parameters + ---------- + instrument_response: + A BinnedInstrumentResponseInterface capable of providing the differential + effective area in local coordinates as a function of direction, energy and + polarization. + sc_history: + The SpacecraftHistory describing the SC orbit and attitude vs time. + direction_axis: + The desired effective binning of the photon direction (aka NuLamda). + This also determines the coordinate system. If the coordinate system is + inertial, then internally the `instrument_response` will be rotated + from local coordinate based on the `sc_history` information vs time. + energy_axis: + The desired effective binning of the photon energy (aka Ei) + polarization_axis: + The desired effective binning of the photon polarization angle (aka Pol). + This also defined the polarization coordinate system and convention. + scattmap_nside: + If transformation from local to an inertial system is needed, the spacecraft + attitude will be first discretized based on this nside. Default: twice the + nside of direction_axis. + """ # TODO: FullDetectorResponse -> BinnedInstrumentResponseInterface @@ -74,6 +100,11 @@ def __init__(self, self._energy_axis = energy_axis self._polarization_axis = polarization_axis + if scattmap_nside is None: + self._scattmap_nside = 2*self._direction_axis.nside + else: + self._scattmap_nside = scattmap_nside + @property def coordsys(self): return self._direction_axis.coordsys @@ -156,21 +187,27 @@ def expectation(self, data:BinnedDataInterface, copy = True)-> Histogram: dwell_time_map = self._sc_ori.get_dwell_map(coord, base=self._direction_axis) - self._psr = PointSourceResponse.from_dwell_time_map(data.axes, self._response, - dwell_time_map, self._energy_axis, + self._psr = PointSourceResponse.from_dwell_time_map(data.axes, + self._response, + dwell_time_map, + self._energy_axis, self._polarization_axis) else: - # Inertial e/.g. galactic - raise NotImplementedError("Only local coordinates are supported for now.") + # Inertial e..g. galactic - # WiP - # TODO: Move these lines to inertial version. 
- scatt_map = self._sc_ori.get_scatt_map(nside=self._direction_axis.nside * 2, + scatt_map = self._sc_ori.get_scatt_map(nside=self._scattmap_nside, target_coord=coord, coordsys=self._direction_axis.coordsys, earth_occ=True) + self._psr = PointSourceResponse.from_scatt_map(coord, + data.axes, + self._response, + scatt_map, + self._energy_axis, + self._polarization_axis) + logger.info(f"--> done (source name : {self._source.name})") # Convolve with spectrum diff --git a/cosipy/spacecraftfile/spacecraft_file.py b/cosipy/spacecraftfile/spacecraft_file.py index bc672524..80b94342 100644 --- a/cosipy/spacecraftfile/spacecraft_file.py +++ b/cosipy/spacecraftfile/spacecraft_file.py @@ -568,7 +568,7 @@ def get_scatt_map(self, coordsys = 'galactic', r_earth = 6378.0, earth_occ = True - ): + ) -> SpacecraftAttitudeMap: """ Bin the spacecraft attitude history into a 4D histogram that From e4a8d9c2f8135ec5d1969e78f9a58f192688d961 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Tue, 15 Jul 2025 14:32:13 -0400 Subject: [PATCH 055/133] Debugging changes. Still wip. Signed-off-by: Israel Martinez --- cosipy/response/PointSourceResponse.py | 5 +- cosipy/response/instrument_response.py | 2 +- cosipy/spacecraftfile/spacecraft_file.py | 15 +- ...ample_grb_fit_threeml_plugin_interfaces.py | 247 ++++++++++++++---- 4 files changed, 212 insertions(+), 57 deletions(-) diff --git a/cosipy/response/PointSourceResponse.py b/cosipy/response/PointSourceResponse.py index 543511c7..753ba122 100644 --- a/cosipy/response/PointSourceResponse.py +++ b/cosipy/response/PointSourceResponse.py @@ -184,6 +184,7 @@ def from_scatt_map(cls, axes += [polarization_axis] axes += list(measured_axes) + axes = Axes(axes) psr = Quantity(np.empty(shape=axes.shape), unit = u.cm * u.cm * u.s) @@ -198,10 +199,10 @@ def from_scatt_map(cls, response.differential_effective_area(measured_axes, coord, energy_axis.centers, - polarization_axis.centers, + None if polarization_axis is None else polarization_axis.centers, attitude = att, weight=exposure, - out=psr.contents, + out=psr, add_inplace=True) return PointSourceResponse(axes, contents = psr) diff --git a/cosipy/response/instrument_response.py b/cosipy/response/instrument_response.py index 59271c81..f6f45c0b 100644 --- a/cosipy/response/instrument_response.py +++ b/cosipy/response/instrument_response.py @@ -237,7 +237,7 @@ def _differential_effective_area_inertial(self, weight = 1 FullDetectorResponse._sum_rot_hist(dr_pix, out, weight, - axis = 'Psi', + axis = 'PsiChi', pol_axis = 'Pol') return out.contents diff --git a/cosipy/spacecraftfile/spacecraft_file.py b/cosipy/spacecraftfile/spacecraft_file.py index 80b94342..f5f16ea0 100644 --- a/cosipy/spacecraftfile/spacecraft_file.py +++ b/cosipy/spacecraftfile/spacecraft_file.py @@ -566,7 +566,7 @@ def get_scatt_map(self, target_coord=None, scheme = 'ring', coordsys = 'galactic', - r_earth = 6378.0, + r_earth = None, earth_occ = True ) -> SpacecraftAttitudeMap: @@ -585,7 +585,7 @@ def get_scatt_map(self, The scheme of the scatt map (the default is "ring") coordsys : str, optional The coordinate system used in the scatt map (the default is "galactic). - r_earth : float, optional + r_earth : Quantity, optional Earth radius in km (default is 6378 km). earth_occ : bool, optional Option to include Earth occultation in scatt map calculation. @@ -596,7 +596,10 @@ def get_scatt_map(self, h_ori : cosipy.spacecraftfile.scatt_map.SpacecraftAttitudeMap The spacecraft attitude map. 
""" - + + if r_earth is None: + r_earth = 6378.0 * u.km + # Check if target_coord is needed if earth_occ and target_coord is None: raise ValueError("target_coord is needed when earth_occ = True") @@ -619,11 +622,11 @@ def get_scatt_map(self, x,y,z = attitudes[:-1].as_axes() # Get max angle based on altitude: - max_angle = np.pi - np.arcsin(r_earth/(r_earth + altitude)) - max_angle *= (180/np.pi) # angles in degree + max_angle = np.pi*u.rad - np.arcsin(r_earth/(r_earth + altitude)) + max_angle = max_angle.to_value(u.deg) # angles in degree # Define weights and set to 0 if blocked by Earth: - weight = self.livetime*u.s + weight = self.livetime if earth_occ: # Calculate angle between source direction and Earth zenith diff --git a/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py index b45e755d..14646430 100644 --- a/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py @@ -1,7 +1,16 @@ +import logging + +logging.basicConfig( + level=logging.INFO, + format='%(asctime)s - %(levelname)s - %(filename)s:%(lineno)d - %(message)s' + ) + import sys from mhealpy import HealpixBase +from matplotlib import pyplot as plt + from cosipy.statistics import PoissonLikelihood from cosipy.background_estimation import FreeNormBinnedBackground @@ -34,54 +43,110 @@ def main(): # fetch_wasabi_file('COSI-SMEX/DC2/Data/Orientation/20280301_3_month_with_orbital_info.ori', output=str(data_path / '20280301_3_month_with_orbital_info.ori'), checksum = '416fcc296fc37a056a069378a2d30cb2') # fetch_wasabi_file('COSI-SMEX/DC2/Responses/SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip', output=str(data_path / 'SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip'), unzip = True, checksum = 'e8ff763c5d9e63d3797567a4a51d9eda') - # Set model to fit - l = 93. - b = -53. - - alpha = -1 - beta = -3 - xp = 450. * u.keV - piv = 500. * u.keV - K = 1 / u.cm / u.cm / u.s / u.keV - - spectrum = Band() - spectrum.beta.min_value = -15.0 - spectrum.alpha.value = alpha - spectrum.beta.value = beta - spectrum.xp.value = xp.value - spectrum.K.value = K.value - spectrum.piv.value = piv.value - spectrum.xp.unit = xp.unit - spectrum.K.unit = K.unit - spectrum.piv.unit = piv.unit - - source = PointSource("source", # Name of source (arbitrary, but needs to be unique) - l = l, # Longitude (deg) - b = b, # Latitude (deg) - spectral_shape = spectrum) # Spectral model - - model = Model(source) # Model with single source. If we had multiple sources, we would do Model(source1, source2, ...) 
- - # Data preparation - grb_bkg = BinnedData(data_path / "grb.yaml") - grb_bkg.load_binned_data_from_hdf5(binned_data=data_path / "grb_bkg_binned_data.hdf5") - - bkg = BinnedData(data_path / "background.yaml") - - bkg.load_binned_data_from_hdf5(binned_data=data_path / "bkg_binned_data_1s_local.hdf5") - - bkg_tmin = 1842597310.0 - bkg_tmax = 1842597550.0 - bkg_min = np.where(bkg.binned_data.axes['Time'].edges.value == bkg_tmin)[0][0] - bkg_max = np.where(bkg.binned_data.axes['Time'].edges.value == bkg_tmax)[0][0] - bkg_dist = bkg.binned_data.slice[{'Time': slice(bkg_min, bkg_max)}].project('Em', 'Phi', 'PsiChi') - - # Prepare instrument response and SC history - tmin = Time(1842597410.0, format='unix') - tmax = Time(1842597450.0, format='unix') - ori = SpacecraftHistory.open(data_path / "20280301_3_month_with_orbital_info.ori", tmin, tmax) - ori = ori.select_interval(tmin, tmax) # Function changed name during refactoring + case = 'grb' + case = 'crab' + + if case == 'grb': + + # Set model to fit + l = 93. + b = -53. + + alpha = -1 + beta = -3 + xp = 450. * u.keV + piv = 500. * u.keV + K = 1 / u.cm / u.cm / u.s / u.keV + + spectrum = Band() + spectrum.beta.min_value = -15.0 + spectrum.alpha.value = alpha + spectrum.beta.value = beta + spectrum.xp.value = xp.value + spectrum.K.value = K.value + spectrum.piv.value = piv.value + spectrum.xp.unit = xp.unit + spectrum.K.unit = K.unit + spectrum.piv.unit = piv.unit + + source = PointSource("source", # Name of source (arbitrary, but needs to be unique) + l=l, # Longitude (deg) + b=b, # Latitude (deg) + spectral_shape=spectrum) # Spectral model + + # Date preparation + binned_data = BinnedData(data_path / "grb.yaml") + binned_data.load_binned_data_from_hdf5(binned_data=data_path / "grb_bkg_binned_data.hdf5") + + bkg = BinnedData(data_path / "background.yaml") + + bkg.load_binned_data_from_hdf5(binned_data=data_path / "bkg_binned_data_1s_local.hdf5") + + bkg_tmin = 1842597310.0 + bkg_tmax = 1842597550.0 + bkg_min = np.where(bkg.binned_data.axes['Time'].edges.value == bkg_tmin)[0][0] + bkg_max = np.where(bkg.binned_data.axes['Time'].edges.value == bkg_tmax)[0][0] + bkg_dist = bkg.binned_data.slice[{'Time': slice(bkg_min, bkg_max)}].project('Em', 'Phi', 'PsiChi') + + tmin = Time(1842597410.0, format='unix') + tmax = Time(1842597450.0, format='unix') + + elif case == 'crab': + + # Set model to fit + l = 184.56 + b = -5.78 + + alpha = -1.99 + beta = -2.32 + E0 = 531. * (alpha - beta) * u.keV + xp = E0 * (alpha + 2) / (alpha - beta) + piv = 500. 
* u.keV + K = 3.07e-5 / u.cm / u.cm / u.s / u.keV + + spectrum = Band() + + spectrum.alpha.min_value = -2.14 + spectrum.alpha.max_value = 3.0 + spectrum.beta.min_value = -5.0 + spectrum.beta.max_value = -2.15 + spectrum.xp.min_value = 1.0 + + spectrum.alpha.value = alpha + spectrum.beta.value = beta + spectrum.xp.value = xp.value + spectrum.K.value = K.value + spectrum.piv.value = piv.value + + spectrum.xp.unit = xp.unit + spectrum.K.unit = K.unit + spectrum.piv.unit = piv.unit + + spectrum.alpha.delta = 0.01 + spectrum.beta.delta = 0.01 + + source = PointSource("source", # Name of source (arbitrary, but needs to be unique) + l=l, # Longitude (deg) + b=b, # Latitude (deg) + spectral_shape=spectrum) # Spectral model + + # Data preparation + binned_data = BinnedData(data_path / "crab.yaml") + bkg = BinnedData(data_path / "background.yaml") + + binned_data.load_binned_data_from_hdf5(binned_data=data_path / "crab_bkg_binned_data.hdf5") + bkg.load_binned_data_from_hdf5(binned_data=data_path / "bkg_binned_data.hdf5") + + bkg_dist = bkg.binned_data.project('Em', 'Phi', 'PsiChi') + + # SC attitude and orbit + ori = SpacecraftHistory.open(data_path / "20280301_3_month_with_orbital_info.ori") + + else: + raise ValueError(r"Unknown case '{case}'") + + # Prepare instrument response dr_path = data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5" dr = FullDetectorResponse.open(dr_path) @@ -91,7 +156,7 @@ def main(): bkg_dist += sys.float_info.min # ============ Interfaces ============== - data = grb_bkg.get_em_cds() + data = binned_data.get_em_cds() bkg = FreeNormBinnedBackground(bkg_dist) @@ -129,11 +194,97 @@ def main(): # 3Ml fit. Same as before plugins = DataList(cosi) + model = Model(source) # Model with single source. If we had multiple sources, we would do Model(source1, source2, ...) like = JointLikelihood(model, plugins) like.fit() results = like.results print(results.display()) + + # plot + if case == 'crab': + + alpha_inj = -1.99 + beta_inj = -2.32 + E0_inj = 531. * (alpha_inj - beta_inj) * u.keV + xp_inj = E0_inj * (alpha_inj + 2) / (alpha_inj - beta_inj) + piv_inj = 100. 
* u.keV + K_inj = 7.56e-4 / u.cm / u.cm / u.s / u.keV + + spectrum_inj = Band() + + spectrum_inj.alpha.min_value = -2.14 + spectrum_inj.alpha.max_value = 3.0 + spectrum_inj.beta.min_value = -5.0 + spectrum_inj.beta.max_value = -2.15 + spectrum_inj.xp.min_value = 1.0 + + spectrum_inj.alpha.value = alpha_inj + spectrum_inj.beta.value = beta_inj + spectrum_inj.xp.value = xp_inj.value + spectrum_inj.K.value = K_inj.value + spectrum_inj.piv.value = piv_inj.value + + spectrum_inj.xp.unit = xp_inj.unit + spectrum_inj.K.unit = K_inj.unit + spectrum_inj.piv.unit = piv_inj.unit + + results = like.results + + print(results.display()) + + parameters = {par.name: results.get_variates(par.path) + for par in results.optimized_model["source"].parameters.values() + if par.free} + + results_err = results.propagate(results.optimized_model["source"].spectrum.main.shape.evaluate_at, **parameters) + + print(results.optimized_model["source"]) + + energy = np.geomspace(100 * u.keV, 10 * u.MeV).to_value(u.keV) + + flux_lo = np.zeros_like(energy) + flux_median = np.zeros_like(energy) + flux_hi = np.zeros_like(energy) + flux_inj = np.zeros_like(energy) + + for i, e in enumerate(energy): + flux = results_err(e) + flux_median[i] = flux.median + flux_lo[i], flux_hi[i] = flux.equal_tail_interval(cl=0.68) + flux_inj[i] = spectrum_inj.evaluate_at(e) + + binned_energy_edges = binned_data.binned_data.axes['Em'].edges.value + binned_energy = np.array([]) + bin_sizes = np.array([]) + + for i in range(len(binned_energy_edges) - 1): + binned_energy = np.append(binned_energy, (binned_energy_edges[i + 1] + binned_energy_edges[i]) / 2) + bin_sizes = np.append(bin_sizes, binned_energy_edges[i + 1] - binned_energy_edges[i]) + + fig, ax = plt.subplots() + + ax.plot(energy, energy * energy * flux_median, label="Best fit") + ax.fill_between(energy, energy * energy * flux_lo, energy * energy * flux_hi, alpha=.5, + label="Best fit (errors)") + ax.plot(energy, energy * energy * flux_inj, color='black', ls=":", label="Injected") + + ax.set_xscale("log") + ax.set_yscale("log") + + ax.set_xlabel("Energy (keV)") + ax.set_ylabel(r"$E^2 \frac{dN}{dE}$ (keV cm$^{-2}$ s$^{-1}$)") + + ax.legend() + + plt.show() + + here + + else: + raise ValueError(r"Unknown case '{case}'") + + if __name__ == "__main__": import cProfile From 9f3ef28a651ed137e96941fe449194040f3abb80 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Wed, 16 Jul 2025 10:45:04 -0400 Subject: [PATCH 056/133] Make copy param optional Signed-off-by: Israel Martinez --- cosipy/background_estimation/free_norm_threeml_binned_bkg.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py index 2671b8fd..6cb0fc1d 100644 --- a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py +++ b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py @@ -116,7 +116,7 @@ def set_parameters(self, **parameters:Dict[str, u.Quantity]) -> None: def parameters(self) -> Dict[str, u.Quantity]: return {l:u.Quantity(n) for l,n in self.norms.items()} - def expectation(self, data:BinnedDataInterface, copy:bool)->Histogram: + def expectation(self, data:BinnedDataInterface, copy:bool = True)->Histogram: """ Parameters From 53bf6adde96723763e47ce511dbff6303e9115d8 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Thu, 17 Jul 2025 09:47:00 -0400 Subject: [PATCH 057/133] Fix exception name Signed-off-by: Israel Martinez --- cosipy/response/instrument_response.py | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-) diff --git a/cosipy/response/instrument_response.py b/cosipy/response/instrument_response.py index f6f45c0b..7f8e458e 100644 --- a/cosipy/response/instrument_response.py +++ b/cosipy/response/instrument_response.py @@ -102,7 +102,7 @@ def differential_effective_area(self, if not isinstance(axes["PsiChi"].coordsys, SpacecraftFrame): # Is inertial if attitude is None: - raise InputError("User need to provide the attitude information in order to transform to spacecraft coordinates") + raise RuntimeError("User need to provide the attitude information in order to transform to spacecraft coordinates") return self._differential_effective_area_inertial(attitude, axes, direction, polarization, weight, out, add_inplace) From d4b22649836fa1fc614ed41afa10300f18a92190 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Fri, 18 Jul 2025 13:30:07 -0400 Subject: [PATCH 058/133] Fix transformation of the orbit location as specified in an .ori file to GCRS Signed-off-by: Israel Martinez --- cosipy/spacecraftfile/spacecraft_file.py | 80 +++++++++--------------- 1 file changed, 30 insertions(+), 50 deletions(-) diff --git a/cosipy/spacecraftfile/spacecraft_file.py b/cosipy/spacecraftfile/spacecraft_file.py index f5f16ea0..fc1beed6 100644 --- a/cosipy/spacecraftfile/spacecraft_file.py +++ b/cosipy/spacecraftfile/spacecraft_file.py @@ -3,9 +3,10 @@ import numpy as np import astropy.units as u +import astropy.constants as c from astropy.time import Time -from astropy.coordinates import SkyCoord, EarthLocation, GCRS, ITRS +from astropy.coordinates import SkyCoord, EarthLocation, GCRS, SphericalRepresentation, CartesianRepresentation from mhealpy import HealpixBase from histpy import Histogram, TimeAxis from mhealpy import HealpixMap @@ -28,7 +29,7 @@ class SpacecraftHistory: def __init__(self, obstime: Time, attitude: Attitude, - location: Union[EarthLocation, GCRS, ITRS], + location: GCRS, livetime: u.Quantity = None): """ Handles the spacecraft orientation. Calculates the dwell obstime @@ -42,7 +43,7 @@ def __init__(self, attitude: Spacecraft orientation with respect to an inertial system. location: - Location of the spacecraft at each timestamp. + Location of the spacecraft at each timestamp in Earth-centered inertial (ECI) coordinates. livetime: Time the instrument was live for the corresponding obstime bin. 
Should have one less element than the number of @@ -65,24 +66,7 @@ def __init__(self, self._attitude = attitude - self._location = self._standardize_location(location) - - def _standardize_location(self, location: Union[EarthLocation, GCRS, ITRS]): - - if isinstance(location, EarthLocation): - # Already the standard format - return location - - elif isinstance(location, GCRS): - # GCRS -> ITRS and call again - return self._standardize_location(location.transform_to(ITRS(self.obstime))) - - elif isinstance(location, ITRS): - # ITRS -> EarthLocation - return location.earth_location - - else: - raise TypeError(f"Location type {type(location)} not supported.") + self._gcrs = location @property def nintervals(self): @@ -125,8 +109,16 @@ def attitude(self): return self._attitude @property - def location(self)->EarthLocation: - return self._location + def location(self)->GCRS: + return self._gcrs + + @property + def earth_zenith(self) -> SkyCoord: + """ + Pointing of the Earth's zenith at the location of the SC + """ + gcrs_sph = self._gcrs.represent_as(SphericalRepresentation) + return SkyCoord(ra=gcrs_sph.lon, dec=gcrs_sph.lat, frame='icrs', copy=False) @classmethod def open(cls, file, tstart:Time = None, tstop:Time = None) -> "SpacecraftHistory": @@ -229,11 +221,11 @@ def _parse_from_file(cls, file, tstart:Time = None, tstop:Time = None) -> "Space livetime = livetime[:-1]*u.s # The last element is 0. # Currently, the orbit information is in a weird format. - # The altitude it's with respect to the Earth's source, like + # The altitude is specified with respect to the Earth's surface, like # you would specify it in a geodetic format, while # the lon/lat is specified in J2000, like you would in ECI. # Eventually everything should be in ECI (GCRS in astropy - # for all purposes), but for now let's do the conversion. + # for all practical purposes), but for now let's do the conversion. # 1. Get the direction in galactic # 2. Transform to GCRS, which uses RA/Dec (ICRS-like). # This is represented in the unit sphere @@ -241,11 +233,12 @@ def _parse_from_file(cls, file, tstart:Time = None, tstop:Time = None) -> "Space # Should take care of the non-spherical Earth # 4. 
Go back GCRS, now with the correct distance # (from the Earth's center) - zenith_gal = SkyCoord(l=earth_lon * u.deg, b=earth_lat * u.deg, frame="galactic") + zenith_gal = SkyCoord(l=earth_lon * u.deg, b=earth_lat * u.deg, frame="galactic", copy = False) gcrs = zenith_gal.transform_to('gcrs') earth_loc = EarthLocation.from_geodetic(lon=gcrs.ra, lat=gcrs.dec, height=altitude*u.km) + gcrs2 = GCRS(ra=gcrs.ra, dec=gcrs.dec, distance=earth_loc.itrs.cartesian.norm(), copy=False) - return cls(time, attitude, earth_loc, livetime) + return cls(time, attitude, gcrs2, livetime) def _interp_attitude(self, points, weights) -> Attitude: """ @@ -281,7 +274,7 @@ def interp_attitude(self, time) -> Attitude: return self._interp_attitude(points, weights) - def _interp_location(self, points, weights) -> EarthLocation: + def _interp_location(self, points, weights) -> GCRS: """ Parameters @@ -295,18 +288,15 @@ def _interp_location(self, points, weights) -> EarthLocation: """ # TODO: we could do a better interpolation using more points and orbital dynamics - - x = self._location.x - y = self._location.y - z = self._location.z + x, y, z = self._gcrs.represent_as('cartesian').xyz x_interp = x[points[0]] * weights[0] + x[points[1]] * weights[1] y_interp = y[points[0]] * weights[0] + y[points[1]] * weights[1] z_interp = z[points[0]] * weights[0] + z[points[1]] * weights[1] - interp_location = EarthLocation.from_geocentric(x=x_interp, y=y_interp, z=z_interp) + interp_gcrs = GCRS(x=x_interp, y=y_interp, z=z_interp, representation_type = 'cartesian') - return interp_location + return interp_gcrs def interp_location(self, time) -> EarthLocation: """ @@ -414,7 +404,7 @@ def select_interval(self, start:Time = None, stop:Time = None) -> "SpacecraftHis # Center values new_obstime = self.obstime[start_points[1]:stop_points[1]] new_attitude = self._attitude.as_matrix()[start_points[1]:stop_points[1]] - new_location = self._location[start_points[1]:stop_points[1]] + new_location = self._gcrs[start_points[1]:stop_points[1]] new_livetime = self.livetime[start_points[1]:stop_points[0]] # Left edge @@ -566,7 +556,6 @@ def get_scatt_map(self, target_coord=None, scheme = 'ring', coordsys = 'galactic', - r_earth = None, earth_occ = True ) -> SpacecraftAttitudeMap: @@ -585,8 +574,6 @@ def get_scatt_map(self, The scheme of the scatt map (the default is "ring") coordsys : str, optional The coordinate system used in the scatt map (the default is "galactic). - r_earth : Quantity, optional - Earth radius in km (default is 6378 km). earth_occ : bool, optional Option to include Earth occultation in scatt map calculation. Default is True. @@ -597,22 +584,16 @@ def get_scatt_map(self, The spacecraft attitude map. 
""" - if r_earth is None: - r_earth = 6378.0 * u.km - # Check if target_coord is needed if earth_occ and target_coord is None: raise ValueError("target_coord is needed when earth_occ = True") # Get orientations - timestamps = self.obstime attitudes = self.attitude # Altitude at each point in the orbit: - altitude = self._location.height - - # Earth zenith at each point in the orbit: - earth_zenith = self.location.itrs + gcrs_cart = self._gcrs.represent_as(CartesianRepresentation) + dist_earth_center = gcrs_cart.norm() # Fill (only 2 axes needed to fully define the orientation) h_ori = SpacecraftAttitudeMap(nside = nside, @@ -622,8 +603,7 @@ def get_scatt_map(self, x,y,z = attitudes[:-1].as_axes() # Get max angle based on altitude: - max_angle = np.pi*u.rad - np.arcsin(r_earth/(r_earth + altitude)) - max_angle = max_angle.to_value(u.deg) # angles in degree + max_angle = np.pi*u.rad - np.arcsin(c.R_earth/dist_earth_center) # Define weights and set to 0 if blocked by Earth: weight = self.livetime @@ -631,10 +611,10 @@ def get_scatt_map(self, if earth_occ: # Calculate angle between source direction and Earth zenith # for each obstime stamp: - src_angle = target_coord.separation(earth_zenith) + src_angle = target_coord.separation(self.earth_zenith) # Get pointings that are occulted by Earth: - earth_occ_index = src_angle.value >= max_angle + earth_occ_index = src_angle >= max_angle # Mask weight[earth_occ_index[:-1]] = 0 From 3a0e9baa6cca2beedf948c05eb126ca2e8cf5515 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Fri, 18 Jul 2025 15:58:52 -0400 Subject: [PATCH 059/133] Fix bug with gcrs in cartesian Signed-off-by: Israel Martinez --- cosipy/spacecraftfile/spacecraft_file.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/cosipy/spacecraftfile/spacecraft_file.py b/cosipy/spacecraftfile/spacecraft_file.py index fc1beed6..793cbac2 100644 --- a/cosipy/spacecraftfile/spacecraft_file.py +++ b/cosipy/spacecraftfile/spacecraft_file.py @@ -404,7 +404,7 @@ def select_interval(self, start:Time = None, stop:Time = None) -> "SpacecraftHis # Center values new_obstime = self.obstime[start_points[1]:stop_points[1]] new_attitude = self._attitude.as_matrix()[start_points[1]:stop_points[1]] - new_location = self._gcrs[start_points[1]:stop_points[1]] + new_location = self._gcrs[start_points[1]:stop_points[1]].cartesian.xyz new_livetime = self.livetime[start_points[1]:stop_points[0]] # Left edge @@ -420,10 +420,8 @@ def select_interval(self, start:Time = None, stop:Time = None) -> "SpacecraftHis start_attitude = self._interp_attitude(start_points, start_weights) new_attitude = np.append(start_attitude.as_matrix()[None], new_attitude, axis=0) - start_location = self._interp_location(start_points, start_weights)[None] - new_location = EarthLocation.from_geocentric(np.append(start_location.x, new_location.x), - np.append(start_location.y, new_location.y), - np.append(start_location.z, new_location.z)) + start_location = self._interp_location(start_points, start_weights)[None].cartesian.xyz + new_location = np.append(start_location, new_location, axis = 1) first_livetime = self.livetime[start_points[0]] * start_weights[0] new_livetime = np.append(first_livetime, new_livetime) @@ -439,11 +437,11 @@ def select_interval(self, start:Time = None, stop:Time = None) -> "SpacecraftHis new_attitude = np.append(new_attitude, stop_attitude.as_matrix()[None], axis=0) new_attitude = Attitude.from_matrix(new_attitude, frame=self._attitude.frame) - stop_location = 
self._interp_location(stop_points, stop_weights)[None] - new_location = EarthLocation.from_geocentric(np.append(new_location.x, stop_location.x), - np.append(new_location.y, stop_location.y), - np.append(new_location.z, stop_location.z)) + stop_location = self._interp_location(stop_points, stop_weights)[None].cartesian.xyz + new_location = np.append(new_location, stop_location, axis=1) + new_location = GCRS(x = new_location[0], y = new_location[1], z = new_location[2], + representation_type='cartesian') if np.all(start_points == stop_points): # This can only happen if the requested interval fell completely From ac7f843238be0e8363eaa493ec2e05201121f309 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Fri, 18 Jul 2025 15:59:11 -0400 Subject: [PATCH 060/133] Rever to being an example of the GRB fit only, and not Crab as well Signed-off-by: Israel Martinez --- ...ample_grb_fit_threeml_plugin_interfaces.py | 237 ++++-------------- 1 file changed, 48 insertions(+), 189 deletions(-) diff --git a/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py index 14646430..038a636f 100644 --- a/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py @@ -44,107 +44,50 @@ def main(): # fetch_wasabi_file('COSI-SMEX/DC2/Responses/SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip', output=str(data_path / 'SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip'), unzip = True, checksum = 'e8ff763c5d9e63d3797567a4a51d9eda') - case = 'grb' - case = 'crab' - - if case == 'grb': - - # Set model to fit - l = 93. - b = -53. - - alpha = -1 - beta = -3 - xp = 450. * u.keV - piv = 500. * u.keV - K = 1 / u.cm / u.cm / u.s / u.keV - - spectrum = Band() - spectrum.beta.min_value = -15.0 - spectrum.alpha.value = alpha - spectrum.beta.value = beta - spectrum.xp.value = xp.value - spectrum.K.value = K.value - spectrum.piv.value = piv.value - spectrum.xp.unit = xp.unit - spectrum.K.unit = K.unit - spectrum.piv.unit = piv.unit - - source = PointSource("source", # Name of source (arbitrary, but needs to be unique) - l=l, # Longitude (deg) - b=b, # Latitude (deg) - spectral_shape=spectrum) # Spectral model - - # Date preparation - binned_data = BinnedData(data_path / "grb.yaml") - binned_data.load_binned_data_from_hdf5(binned_data=data_path / "grb_bkg_binned_data.hdf5") - - bkg = BinnedData(data_path / "background.yaml") - - bkg.load_binned_data_from_hdf5(binned_data=data_path / "bkg_binned_data_1s_local.hdf5") - - bkg_tmin = 1842597310.0 - bkg_tmax = 1842597550.0 - bkg_min = np.where(bkg.binned_data.axes['Time'].edges.value == bkg_tmin)[0][0] - bkg_max = np.where(bkg.binned_data.axes['Time'].edges.value == bkg_tmax)[0][0] - bkg_dist = bkg.binned_data.slice[{'Time': slice(bkg_min, bkg_max)}].project('Em', 'Phi', 'PsiChi') - - tmin = Time(1842597410.0, format='unix') - tmax = Time(1842597450.0, format='unix') - - elif case == 'crab': - - # Set model to fit - l = 184.56 - b = -5.78 - - alpha = -1.99 - beta = -2.32 - E0 = 531. * (alpha - beta) * u.keV - xp = E0 * (alpha + 2) / (alpha - beta) - piv = 500. 
* u.keV - K = 3.07e-5 / u.cm / u.cm / u.s / u.keV - - spectrum = Band() - - spectrum.alpha.min_value = -2.14 - spectrum.alpha.max_value = 3.0 - spectrum.beta.min_value = -5.0 - spectrum.beta.max_value = -2.15 - spectrum.xp.min_value = 1.0 - - spectrum.alpha.value = alpha - spectrum.beta.value = beta - spectrum.xp.value = xp.value - spectrum.K.value = K.value - spectrum.piv.value = piv.value - - spectrum.xp.unit = xp.unit - spectrum.K.unit = K.unit - spectrum.piv.unit = piv.unit - - spectrum.alpha.delta = 0.01 - spectrum.beta.delta = 0.01 - - source = PointSource("source", # Name of source (arbitrary, but needs to be unique) - l=l, # Longitude (deg) - b=b, # Latitude (deg) - spectral_shape=spectrum) # Spectral model - - # Data preparation - binned_data = BinnedData(data_path / "crab.yaml") - bkg = BinnedData(data_path / "background.yaml") - - binned_data.load_binned_data_from_hdf5(binned_data=data_path / "crab_bkg_binned_data.hdf5") - bkg.load_binned_data_from_hdf5(binned_data=data_path / "bkg_binned_data.hdf5") - - bkg_dist = bkg.binned_data.project('Em', 'Phi', 'PsiChi') - - # SC attitude and orbit - ori = SpacecraftHistory.open(data_path / "20280301_3_month_with_orbital_info.ori") - - else: - raise ValueError(r"Unknown case '{case}'") + # Set model to fit + l = 93. + b = -53. + + alpha = -1 + beta = -3 + xp = 450. * u.keV + piv = 500. * u.keV + K = 1 / u.cm / u.cm / u.s / u.keV + + spectrum = Band() + spectrum.beta.min_value = -15.0 + spectrum.alpha.value = alpha + spectrum.beta.value = beta + spectrum.xp.value = xp.value + spectrum.K.value = K.value + spectrum.piv.value = piv.value + spectrum.xp.unit = xp.unit + spectrum.K.unit = K.unit + spectrum.piv.unit = piv.unit + + source = PointSource("source", # Name of source (arbitrary, but needs to be unique) + l=l, # Longitude (deg) + b=b, # Latitude (deg) + spectral_shape=spectrum) # Spectral model + + # Date preparation + binned_data = BinnedData(data_path / "grb.yaml") + binned_data.load_binned_data_from_hdf5(binned_data=data_path / "grb_bkg_binned_data.hdf5") + + bkg = BinnedData(data_path / "background.yaml") + + bkg.load_binned_data_from_hdf5(binned_data=data_path / "bkg_binned_data_1s_local.hdf5") + + bkg_tmin = 1842597310.0 + bkg_tmax = 1842597550.0 + bkg_min = np.where(bkg.binned_data.axes['Time'].edges.value == bkg_tmin)[0][0] + bkg_max = np.where(bkg.binned_data.axes['Time'].edges.value == bkg_tmax)[0][0] + bkg_dist = bkg.binned_data.slice[{'Time': slice(bkg_min, bkg_max)}].project('Em', 'Phi', 'PsiChi') + + tmin = Time(1842597410.0, format='unix') + tmax = Time(1842597450.0, format='unix') + ori = SpacecraftHistory.open(data_path / "20280301_3_month_with_orbital_info.ori", tmin, tmax) + ori = ori.select_interval(tmin, tmax) # Function changed name during refactoring # Prepare instrument response dr_path = data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5" @@ -168,10 +111,10 @@ def main(): # can provide the response for an arbitrary directions, Ei and Pol values. 
# NOTE: this is currently only implemented for data in local coords psr = BinnedThreeMLPointSourceResponse(instrument_response, - sc_history=ori, - direction_axis = data.axes['PsiChi'], - energy_axis = dr.axes['Ei'], - polarization_axis = dr.axes['Pol'] if 'Pol' in dr.axes.labels else None) + sc_history=ori, + direction_axis = data.axes['PsiChi'], + energy_axis = dr.axes['Ei'], + polarization_axis = dr.axes['Pol'] if 'Pol' in dr.axes.labels else None) response = BinnedThreeMLModelFolding(point_source_response = psr) @@ -201,90 +144,6 @@ def main(): print(results.display()) - # plot - if case == 'crab': - - alpha_inj = -1.99 - beta_inj = -2.32 - E0_inj = 531. * (alpha_inj - beta_inj) * u.keV - xp_inj = E0_inj * (alpha_inj + 2) / (alpha_inj - beta_inj) - piv_inj = 100. * u.keV - K_inj = 7.56e-4 / u.cm / u.cm / u.s / u.keV - - spectrum_inj = Band() - - spectrum_inj.alpha.min_value = -2.14 - spectrum_inj.alpha.max_value = 3.0 - spectrum_inj.beta.min_value = -5.0 - spectrum_inj.beta.max_value = -2.15 - spectrum_inj.xp.min_value = 1.0 - - spectrum_inj.alpha.value = alpha_inj - spectrum_inj.beta.value = beta_inj - spectrum_inj.xp.value = xp_inj.value - spectrum_inj.K.value = K_inj.value - spectrum_inj.piv.value = piv_inj.value - - spectrum_inj.xp.unit = xp_inj.unit - spectrum_inj.K.unit = K_inj.unit - spectrum_inj.piv.unit = piv_inj.unit - - results = like.results - - print(results.display()) - - parameters = {par.name: results.get_variates(par.path) - for par in results.optimized_model["source"].parameters.values() - if par.free} - - results_err = results.propagate(results.optimized_model["source"].spectrum.main.shape.evaluate_at, **parameters) - - print(results.optimized_model["source"]) - - energy = np.geomspace(100 * u.keV, 10 * u.MeV).to_value(u.keV) - - flux_lo = np.zeros_like(energy) - flux_median = np.zeros_like(energy) - flux_hi = np.zeros_like(energy) - flux_inj = np.zeros_like(energy) - - for i, e in enumerate(energy): - flux = results_err(e) - flux_median[i] = flux.median - flux_lo[i], flux_hi[i] = flux.equal_tail_interval(cl=0.68) - flux_inj[i] = spectrum_inj.evaluate_at(e) - - binned_energy_edges = binned_data.binned_data.axes['Em'].edges.value - binned_energy = np.array([]) - bin_sizes = np.array([]) - - for i in range(len(binned_energy_edges) - 1): - binned_energy = np.append(binned_energy, (binned_energy_edges[i + 1] + binned_energy_edges[i]) / 2) - bin_sizes = np.append(bin_sizes, binned_energy_edges[i + 1] - binned_energy_edges[i]) - - fig, ax = plt.subplots() - - ax.plot(energy, energy * energy * flux_median, label="Best fit") - ax.fill_between(energy, energy * energy * flux_lo, energy * energy * flux_hi, alpha=.5, - label="Best fit (errors)") - ax.plot(energy, energy * energy * flux_inj, color='black', ls=":", label="Injected") - - ax.set_xscale("log") - ax.set_yscale("log") - - ax.set_xlabel("Energy (keV)") - ax.set_ylabel(r"$E^2 \frac{dN}{dE}$ (keV cm$^{-2}$ s$^{-1}$)") - - ax.legend() - - plt.show() - - here - - else: - raise ValueError(r"Unknown case '{case}'") - - if __name__ == "__main__": import cProfile From 00c5c436cd2a2d35f571ec20b11d6daade7dfbfc Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Fri, 18 Jul 2025 16:24:07 -0400 Subject: [PATCH 061/133] Add working example of Crab fit using new interfaces. 
- Move grb example to its own folder to avoid input/output files collisions Signed-off-by: Israel Martinez --- ...mple_crab_fit_threeml_plugin_interfaces.py | 460 ++++++++++++++++++ ...ample_grb_fit_threeml_plugin_interfaces.py | 0 2 files changed, 460 insertions(+) create mode 100644 docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py rename docs/api/interfaces/{ => examples/grb}/example_grb_fit_threeml_plugin_interfaces.py (100%) mode change 100644 => 100755 diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py new file mode 100644 index 00000000..342d609b --- /dev/null +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py @@ -0,0 +1,460 @@ +#!/usr/bin/env python +# coding: utf-8 + +# # Spectral fitting example (Crab) + +# **To run this, you need the following files, which can be downloaded using the first few cells of this notebook:** +# - orientation file (20280301_3_month_with_orbital_info.ori) +# - binned data (crab_bkg_binned_data.hdf5, crab_binned_data.hdf5, & bkg_binned_data.hdf5) +# - detector response (SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5) +# +# **The binned data are simulations of the Crab Nebula and albedo photon background produced using the COSI SMEX mass model. The detector response needs to be unzipped before running the notebook.** + +# This notebook fits the spectrum of a Crab simulated using MEGAlib and combined with background. +# +# [3ML](https://threeml.readthedocs.io/) is a high-level interface that allows multiple datasets from different instruments to be used coherently to fit the parameters of source model. A source model typically consists of a list of sources with parametrized spectral shapes, sky locations and, for extended sources, shape. Polarization is also possible. A "coherent" analysis, in this context, means that the source model parameters are fitted using all available datasets simultanously, rather than performing individual fits and finding a well-suited common model a posteriori. +# +# In order for a dataset to be included in 3ML, each instrument needs to provide a "plugin". Each plugin is responsible for reading the data, convolving the source model (provided by 3ML) with the instrument response, and returning a likelihood. In our case, we'll compute a binned Poisson likelihood: +# +# $$ +# \log \mathcal{L}(\mathbf{x}) = \sum_i \log \frac{\lambda_i(\mathbf{x})^{d_i} \exp (-\lambda_i)}{d_i!} +# $$ +# +# where $d_i$ are the counts on each bin and $\lambda_i$ are the expected counts given a source model with parameters $\mathbf{x}$. +# +# In this example, we will fit a single point source with a known location. We'll assume the background is known and fixed up to a scaling factor. Finally, we will fit a Band function: +# +# $$ +# f(x) = K \begin{cases} \left(\frac{x}{E_{piv}}\right)^{\alpha} \exp \left(-\frac{(2+\alpha) +# * x}{x_{p}}\right) & x \leq (\alpha-\beta) \frac{x_{p}}{(\alpha+2)} \\ \left(\frac{x}{E_{piv}}\right)^{\beta} +# * \exp (\beta-\alpha)\left[\frac{(\alpha-\beta) x_{p}}{E_{piv}(2+\alpha)}\right]^{\alpha-\beta} +# * &x>(\alpha-\beta) \frac{x_{p}}{(\alpha+2)} \end{cases} +# $$ +# +# where $K$ (normalization), $\alpha$ & $\beta$ (spectral indeces), and $x_p$ (peak energy) are the free parameters, while $E_{piv}$ is the pivot energy which is fixed (and arbitrary). 
+# +# Considering these assumptions: +# +# $$ +# \lambda_i(\mathbf{x}) = B*b_i + s_i(\mathbf{x}) +# $$ +# +# where $B*b_i$ are the estimated counts due to background in each bin with $B$ the amplitude and $b_i$ the shape of the background, and $s_i$ are the corresponding expected counts from the source, the goal is then to find the values of $\mathbf{x} = [K, \alpha, \beta, x_p]$ and $B$ that maximize $\mathcal{L}$. These are the best estimations of the parameters. +# +# The final module needs to also fit the time-dependent background, handle multiple point-like and extended sources, as well as all the spectral models supported by 3ML. Eventually, it will also fit the polarization angle. However, this simple example already contains all the necessary pieces to do a fit. + +# In[1]: + + +from cosipy import test_data, BinnedData +from cosipy.spacecraftfile import SpacecraftHistory +from cosipy.response.FullDetectorResponse import FullDetectorResponse +from cosipy.util import fetch_wasabi_file + +from cosipy.statistics import PoissonLikelihood +from cosipy.background_estimation import FreeNormBinnedBackground +from cosipy.interfaces import ThreeMLPluginInterface +from cosipy.response import BinnedThreeMLModelFolding, BinnedInstrumentResponse, BinnedThreeMLPointSourceResponse + +import sys + +from scoords import SpacecraftFrame + +from astropy.time import Time +import astropy.units as u +from astropy.coordinates import SkyCoord, Galactic + +import numpy as np +import matplotlib.pyplot as plt + +from threeML import Band, PointSource, Model, JointLikelihood, DataList +from astromodels import Parameter + +from pathlib import Path + +import os + +def main(): + + # ## Download and read in binned data + + # Define the path to the directory containing the data, detector response, orientation file, and yaml files if they have already been downloaded, or the directory to download the files into + + # In[2]: + + + data_path = Path("") # /path/to/files. 
Current dir by default + + + # Download the orientation file (684.38 MB) + + # In[ ]: + + + fetch_wasabi_file('COSI-SMEX/DC2/Data/Orientation/20280301_3_month_with_orbital_info.ori', output=str(data_path / '20280301_3_month_with_orbital_info.ori'), checksum = '416fcc296fc37a056a069378a2d30cb2') + + + # Download the binned Crab+background data (99.16 MB) + + # In[5]: + + + fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/crab_bkg_binned_data.hdf5', output=str(data_path / 'crab_bkg_binned_data.hdf5'), checksum = '85658e102414c4f746e64a7d29c607a4') + + + # Download the binned Crab data (13.16 MB) + + # In[7]: + + + fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/crab_binned_data.hdf5', output=str(data_path / 'crab_binned_data.hdf5'), checksum = '6e5bccb48556bdbd259519c52dec9dcb') + + + # Download the binned background data (89.10 MB) + + # In[9]: + + + fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/bkg_binned_data.hdf5', output=str(data_path / 'bkg_binned_data.hdf5'), checksum = '54221d8556eb4ef520ef61da8083e7f4') + + + # Download the response file (596.06 MB) + + # In[10]: + + + # Before and after Jeremy's changes + fetch_wasabi_file('COSI-SMEX/DC2/Responses/SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip', output=str(data_path / 'SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip'), unzip = True, checksum = 'e8ff763c5d9e63d3797567a4a51d9eda') + #fetch_wasabi_file('COSI-SMEX/develop/Data/Responses/SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5', output=str(data_path / 'SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5'), checksum = 'eb72400a1279325e9404110f909c7785') + + + # Read in the spacecraft orientation file + + # In[4]: + + + sc_orientation = SpacecraftHistory.open(data_path / "20280301_3_month_with_orbital_info.ori") + + + # Create BinnedData objects for the Crab only, Crab+background, and background only. 
The Crab only simulation is not used for the spectral fit, but can be used to compare the fitted spectrum to the source simulation + + # In[5]: + + + crab = BinnedData(data_path / "crab.yaml") + crab_bkg = BinnedData(data_path / "crab.yaml") + bkg = BinnedData(data_path / "background.yaml") + + + # Load binned .hdf5 files + + # In[6]: + + + crab.load_binned_data_from_hdf5(binned_data=data_path / "crab_binned_data.hdf5") + crab_bkg.load_binned_data_from_hdf5(binned_data=data_path / "crab_bkg_binned_data.hdf5") + bkg.load_binned_data_from_hdf5(binned_data=data_path / "bkg_binned_data.hdf5") + + + # Define the path to the detector response + + # In[7]: + + + # Before and after Jeremy's changes + dr = str(data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5") # path to detector response + #dr = str(data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5") # path to detector response + + + # ## Perform spectral fit + + # ============ Interfaces ============== + + output_suffix = 'interfaces' + + dr = FullDetectorResponse.open(dr) + instrument_response = BinnedInstrumentResponse(dr) + + # Set background parameter, which is used to fit the amplitude of the background, and instantiate the COSI 3ML plugin + + # In[8]: + + bkg_dist = bkg.binned_data.project('Em', 'Phi', 'PsiChi') + + # Workaround to avoid inf values. Out bkg should be smooth, but currently it's not. + # Reproduces results before refactoring. It's not _exactly_ the same, since this fudge value was 1e-12, and + # it was added to the expectation, not the normalized bkg + bkg_dist += sys.float_info.min + + data = crab_bkg.get_em_cds() + + bkg = FreeNormBinnedBackground(bkg_dist) + + # Currently using the same NnuLambda, Ei and Pol axes as the underlying FullDetectorResponse, + # matching the behavior of v0.3. This is all the current BinnedInstrumentResponse can do. + # In principle, this can be decoupled, and a BinnedInstrumentResponseInterface implementation + # can provide the response for an arbitrary directions, Ei and Pol values. + # NOTE: this is currently only implemented for data in local coords + psr = BinnedThreeMLPointSourceResponse(instrument_response, + sc_history=sc_orientation, + direction_axis = data.axes['PsiChi'], + energy_axis = dr.axes['Ei'], + polarization_axis = dr.axes['Pol'] if 'Pol' in dr.axes.labels else None) + + ##==== + + + response = BinnedThreeMLModelFolding(point_source_response = psr) + + like_fun = PoissonLikelihood() + like_fun.set_data(data) + like_fun.set_response(response) + like_fun.set_background(bkg) + + cosi = ThreeMLPluginInterface('cosi', like_fun) + + # Nuisance parameter guess, bounds, etc. + cosi.bkg_parameter['bkg_norm'] = Parameter("bkg_norm", # background parameter + 1, # initial value of parameter + min_value=0, # minimum value of parameter + max_value=5, # maximum value of parameter + delta=0.05, # initial step used by fitting engine + ) + + + # ======== Interfaces end ========== + + # Define a point source at the known location with a Band function spectrum and add it to the model. The initial values of the Band function parameters are set to the true values used to simulate the source + + + # In[9]: + + + l = 184.56 + b = -5.78 + + alpha = -1.99 + beta = -2.32 + E0 = 531. * (alpha - beta) * u.keV + xp = E0 * (alpha + 2) / (alpha - beta) + piv = 500. 
* u.keV + K = 3.07e-5 / u.cm / u.cm / u.s / u.keV + + spectrum = Band() + + spectrum.alpha.min_value = -2.14 + spectrum.alpha.max_value = 3.0 + spectrum.beta.min_value = -5.0 + spectrum.beta.max_value = -2.15 + spectrum.xp.min_value = 1.0 + + spectrum.alpha.value = alpha + spectrum.beta.value = beta + spectrum.xp.value = xp.value + spectrum.K.value = K.value + spectrum.piv.value = piv.value + + spectrum.xp.unit = xp.unit + spectrum.K.unit = K.unit + spectrum.piv.unit = piv.unit + + spectrum.alpha.delta = 0.01 + spectrum.beta.delta = 0.01 + + source = PointSource("source", # Name of source (arbitrary, but needs to be unique) + l = l, # Longitude (deg) + b = b, # Latitude (deg) + spectral_shape = spectrum) # Spectral model + + # Optional: free the position parameters + #source.position.l.free = True + #source.position.b.free = True + + model = Model(source) # Model with single source. If we had multiple sources, we would do Model(source1, source2, ...) + + # Optional: if you want to call get_log_like manually, then you also need to set the model manually + # 3ML does this internally during the fit though + cosi.set_model(model) + + + # Gather all plugins and combine with the model in a JointLikelihood object, then perform maximum likelihood fit + + # In[10]: + + + plugins = DataList(cosi) # If we had multiple instruments, we would do e.g. DataList(cosi, lat, hawc, ...) + + like = JointLikelihood(model, plugins, verbose = False) + + like.fit() + + + # ## Error propagation and plotting (Band function) + + # Define Band function spectrum injected into MEGAlib + + # In[11]: + + ## Injected + + l = 184.56 + b = -5.78 + + alpha_inj = -1.99 + beta_inj = -2.32 + E0_inj = 531. * (alpha_inj - beta_inj) * u.keV + xp_inj = E0_inj * (alpha_inj + 2) / (alpha_inj - beta_inj) + piv_inj = 100. * u.keV + K_inj = 7.56e-4 / u.cm / u.cm / u.s / u.keV + + spectrum_inj = Band() + + spectrum_inj.alpha.min_value = -2.14 + spectrum_inj.alpha.max_value = 3.0 + spectrum_inj.beta.min_value = -5.0 + spectrum_inj.beta.max_value = -2.15 + spectrum_inj.xp.min_value = 1.0 + + spectrum_inj.alpha.value = alpha_inj + spectrum_inj.beta.value = beta_inj + spectrum_inj.xp.value = xp_inj.value + spectrum_inj.K.value = K_inj.value + spectrum_inj.piv.value = piv_inj.value + + spectrum_inj.xp.unit = xp_inj.unit + spectrum_inj.K.unit = K_inj.unit + spectrum_inj.piv.unit = piv_inj.unit + + # Expectation for injected source + source_inj = PointSource("source", # Name of source (arbitrary, but needs to be unique) + l=l, # Longitude (deg) + b=b, # Latitude (deg) + spectral_shape=spectrum_inj) # Spectral model + + psr.set_source(source_inj) + expectation_inj = psr.expectation(data, copy=True) + + + # The summary of the results above tell you the optimal values of the parameters, as well as the errors. 
Propogate the errors to the "evaluate_at" method of the spectrum + + # In[12]: + + + results = like.results + + + print(results.display()) + + parameters = {par.name:results.get_variates(par.path) + for par in results.optimized_model["source"].parameters.values() + if par.free} + + results_err = results.propagate(results.optimized_model["source"].spectrum.main.shape.evaluate_at, **parameters) + + print(results.optimized_model["source"]) + + # Evaluate the flux and errors at a range of energies for the fitted and injected spectra, and the simulated source flux + + # In[13]: + + + energy = np.geomspace(100*u.keV,10*u.MeV).to_value(u.keV) + + flux_lo = np.zeros_like(energy) + flux_median = np.zeros_like(energy) + flux_hi = np.zeros_like(energy) + flux_inj = np.zeros_like(energy) + + for i, e in enumerate(energy): + flux = results_err(e) + flux_median[i] = flux.median + flux_lo[i], flux_hi[i] = flux.equal_tail_interval(cl=0.68) + flux_inj[i] = spectrum_inj.evaluate_at(e) + + binned_energy_edges = crab.binned_data.axes['Em'].edges.value + binned_energy = np.array([]) + bin_sizes = np.array([]) + + for i in range(len(binned_energy_edges)-1): + binned_energy = np.append(binned_energy, (binned_energy_edges[i+1] + binned_energy_edges[i]) / 2) + bin_sizes = np.append(bin_sizes, binned_energy_edges[i+1] - binned_energy_edges[i]) + + expectation = response.expectation(data, copy = True) + + + # Plot the fitted and injected spectra + + # In[14]: + + + fig,ax = plt.subplots() + + ax.plot(energy, energy*energy*flux_median, label = "Best fit") + ax.fill_between(energy, energy*energy*flux_lo, energy*energy*flux_hi, alpha = .5, label = "Best fit (errors)") + ax.plot(energy, energy*energy*flux_inj, color = 'black', ls = ":", label = "Injected") + + ax.set_xscale("log") + ax.set_yscale("log") + + ax.set_xlabel("Energy (keV)") + ax.set_ylabel(r"$E^2 \frac{dN}{dE}$ (keV cm$^{-2}$ s$^{-1}$)") + + ax.legend() + + plt.show() + + # Plot the fitted spectrum convolved with the response, as well as the simulated source counts + + # In[15]: + + + fig,ax = plt.subplots() + + ax.stairs(expectation.project('Em').todense().contents, binned_energy_edges, color='purple', label = "Best fit convolved with response") + ax.stairs(expectation_inj.project('Em').todense().contents, binned_energy_edges, color='blue', label = "Injected spectrum convolved with response") + ax.errorbar(binned_energy, expectation.project('Em').todense().contents, yerr=np.sqrt(expectation.project('Em').todense().contents), color='purple', linewidth=0, elinewidth=1) + ax.stairs(crab.binned_data.project('Em').todense().contents, binned_energy_edges, color = 'black', ls = ":", label = "Source counts") + ax.errorbar(binned_energy, crab.binned_data.project('Em').todense().contents, yerr=np.sqrt(crab.binned_data.project('Em').todense().contents), color='black', linewidth=0, elinewidth=1) + + ax.set_xscale("log") + ax.set_yscale("log") + + ax.set_xlabel("Energy (keV)") + ax.set_ylabel("Counts") + + ax.legend() + + plt.show() + + + # Plot the fitted spectrum convolved with the response plus the fitted background, as well as the simulated source+background counts + + # In[16]: + + expectation_bkg = bkg.expectation(data, copy = True) + + fig,ax = plt.subplots() + + ax.stairs(expectation.project('Em').todense().contents + expectation_bkg.project('Em').todense().contents, binned_energy_edges, color='purple', label = "Best fit convolved with response plus background") + ax.errorbar(binned_energy, 
expectation.project('Em').todense().contents+expectation_bkg.project('Em').todense().contents, yerr=np.sqrt(expectation.project('Em').todense().contents+expectation_bkg.project('Em').todense().contents), color='purple', linewidth=0, elinewidth=1) + ax.stairs(crab_bkg.binned_data.project('Em').todense().contents, binned_energy_edges, color = 'black', ls = ":", label = "Total counts") + ax.errorbar(binned_energy, crab_bkg.binned_data.project('Em').todense().contents, yerr=np.sqrt(crab_bkg.binned_data.project('Em').todense().contents), color='black', linewidth=0, elinewidth=1) + + ax.set_xscale("log") + ax.set_yscale("log") + + ax.set_xlabel("Energy (keV)") + ax.set_ylabel("Counts") + + ax.legend() + + plt.show() + + +if __name__ == "__main__": + + import cProfile + cProfile.run('main()', filename = "prof_interfaces.prof") + exit() + + main() \ No newline at end of file diff --git a/docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py old mode 100644 new mode 100755 similarity index 100% rename from docs/api/interfaces/example_grb_fit_threeml_plugin_interfaces.py rename to docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py From 5c1013d29572a83c8d04349a9866103e64e6bda3 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Fri, 18 Jul 2025 16:27:06 -0400 Subject: [PATCH 062/133] Also move the toy example Signed-off-by: Israel Martinez --- docs/api/interfaces/{ => examples/toy}/toy_interfaces_example.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename docs/api/interfaces/{ => examples/toy}/toy_interfaces_example.py (100%) diff --git a/docs/api/interfaces/toy_interfaces_example.py b/docs/api/interfaces/examples/toy/toy_interfaces_example.py similarity index 100% rename from docs/api/interfaces/toy_interfaces_example.py rename to docs/api/interfaces/examples/toy/toy_interfaces_example.py From f50f7bfa5f891f3f18466de49ecc8e3e95f42f94 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Mon, 21 Jul 2025 17:49:33 -0400 Subject: [PATCH 063/133] Pass around the BinnedDataInterface implementation instead of the bare axes Signed-off-by: Israel Martinez --- cosipy/interfaces/instrument_response_interface.py | 7 ++++--- cosipy/response/PointSourceResponse.py | 12 ++++++++---- cosipy/response/instrument_response.py | 13 ++++++++++--- cosipy/response/threeml_point_source_response.py | 2 +- 4 files changed, 23 insertions(+), 11 deletions(-) diff --git a/cosipy/interfaces/instrument_response_interface.py b/cosipy/interfaces/instrument_response_interface.py index a465ea01..61adc065 100644 --- a/cosipy/interfaces/instrument_response_interface.py +++ b/cosipy/interfaces/instrument_response_interface.py @@ -7,6 +7,7 @@ from astropy import units as u from scoords import Attitude +from cosipy.interfaces import BinnedDataInterface from cosipy.polarization import PolarizationAngle __all__ = ["BinnedInstrumentResponseInterface"] @@ -14,7 +15,7 @@ class BinnedInstrumentResponseInterface(Protocol): def differential_effective_area(self, - axes: Axes, + data: BinnedDataInterface, direction: SkyCoord, energy:u.Quantity, polarization:PolarizationAngle, @@ -26,8 +27,8 @@ def differential_effective_area(self, Parameters ---------- - axes: - Measured axes + data: + Binned data direction: Photon incoming direction. 
If not in a SpacecraftFrame, then provide an attitude for the transformation energy: diff --git a/cosipy/response/PointSourceResponse.py b/cosipy/response/PointSourceResponse.py index 753ba122..d6c4aa3f 100644 --- a/cosipy/response/PointSourceResponse.py +++ b/cosipy/response/PointSourceResponse.py @@ -4,7 +4,7 @@ from cosipy.polarization.polarization_axis import PolarizationAxis from cosipy.threeml.util import to_linear_polarization from mhealpy import HealpixMap -from cosipy.interfaces import BinnedInstrumentResponseInterface +from cosipy.interfaces import BinnedInstrumentResponseInterface, BinnedDataInterface from histpy import Histogram, Axis, Axes # , Axes, Axis import numpy as np @@ -16,6 +16,7 @@ import logging from cosipy.spacecraftfile import SpacecraftAttitudeMap +from ..data_io import EmCDSBinnedData logger = logging.getLogger(__name__) @@ -157,7 +158,7 @@ def from_dwell_time_map(cls, @classmethod def from_scatt_map(cls, coord: SkyCoord, - measured_axes:Axes, + data:BinnedDataInterface, response: BinnedInstrumentResponseInterface, scatt_map: SpacecraftAttitudeMap, energy_axis: Axis, @@ -178,12 +179,15 @@ def from_scatt_map(cls, """ + if not isinstance(data, EmCDSBinnedData): + raise TypeError(f"Wrong data type '{type(data)}', expected {EmCDSBinnedData}.") + axes = [energy_axis] if polarization_axis is not None: axes += [polarization_axis] - axes += list(measured_axes) + axes += list(data.axes) axes = Axes(axes) psr = Quantity(np.empty(shape=axes.shape), unit = u.cm * u.cm * u.s) @@ -196,7 +200,7 @@ def from_scatt_map(cls, y=scatt_map.axes['y'].pix2skycoord(pixels[1])) - response.differential_effective_area(measured_axes, + response.differential_effective_area(data, coord, energy_axis.centers, None if polarization_axis is None else polarization_axis.centers, diff --git a/cosipy/response/instrument_response.py b/cosipy/response/instrument_response.py index 7f8e458e..70136060 100644 --- a/cosipy/response/instrument_response.py +++ b/cosipy/response/instrument_response.py @@ -6,6 +6,8 @@ from astropy.units import Quantity from scoords import Attitude, SpacecraftFrame +from cosipy.data_io import EmCDSBinnedData +from cosipy.interfaces import BinnedDataInterface from cosipy.interfaces.instrument_response_interface import BinnedInstrumentResponseInterface from cosipy.polarization import PolarizationAngle, PolarizationAxis @@ -27,7 +29,7 @@ def is_polarization_response(self): return 'Pol' in self._dr.axes.labels def differential_effective_area(self, - axes: Axes, + data: BinnedDataInterface, direction: SkyCoord, energy:u.Quantity, polarization:PolarizationAngle = None, @@ -44,8 +46,8 @@ def differential_effective_area(self, Parameters ---------- - axes: - Measured axes + data + Binned measurements. We can only handle EmCDSBinnedData direction: Photon incoming direction in SC coordinates energy: @@ -71,6 +73,11 @@ def differential_effective_area(self, """ # Check if we're getting the expected axes and other limitations + if not isinstance(data, EmCDSBinnedData): + raise TypeError(f"Wrong data type '{type(data)}', expected {EmCDSBinnedData}.") + + axes = data.axes + if set(axes.labels) != {'Em','PsiChi','Phi'}: raise ValueError(f"Unexpected axes labels. 
Expecting \"{{'Em','PsiChi','Phi'}}\", got {axes.labels}") diff --git a/cosipy/response/threeml_point_source_response.py b/cosipy/response/threeml_point_source_response.py index c66fece6..79b279c5 100644 --- a/cosipy/response/threeml_point_source_response.py +++ b/cosipy/response/threeml_point_source_response.py @@ -202,7 +202,7 @@ def expectation(self, data:BinnedDataInterface, copy = True)-> Histogram: earth_occ=True) self._psr = PointSourceResponse.from_scatt_map(coord, - data.axes, + data, self._response, scatt_map, self._energy_axis, From a983e47cf75751d902277a0e1d6ccbac772c3d67 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Mon, 21 Jul 2025 18:16:32 -0400 Subject: [PATCH 064/133] Remove the need for a full direction axis for a POINTSourceResponse Signed-off-by: Israel Martinez --- .../response/threeml_point_source_response.py | 37 +++++-------------- ...mple_crab_fit_threeml_plugin_interfaces.py | 4 +- 2 files changed, 12 insertions(+), 29 deletions(-) diff --git a/cosipy/response/threeml_point_source_response.py b/cosipy/response/threeml_point_source_response.py index 79b279c5..e95083e1 100644 --- a/cosipy/response/threeml_point_source_response.py +++ b/cosipy/response/threeml_point_source_response.py @@ -41,10 +41,9 @@ class BinnedThreeMLPointSourceResponse(BinnedThreeMLSourceResponseInterface): def __init__(self, instrument_response: BinnedInstrumentResponseInterface, sc_history: SpacecraftHistory, - direction_axis: HealpixAxis, energy_axis:Axis, polarization_axis:PolarizationAxis = None, - scattmap_nside = None + nside = None ): """ @@ -56,20 +55,15 @@ def __init__(self, polarization. sc_history: The SpacecraftHistory describing the SC orbit and attitude vs time. - direction_axis: - The desired effective binning of the photon direction (aka NuLamda). - This also determines the coordinate system. If the coordinate system is - inertial, then internally the `instrument_response` will be rotated - from local coordinate based on the `sc_history` information vs time. energy_axis: The desired effective binning of the photon energy (aka Ei) polarization_axis: The desired effective binning of the photon polarization angle (aka Pol). This also defined the polarization coordinate system and convention. - scattmap_nside: - If transformation from local to an inertial system is needed, the spacecraft - attitude will be first discretized based on this nside. Default: twice the - nside of direction_axis. + nside: + - If transformation from local to an inertial system is needed, the spacecraft + attitude will be first discretized based on this nside. + - If local, this is the nside of the dwell time map """ # TODO: FullDetectorResponse -> BinnedInstrumentResponseInterface @@ -96,18 +90,10 @@ def __init__(self, self._psr = None self._response = instrument_response - self._direction_axis = direction_axis self._energy_axis = energy_axis self._polarization_axis = polarization_axis - if scattmap_nside is None: - self._scattmap_nside = 2*self._direction_axis.nside - else: - self._scattmap_nside = scattmap_nside - - @property - def coordsys(self): - return self._direction_axis.coordsys + self._nside = nside def clear_cache(self): @@ -177,15 +163,12 @@ def expectation(self, data:BinnedDataInterface, copy = True)-> Histogram: coordsys = data.axes["PsiChi"].coordsys - if coordsys != self.coordsys: - raise ValueError(f"Coordinate system mismatch. Data has {coordsys} while this class has {self.coordsys}.") - logger.info("... 
Calculating point source response ...") - if isinstance(self.coordsys, SpacecraftFrame): + if isinstance(coordsys, SpacecraftFrame): # Local coordinates - dwell_time_map = self._sc_ori.get_dwell_map(coord, base=self._direction_axis) + dwell_time_map = self._sc_ori.get_dwell_map(coord, nside = self._nside) self._psr = PointSourceResponse.from_dwell_time_map(data.axes, self._response, @@ -196,9 +179,9 @@ def expectation(self, data:BinnedDataInterface, copy = True)-> Histogram: else: # Inertial e..g. galactic - scatt_map = self._sc_ori.get_scatt_map(nside=self._scattmap_nside, + scatt_map = self._sc_ori.get_scatt_map(nside=self._nside, target_coord=coord, - coordsys=self._direction_axis.coordsys, + coordsys=coordsys, earth_occ=True) self._psr = PointSourceResponse.from_scatt_map(coord, diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py index 342d609b..c55dce4b 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py @@ -197,9 +197,9 @@ def main(): # NOTE: this is currently only implemented for data in local coords psr = BinnedThreeMLPointSourceResponse(instrument_response, sc_history=sc_orientation, - direction_axis = data.axes['PsiChi'], energy_axis = dr.axes['Ei'], - polarization_axis = dr.axes['Pol'] if 'Pol' in dr.axes.labels else None) + polarization_axis = dr.axes['Pol'] if 'Pol' in dr.axes.labels else None, + nside = 2*data.axes['PsiChi'].nside) ##==== From dca817d3e740e71e1c79d619970c5130c29edaad Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Thu, 24 Jul 2025 15:12:39 -0400 Subject: [PATCH 065/133] Unbinned data interface Signed-off-by: Israel Martinez --- cosipy/data_io/BinnedData.py | 24 +++++- cosipy/interfaces/data_interface.py | 80 +++++++++++++++-- cosipy/interfaces/expectation_interface.py | 6 +- cosipy/interfaces/likelihood_interface.py | 2 +- cosipy/interfaces/measurements.py | 86 ++++++++++++++++++- .../examples/toy/toy_interfaces_example.py | 20 +++-- 6 files changed, 198 insertions(+), 20 deletions(-) diff --git a/cosipy/data_io/BinnedData.py b/cosipy/data_io/BinnedData.py index 8249c4a0..b0c72fbd 100644 --- a/cosipy/data_io/BinnedData.py +++ b/cosipy/data_io/BinnedData.py @@ -13,7 +13,8 @@ import astropy.units as u from astropy.coordinates import SkyCoord -from cosipy.interfaces import BinnedDataInterface +from cosipy.interfaces import BinnedDataInterface, QuantityMeasurement, AngleMeasurement, SkyCoordMeasurement +from cosipy.interfaces.data_interface import EventData logger = logging.getLogger(__name__) @@ -534,4 +535,23 @@ def data(self) -> Histogram: return self._data @property def axes(self) -> Axes: - return self._data.axes \ No newline at end of file + return self._data.axes + +class EmCDSEventData(EventData): + + def __init__(self, + energy:QuantityMeasurement, + scattering_angle:AngleMeasurement, + scattering_direction:SkyCoordMeasurement): + + if energy.label != "Em": + raise ValueError("The measured energy is expected to be labeled 'Em'") + + if scattering_angle.label != "Phi": + raise ValueError("The scattering angle is expected to be labeled 'Phi'") + + if scattering_direction.label != "PsiChi": + raise ValueError("The scattering direction is expected to be labeled 'PsiChi'") + + super().__init__(energy, scattering_angle, scattering_direction) + diff --git a/cosipy/interfaces/data_interface.py 
b/cosipy/interfaces/data_interface.py index d32225f4..943ff7bc 100644 --- a/cosipy/interfaces/data_interface.py +++ b/cosipy/interfaces/data_interface.py @@ -1,13 +1,13 @@ -from typing import Protocol, runtime_checkable, Dict, Type, Any +from typing import Protocol, runtime_checkable, Dict, Type, Any, Tuple, Iterator from histpy import Histogram, Axes -from .measurements import Measurements +from .measurements import Measurement import histpy __all__ = ["DataInterface", - "UnbinnedDataInterface", + "EventDataInterface", "BinnedDataInterface"] @runtime_checkable @@ -17,9 +17,79 @@ class DataInterface(Protocol): """ @runtime_checkable -class UnbinnedDataInterface(DataInterface, Protocol): +class EventDataInterface(DataInterface, Protocol): + + def __getitem__(self, item) -> Tuple:... + + def __iter__(self) -> Iterator[Tuple]:... + + @property + def nevents(self) -> int:... + + @property + def labels(self) -> Tuple[str]:... + + @property + def types(self) -> Tuple[type]:... + + @property + def nvars(self) -> int:... + +class EventData(EventDataInterface): + """ + Generic event data from measurement + """ + + def __init__(self, *data:Measurement): + + # Check shame + size = None + + for data_i in data: + + if size is None: + size = data_i.size + else: + if size != data_i.size: + raise ValueError("All measurement arrays must have the same size") + + self._nevents = size + self._events = data + self._labels = tuple([d.label for d in data]) + self._types = tuple([type(d) for d in data]) + self._value_types = tuple([d.value_type for d in data]) + + def __getitem__(self, item): + + if isinstance(item, str): + return self._events[self._labels.index(item)] + elif isinstance(item, int): + return tuple([d[item] for d in self._events]) + else: + raise TypeError("Index must be either a measurement label or an entry position.") + + def __iter__(self): + return zip(self._events) + + @property + def nevents(self): + return self._nevents + + @property + def labels(self): + return self._labels + + @property + def types(self): + return self._types + + @property + def value_types(self): + return self._value_types + @property - def measurements(self) -> Measurements:... + def nvars(self) -> int: + return len(self._events) @runtime_checkable class BinnedDataInterface(DataInterface, Protocol): diff --git a/cosipy/interfaces/expectation_interface.py b/cosipy/interfaces/expectation_interface.py index 635c2c03..70ac0e18 100644 --- a/cosipy/interfaces/expectation_interface.py +++ b/cosipy/interfaces/expectation_interface.py @@ -2,9 +2,7 @@ import histpy import numpy as np -from cosipy.interfaces import BinnedDataInterface - -from .measurements import Measurements +from cosipy.interfaces import BinnedDataInterface, EventDataInterface __all__ = [ "UnbinnedExpectationInterface", @@ -36,7 +34,7 @@ def expectation(self, data:BinnedDataInterface, copy:bool)->histpy.Histogram: class UnbinnedExpectationInterface(ExpectationInterface, Protocol): @property def ncounts(self) -> float:... - def probability(self, measurements:Measurements) -> np.ndarray:... + def probability(self, data:EventDataInterface) -> np.ndarray:... 
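A minimal usage sketch of the generic EventData container added above, assuming the FloatingMeasurement wrapper defined in this patch's measurements.py changes below and imported as in the toy example; labels and values are illustrative only:

import numpy as np

from cosipy.interfaces import FloatingMeasurement
from cosipy.interfaces.data_interface import EventData

# Two labeled measurements with the same number of events (illustrative values).
energy = FloatingMeasurement(np.array([510.0, 661.7, 1274.5]), 'Em')
phi = FloatingMeasurement(np.array([0.3, 0.8, 1.1]), 'Phi')

events = EventData(energy, phi)

events.nevents    # 3
events.labels     # ('Em', 'Phi')
events['Em'][1]   # 661.7 -- one measurement selected by label, then by event index
events[0]         # (510.0, 0.3) -- all measurements of the first event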
diff --git a/cosipy/interfaces/likelihood_interface.py b/cosipy/interfaces/likelihood_interface.py index 7bd86ab0..d158266b 100644 --- a/cosipy/interfaces/likelihood_interface.py +++ b/cosipy/interfaces/likelihood_interface.py @@ -5,7 +5,7 @@ 'UnbinnedLikelihoodInterface'] from .expectation_interface import UnbinnedExpectationInterface, BinnedExpectationInterface, ExpectationInterface -from .data_interface import UnbinnedDataInterface, BinnedDataInterface, DataInterface +from .data_interface import BinnedDataInterface, DataInterface from .background_interface import UnbinnedBackgroundInterface, BinnedBackgroundInterface, BackgroundInterface @runtime_checkable diff --git a/cosipy/interfaces/measurements.py b/cosipy/interfaces/measurements.py index cbfec53c..b4c81ed0 100644 --- a/cosipy/interfaces/measurements.py +++ b/cosipy/interfaces/measurements.py @@ -1,7 +1,87 @@ +from abc import ABC, abstractmethod +from typing import Tuple, Type, TypeVar, Generic, ClassVar + +import numpy as np +from astropy.coordinates import SkyCoord, Angle +from astropy.units import Quantity + +class Measurement(ABC): + + def __init__(self, label:str, *args, **kwargs): + self._label = label + + @property + def label(self) -> str: + return self._label + + @property + @abstractmethod + def size(self) -> int:... + + @property + @abstractmethod + def value_type(self) -> Type:... + + @abstractmethod + def __getitem__(self, item:int):... + + @abstractmethod + def __iter__(self):... + +T = TypeVar('T') +t = TypeVar('t') + +class ArrayLikeMeasurement(Measurement, Generic[T,t]): + """ + Data already implements and iterable, [] and size + """ + + _value_type = ClassVar[type] + + def __init__(self, data:T, label:str, *args, **kwargs): + + self._data = data + super().__init__(label) -class Measurements: - # Dummy for now @property def size(self) -> int: - return 10 + return self._data.size + + @property + def value_type(self) -> Type: + return self._value_type + + def __getitem__(self, item:int) -> t: + return self._data[item] + + def __iter__(self) -> t: + return self._data.__iter__() + +class QuantityMeasurement(ArrayLikeMeasurement[Quantity, Quantity]): + """ + """ + _value_type = Quantity + +class SkyCoordMeasurement(ArrayLikeMeasurement[SkyCoord, SkyCoord]): + """ + + """ + _value_type = SkyCoord + +class AngleMeasurement(ArrayLikeMeasurement[Angle, Angle]): + """ + + """ + _value_type = Angle + +class FloatingMeasurement(ArrayLikeMeasurement[np.ndarray, np.floating]): + + _value_type = np.floating + + def __init__(self, data: np.ndarray, label: str, *args, **kwargs): + + if not np.issubdtype(data.dtype, np.floating): + raise TypeError("This class expect float or double types") + self._data = data + super().__init__(data, label) diff --git a/docs/api/interfaces/examples/toy/toy_interfaces_example.py b/docs/api/interfaces/examples/toy/toy_interfaces_example.py index 99b482ab..17308be8 100644 --- a/docs/api/interfaces/examples/toy/toy_interfaces_example.py +++ b/docs/api/interfaces/examples/toy/toy_interfaces_example.py @@ -5,6 +5,7 @@ from astromodels.core.polarization import Polarization import astropy.units as u from cosipy import SpacecraftHistory +from cosipy.interfaces.data_interface import EventData from cosipy.statistics import PoissonLikelihood @@ -12,7 +13,7 @@ BinnedBackgroundInterface, BinnedThreeMLModelFoldingInterface, BinnedThreeMLSourceResponseInterface, - ThreeMLPluginInterface, BackgroundInterface) + ThreeMLPluginInterface, BackgroundInterface, FloatingMeasurement) from histpy import Axis, Axes, Histogram 
import numpy as np from scipy.stats import norm, uniform @@ -43,7 +44,7 @@ nevents_signal = 1000 nevents_bkg = 1000 -class ToyData(BinnedDataInterface): +class ToyData(BinnedDataInterface, EventData): # Random data. Normal signal on top of uniform bkg # Since the interfaces are Protocols, they don't *have* # to derive from the base class, but doing some helps @@ -53,10 +54,20 @@ def __init__(self): self._data = Histogram(toy_axis) # Signal - self._data.fill(norm.rvs(size=nevents_signal)) + event_data = norm.rvs(size=nevents_signal) # Bkg - self._data.fill(uniform.rvs(toy_axis.lo_lim, toy_axis.hi_lim-toy_axis.lo_lim, size=nevents_bkg)) + bkg_event_data = uniform.rvs(toy_axis.lo_lim, toy_axis.hi_lim-toy_axis.lo_lim, size=nevents_bkg) + + # Join + event_data = np.append(event_data, bkg_event_data) + + # Binned + self._data.fill(event_data) + + #Unbinned + measurements = FloatingMeasurement(event_data, 'x') + EventData.__init__(self, measurements) @property def data(self) -> Histogram: @@ -66,7 +77,6 @@ def data(self) -> Histogram: def axes(self) -> Axes: return self._data.axes - class ToyBkg(BinnedBackgroundInterface): """ Models a uniform background From fcdca5fe4595dcda9185ce759d219fa3729b4bce Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Fri, 25 Jul 2025 17:38:29 -0400 Subject: [PATCH 066/133] Machinery for unbinned toy example is now working Signed-off-by: Israel Martinez --- cosipy/interfaces/background_interface.py | 6 +- cosipy/interfaces/data_interface.py | 9 +- cosipy/interfaces/expectation_interface.py | 22 ++++- cosipy/interfaces/likelihood_interface.py | 12 +-- cosipy/interfaces/measurements.py | 5 + .../interfaces/source_response_interface.py | 6 +- cosipy/statistics/likelihood_functions.py | 84 ++++++++++++++--- .../examples/toy/toy_interfaces_example.py | 93 +++++++++++++++++-- 8 files changed, 198 insertions(+), 39 deletions(-) diff --git a/cosipy/interfaces/background_interface.py b/cosipy/interfaces/background_interface.py index da655e1f..8c8cf060 100644 --- a/cosipy/interfaces/background_interface.py +++ b/cosipy/interfaces/background_interface.py @@ -10,12 +10,12 @@ logger = logging.getLogger(__name__) -from .expectation_interface import BinnedExpectationInterface, UnbinnedExpectationInterface +from .expectation_interface import BinnedExpectationInterface, ExpectationDensityInterface __all__ = [ "BackgroundInterface", "BinnedBackgroundInterface", - "UnbinnedBackgroundInterface", + "BackgroundDensityInterface", ] @runtime_checkable @@ -31,7 +31,7 @@ class BinnedBackgroundInterface(BackgroundInterface, BinnedExpectationInterface, """ @runtime_checkable -class UnbinnedBackgroundInterface(BackgroundInterface, UnbinnedExpectationInterface, Protocol): +class BackgroundDensityInterface(BackgroundInterface, ExpectationDensityInterface, Protocol): """ No new methods, just the inherited one """ diff --git a/cosipy/interfaces/data_interface.py b/cosipy/interfaces/data_interface.py index 943ff7bc..dff40f4d 100644 --- a/cosipy/interfaces/data_interface.py +++ b/cosipy/interfaces/data_interface.py @@ -1,4 +1,4 @@ -from typing import Protocol, runtime_checkable, Dict, Type, Any, Tuple, Iterator +from typing import Protocol, runtime_checkable, Dict, Type, Any, Tuple, Iterator, Union from histpy import Histogram, Axes @@ -19,7 +19,12 @@ class DataInterface(Protocol): @runtime_checkable class EventDataInterface(DataInterface, Protocol): - def __getitem__(self, item) -> Tuple:... 
+ def __getitem__(self, item) -> Union[Tuple, Measurement]: + """ + If item is: + - str: the value of specific measurement for all events + - int: all measurements for an specific event + """ def __iter__(self) -> Iterator[Tuple]:... diff --git a/cosipy/interfaces/expectation_interface.py b/cosipy/interfaces/expectation_interface.py index 70ac0e18..1a0468fb 100644 --- a/cosipy/interfaces/expectation_interface.py +++ b/cosipy/interfaces/expectation_interface.py @@ -5,7 +5,7 @@ from cosipy.interfaces import BinnedDataInterface, EventDataInterface __all__ = [ - "UnbinnedExpectationInterface", + "ExpectationDensityInterface", "BinnedExpectationInterface" ] @@ -31,10 +31,24 @@ def expectation(self, data:BinnedDataInterface, copy:bool)->histpy.Histogram: """ @runtime_checkable -class UnbinnedExpectationInterface(ExpectationInterface, Protocol): - @property +class ExpectationDensityInterface(ExpectationInterface, Protocol): def ncounts(self) -> float:... - def probability(self, data:EventDataInterface) -> np.ndarray:... + def expectation_density(self, data:EventDataInterface, copy:bool) -> np.ndarray: + """ + + + Parameters + ---------- + data + copy: + If True, it will return an array that the user if free to modify. + Otherwise, it will result a reference, possible to the cache, that + the user should not modify + + Returns + ------- + + """ diff --git a/cosipy/interfaces/likelihood_interface.py b/cosipy/interfaces/likelihood_interface.py index d158266b..249099cc 100644 --- a/cosipy/interfaces/likelihood_interface.py +++ b/cosipy/interfaces/likelihood_interface.py @@ -4,9 +4,9 @@ 'BinnedLikelihoodInterface', 'UnbinnedLikelihoodInterface'] -from .expectation_interface import UnbinnedExpectationInterface, BinnedExpectationInterface, ExpectationInterface -from .data_interface import BinnedDataInterface, DataInterface -from .background_interface import UnbinnedBackgroundInterface, BinnedBackgroundInterface, BackgroundInterface +from .expectation_interface import ExpectationDensityInterface, BinnedExpectationInterface, ExpectationInterface +from .data_interface import BinnedDataInterface, DataInterface, EventDataInterface +from .background_interface import BackgroundDensityInterface, BinnedBackgroundInterface, BackgroundInterface @runtime_checkable class LikelihoodInterface(Protocol): @@ -47,14 +47,14 @@ class UnbinnedLikelihoodInterface(LikelihoodInterface, Protocol): Needs to check that data, response and bkg are compatible """ def set_data(self, data: DataInterface): - if not isinstance(data, UnbinnedDataInterface): + if not isinstance(data, EventDataInterface): raise TypeError("Incorrect data type for unbinned likelihood.") def set_response(self, response: ExpectationInterface): - if not isinstance(response, UnbinnedExpectationInterface): + if not isinstance(response, ExpectationDensityInterface): raise TypeError("Incorrect data type for unbinned likelihood.") def set_background(self, bkg: BackgroundInterface): - if not isinstance(bkg, UnbinnedBackgroundInterface): + if not isinstance(bkg, BackgroundDensityInterface): raise TypeError("Incorrect background type for unbinned likelihood.") diff --git a/cosipy/interfaces/measurements.py b/cosipy/interfaces/measurements.py index b4c81ed0..f135a5d9 100644 --- a/cosipy/interfaces/measurements.py +++ b/cosipy/interfaces/measurements.py @@ -57,6 +57,11 @@ def __getitem__(self, item:int) -> t: def __iter__(self) -> t: return self._data.__iter__() + @property + def data(self) -> T: + return self._data + + class 
QuantityMeasurement(ArrayLikeMeasurement[Quantity, Quantity]): """ """ diff --git a/cosipy/interfaces/source_response_interface.py b/cosipy/interfaces/source_response_interface.py index 7ccbe83e..5474cc20 100644 --- a/cosipy/interfaces/source_response_interface.py +++ b/cosipy/interfaces/source_response_interface.py @@ -2,7 +2,7 @@ from astromodels import Model from astromodels.sources import Source -from .expectation_interface import BinnedExpectationInterface, UnbinnedExpectationInterface +from .expectation_interface import BinnedExpectationInterface, ExpectationDensityInterface from cosipy.spacecraftfile import SpacecraftHistory @@ -23,7 +23,7 @@ def set_model(self, model: Model): """ @runtime_checkable -class UnbinnedThreeMLModelFoldingInterface(UnbinnedExpectationInterface, ThreeMLModelFoldingInterface, Protocol): +class UnbinnedThreeMLModelFoldingInterface(ThreeMLModelFoldingInterface, ExpectationDensityInterface, Protocol): """ No new methods. Just the inherited ones. """ @@ -59,7 +59,7 @@ def copy(self) -> "ThreeMLSourceResponseInterface": """ @runtime_checkable -class UnbinnedThreeMLSourceResponseInterface(UnbinnedExpectationInterface, ThreeMLSourceResponseInterface, Protocol): +class UnbinnedThreeMLSourceResponseInterface(ThreeMLSourceResponseInterface, ExpectationDensityInterface, Protocol): """ No new methods. Just the inherited ones. """ diff --git a/cosipy/statistics/likelihood_functions.py b/cosipy/statistics/likelihood_functions.py index db9ef6e2..05fa63e0 100644 --- a/cosipy/statistics/likelihood_functions.py +++ b/cosipy/statistics/likelihood_functions.py @@ -1,6 +1,6 @@ import logging -from cosipy.interfaces.expectation_interface import ExpectationInterface +from cosipy.interfaces.expectation_interface import ExpectationInterface, ExpectationDensityInterface logger = logging.getLogger(__name__) @@ -8,7 +8,8 @@ UnbinnedLikelihoodInterface, BinnedDataInterface, BinnedExpectationInterface, - BinnedBackgroundInterface, DataInterface, BackgroundInterface, + BinnedBackgroundInterface, DataInterface, BackgroundInterface, EventDataInterface, + BackgroundDensityInterface, ) import numpy as np @@ -17,7 +18,68 @@ 'PoissonLikelihood'] class UnbinnedLikelihood(UnbinnedLikelihoodInterface): - ... + def __init__(self): + + self._data = None + self._bkg = None + self._response = None + + def set_data(self, data: DataInterface): + super().set_data(data) # Checks type + self._data = data + + def set_response(self, response: ExpectationInterface): + super().set_response(response) # Checks type + self._response = response + + def set_background(self, bkg: BackgroundInterface): + super().set_background(bkg) # Checks type + self._bkg = bkg + + @property + def data (self) -> EventDataInterface: return self._data + @property + def response(self) -> ExpectationDensityInterface: return self._response + @property + def bkg (self) -> BackgroundDensityInterface: return self._bkg + + @property + def has_bkg(self): + return self._bkg is not None + + @property + def nobservations(self) -> int: + + if self._data is None: + raise RuntimeError("Set the data before calling this function.") + + return self._data.nevents + + def get_log_like(self) -> float: + + if self._data is None or self._response is None: + raise RuntimeError("Set data and response before calling this function.") + + # Compute expectation including background + + ntot = self._response.ncounts() + + # If we don't have background, we won't modify the expectation, so + # it's safe to use the internal cache. 
+ density = self._response.expectation_density(self._data, copy = self.has_bkg) + + if self.has_bkg: + + ntot += self._bkg.ncounts() + + # We won't modify the bkg expectation, so it's safe to use the internal cache + density += self._bkg.expectation_density(self._data, copy = False) + + # Compute the log-likelihood: + log_like = np.sum(np.log(density)) - ntot + + return log_like + class PoissonLikelihood(BinnedLikelihoodInterface): def __init__(self): @@ -49,6 +111,13 @@ def bkg (self) -> BinnedBackgroundInterface: return self._bkg def has_bkg(self): return self._bkg is not None + @property + def nobservations(self) -> int: + if self._data is None: + raise RuntimeError("Set the data before calling this function.") + + return self._data.data.contents.size + def get_log_like(self) -> float: if self._data is None or self._response is None: @@ -72,12 +141,3 @@ def get_log_like(self) -> float: return log_like - @property - def nobservations(self) -> int: - - if self._data is None: - raise RuntimeError("Set the data before calling this function.") - - return self._data.data.contents.size - - diff --git a/docs/api/interfaces/examples/toy/toy_interfaces_example.py b/docs/api/interfaces/examples/toy/toy_interfaces_example.py index 17308be8..124e45e6 100644 --- a/docs/api/interfaces/examples/toy/toy_interfaces_example.py +++ b/docs/api/interfaces/examples/toy/toy_interfaces_example.py @@ -5,15 +5,18 @@ from astromodels.core.polarization import Polarization import astropy.units as u from cosipy import SpacecraftHistory -from cosipy.interfaces.data_interface import EventData +from cosipy.interfaces.background_interface import BackgroundDensityInterface +from cosipy.interfaces.data_interface import EventData, EventDataInterface + +from cosipy.statistics import PoissonLikelihood, UnbinnedLikelihood -from cosipy.statistics import PoissonLikelihood from cosipy.interfaces import (BinnedDataInterface, BinnedBackgroundInterface, BinnedThreeMLModelFoldingInterface, BinnedThreeMLSourceResponseInterface, - ThreeMLPluginInterface, BackgroundInterface, FloatingMeasurement) + ThreeMLPluginInterface, BackgroundInterface, FloatingMeasurement, + UnbinnedThreeMLSourceResponseInterface, UnbinnedThreeMLModelFoldingInterface) from histpy import Axis, Axes, Histogram import numpy as np from scipy.stats import norm, uniform @@ -77,7 +80,7 @@ def data(self) -> Histogram: def axes(self) -> Axes: return self._data.axes -class ToyBkg(BinnedBackgroundInterface): +class ToyBkg(BinnedBackgroundInterface, BackgroundDensityInterface): """ Models a uniform background """ @@ -87,9 +90,19 @@ def __init__(self): self._unit_expectation[:] = 1 / self._unit_expectation.nbins self._norm = 1 + # Doesn't need to be normalized + self._unit_expectation_density = np.broadcast_to(1/(toy_axis.hi_lim - toy_axis.lo_lim), data.nevents) + def set_parameters(self, **parameters:u.Quantity) -> None: self._norm = parameters['norm'].value + def ncounts(self) -> float: + return self._norm + + def expectation_density(self, data: EventDataInterface, copy: bool = True) -> np.ndarray: + #Always a copy + return self._norm*self._unit_expectation_density + @property def parameters(self) -> Dict[str, u.Quantity]: return {'norm': u.Quantity(self._norm)} @@ -105,7 +118,7 @@ def expectation(self, data: BinnedDataInterface, copy = True) -> Histogram: # Always a copy return self._unit_expectation * self._norm -class ToyPointSourceResponse(BinnedThreeMLSourceResponseInterface): +class ToyPointSourceResponse(BinnedThreeMLSourceResponseInterface, 
UnbinnedThreeMLSourceResponseInterface): """ This models a Gaussian signal in 1D, centered at 0 and with std = 1. The normalization --the "flux"-- is the only free parameters @@ -116,6 +129,27 @@ def __init__(self): self._unit_expectation = Histogram(toy_axis, contents=np.diff(norm.cdf(toy_axis.edges))) + def ncounts(self) -> float: + + if self._source is None: + raise RuntimeError("Set a source first") + + # Get the latest values of the flux + # Remember that _model can be modified externally between calls. + ns_events = self._source.spectrum.main.shape.k.value + return ns_events + + def expectation_density(self, data:EventDataInterface, copy:bool) -> np.ndarray: + + if not isinstance(data, ToyData): + raise TypeError(f"This class only support data of type {ToyData}") + + # I expect in the real case it'll be more efficient to compute + # (ncounts, ncounts*prob) than (ncounts, prob) + + # Always copies + return self.ncounts()*norm.pdf(data['x'].data) + def set_source(self, source: Source): if not isinstance(source, PointSource): @@ -136,17 +170,17 @@ def expectation(self, data: BinnedDataInterface, copy = True) -> Histogram: # Get the latest values of the flux # Remember that _model can be modified externally between calls. - flux = self._source.spectrum.main.shape.k.value + ns_events = self._source.spectrum.main.shape.k.value # Always copies - return self._unit_expectation * flux + return self._unit_expectation * ns_events def copy(self) -> "ToyPointSourceResponse": # We are not caching any results, so it's safe to do shallow copy without # re-initializing any member. return copy.copy(self) -class ToyModelFolding(BinnedThreeMLModelFoldingInterface): +class ToyModelFolding(BinnedThreeMLModelFoldingInterface, UnbinnedThreeMLModelFoldingInterface): def __init__(self, psr: BinnedThreeMLSourceResponseInterface): @@ -156,6 +190,28 @@ def __init__(self, psr: BinnedThreeMLSourceResponseInterface): self._psr = psr self._psr_copies = {} + def ncounts(self) -> float: + + ncounts = 0 + + for source_name,psr in self._psr_copies.items(): + ncounts += psr.ncounts() + + return ncounts + + def expectation_density(self, data: EventDataInterface, copy:bool = True) -> np.ndarray: + + if not isinstance(data, ToyData): + raise TypeError(f"This class only support data of type {ToyData}") + + expectation = np.zeros(data.nevents) + + for source_name, psr in self._psr_copies.items(): + expectation += psr.expectation_density(data, copy=False) + + # Always a copy + return expectation + def set_model(self, model: Model): self._psr_copies = {} @@ -214,7 +270,10 @@ def expectation(self, data: BinnedDataInterface, copy = True) -> Histogram: #model = Model() # Uncomment for bkg-only hypothesis # Fit -like_fun = PoissonLikelihood() +# Uncomment one. 
Either one works +#like_fun = PoissonLikelihood() +like_fun = UnbinnedLikelihood() + like_fun.set_data(data) like_fun.set_response(response) like_fun.set_background(bkg) @@ -239,3 +298,19 @@ def expectation(self, data: BinnedDataInterface, copy = True) -> Histogram: expectation = expectation + bkg.expectation(data) expectation.plot(ax) plt.show() + +# Grid +loglike = Histogram([np.linspace(.9*nevents_signal, 1.1*nevents_signal, 30), np.linspace(.9*nevents_bkg, 1.1*nevents_bkg, 31)], labels = ['s', 'b']) + +for i,s in enumerate(loglike.axes['s'].centers): + for j,b in enumerate(loglike.axes['b'].centers): + + spectrum.k.value = s + cosi.bkg_parameter['norm'].value = b + cosi._update_bkg_parameters() # Fix the need for this line + + loglike[i,j] = cosi.get_log_like() + +loglike.plot() + +plt.show() \ No newline at end of file From 35b52d4ab34dd9926b12f75d7d7c14af7ca50df2 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Fri, 25 Jul 2025 17:47:23 -0400 Subject: [PATCH 067/133] Update dwell time map analysis to new signatures developed for the scatt map analysis Signed-off-by: Israel Martinez --- cosipy/response/PointSourceResponse.py | 6 +++--- cosipy/response/threeml_point_source_response.py | 2 +- .../grb/example_grb_fit_threeml_plugin_interfaces.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/cosipy/response/PointSourceResponse.py b/cosipy/response/PointSourceResponse.py index d6c4aa3f..fdb1747b 100644 --- a/cosipy/response/PointSourceResponse.py +++ b/cosipy/response/PointSourceResponse.py @@ -128,7 +128,7 @@ def get_expectation(self, spectrum, polarization=None): @classmethod def from_dwell_time_map(cls, - measured_axes:Axes, + data:BinnedDataInterface, response: BinnedInstrumentResponseInterface, exposure_map: HealpixMap, energy_axis: Axis, @@ -142,7 +142,7 @@ def from_dwell_time_map(cls, axes += [polarization_axis] polarization_centers = polarization_axis.centers - axes += list(measured_axes) + axes += list(data.axes) psr = PointSourceResponse(axes, unit=u.cm * u.cm * u.s) @@ -151,7 +151,7 @@ def from_dwell_time_map(cls, coord = exposure_map.pix2skycoord(p) if exposure_map[p] != 0: - psr += response.differential_effective_area(measured_axes, coord, energy_axis.centers, polarization_centers) * exposure_map[p] + psr += response.differential_effective_area(data, coord, energy_axis.centers, polarization_centers) * exposure_map[p] return psr diff --git a/cosipy/response/threeml_point_source_response.py b/cosipy/response/threeml_point_source_response.py index e95083e1..59d5f7b1 100644 --- a/cosipy/response/threeml_point_source_response.py +++ b/cosipy/response/threeml_point_source_response.py @@ -170,7 +170,7 @@ def expectation(self, data:BinnedDataInterface, copy = True)-> Histogram: dwell_time_map = self._sc_ori.get_dwell_map(coord, nside = self._nside) - self._psr = PointSourceResponse.from_dwell_time_map(data.axes, + self._psr = PointSourceResponse.from_dwell_time_map(data, self._response, dwell_time_map, self._energy_axis, diff --git a/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py index 038a636f..0d17edcd 100755 --- a/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py @@ -112,9 +112,9 @@ def main(): # NOTE: this is currently only implemented for data in local coords psr = BinnedThreeMLPointSourceResponse(instrument_response, 
sc_history=ori, - direction_axis = data.axes['PsiChi'], energy_axis = dr.axes['Ei'], - polarization_axis = dr.axes['Pol'] if 'Pol' in dr.axes.labels else None) + polarization_axis = dr.axes['Pol'] if 'Pol' in dr.axes.labels else None, + nside = 2*data.axes['PsiChi'].nside) response = BinnedThreeMLModelFolding(point_source_response = psr) From d1774859ca2d146556ba4690d9ab5ec505727643 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Thu, 7 Aug 2025 16:15:28 -0400 Subject: [PATCH 068/133] Use the more general iterables instead of numpy arrays for interfaces Signed-off-by: Israel Martinez --- cosipy/data_io/BinnedData.py | 20 +- cosipy/data_io/UnBinnedData.py | 59 +++ cosipy/image_deconvolution/exposure_table.py | 2 +- cosipy/interfaces/data_interface.py | 79 +++- cosipy/interfaces/event_selection.py | 48 ++ cosipy/interfaces/expectation_interface.py | 34 +- cosipy/interfaces/measurements.py | 281 +++++++++-- cosipy/statistics/likelihood_functions.py | 34 +- ..._fit_threeml_plugin_unbinned_interfaces.py | 440 ++++++++++++++++++ .../examples/toy/toy_interfaces_example.py | 155 ++++-- 10 files changed, 1020 insertions(+), 132 deletions(-) create mode 100644 cosipy/interfaces/event_selection.py create mode 100644 docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py diff --git a/cosipy/data_io/BinnedData.py b/cosipy/data_io/BinnedData.py index b0c72fbd..a49b6346 100644 --- a/cosipy/data_io/BinnedData.py +++ b/cosipy/data_io/BinnedData.py @@ -13,8 +13,7 @@ import astropy.units as u from astropy.coordinates import SkyCoord -from cosipy.interfaces import BinnedDataInterface, QuantityMeasurement, AngleMeasurement, SkyCoordMeasurement -from cosipy.interfaces.data_interface import EventData +from cosipy.interfaces import BinnedDataInterface logger = logging.getLogger(__name__) @@ -537,21 +536,4 @@ def data(self) -> Histogram: def axes(self) -> Axes: return self._data.axes -class EmCDSEventData(EventData): - - def __init__(self, - energy:QuantityMeasurement, - scattering_angle:AngleMeasurement, - scattering_direction:SkyCoordMeasurement): - - if energy.label != "Em": - raise ValueError("The measured energy is expected to be labeled 'Em'") - - if scattering_angle.label != "Phi": - raise ValueError("The scattering angle is expected to be labeled 'Phi'") - - if scattering_direction.label != "PsiChi": - raise ValueError("The scattering direction is expected to be labeled 'PsiChi'") - - super().__init__(energy, scattering_angle, scattering_direction) diff --git a/cosipy/data_io/UnBinnedData.py b/cosipy/data_io/UnBinnedData.py index 078d7065..584f856c 100644 --- a/cosipy/data_io/UnBinnedData.py +++ b/cosipy/data_io/UnBinnedData.py @@ -1,4 +1,6 @@ # Imports: +from abc import ABC + import numpy as np from astropy.table import Table from astropy.io import fits @@ -22,6 +24,11 @@ import gc import os import time + +from cosipy.interfaces import QuantityMeasurement, AngleMeasurement, SkyCoordMeasurement, CachedQuantityMeasurement, \ + CachedAngleMeasurement, CachedSkyCoordMeasurement, CachedSkyCoordUnitSphericalMeasurement +from cosipy.interfaces.data_interface import EventData + logger = logging.getLogger(__name__) @@ -861,3 +868,55 @@ def cut_SAA_events(self, unbinned_data=None, output_name=None): self.write_unbinned_output(output_name) return + + +class MeasuredEnergy(QuantityMeasurement, ABC): + """ + + """ + +class CachedMeasuredEnergy(CachedQuantityMeasurement, MeasuredEnergy): + """ + + """ + +class MeasuredScatteringAngle(AngleMeasurement, ABC): + """ + + """ + +class 
CachedMeasuredScatteringAngle(CachedAngleMeasurement, MeasuredScatteringAngle): + """ + + """ + + +class MeasuredScatteringDirection(SkyCoordMeasurement, ABC): + """ + + """ + +class CachedMeasuredScatteringDirection(CachedSkyCoordUnitSphericalMeasurement, MeasuredScatteringDirection): + """ + + """ + + +class EmCDSEventData(EventData): + + def __init__(self, + energy:MeasuredEnergy, + scattering_angle:MeasuredScatteringAngle, + scattering_direction:MeasuredScatteringDirection): + + if energy.label != "Em": + raise ValueError("The measured energy is expected to be labeled 'Em'") + + if scattering_angle.label != "Phi": + raise ValueError("The scattering angle is expected to be labeled 'Phi'") + + if scattering_direction.label != "PsiChi": + raise ValueError("The scattering direction is expected to be labeled 'PsiChi'") + + super().__init__(energy, scattering_angle, scattering_direction) + diff --git a/cosipy/image_deconvolution/exposure_table.py b/cosipy/image_deconvolution/exposure_table.py index 15951916..876f58d0 100644 --- a/cosipy/image_deconvolution/exposure_table.py +++ b/cosipy/image_deconvolution/exposure_table.py @@ -305,7 +305,7 @@ def save_as_fits(self, filename, overwrite = False): columns.append(column_healpix_index_x_pointing) column_delta_time = fits.Column(name='delta_time', format='PD()', unit = 's', - array=np.array(self['delta_time'].array, dtype=np.object_)) + array=np.array(self['delta_time'].cache, dtype=np.object_)) columns.append(column_delta_time) column_zpointing_l = fits.Column(name='zpointing_l', format='PD()', unit = 'degree', diff --git a/cosipy/interfaces/data_interface.py b/cosipy/interfaces/data_interface.py index dff40f4d..3acca023 100644 --- a/cosipy/interfaces/data_interface.py +++ b/cosipy/interfaces/data_interface.py @@ -1,5 +1,7 @@ from typing import Protocol, runtime_checkable, Dict, Type, Any, Tuple, Iterator, Union +import numpy as np +from .event_selection import EventSelectorInterface from histpy import Histogram, Axes from .measurements import Measurement @@ -8,7 +10,8 @@ __all__ = ["DataInterface", "EventDataInterface", - "BinnedDataInterface"] + "BinnedDataInterface", + "EventData"] @runtime_checkable class DataInterface(Protocol): @@ -19,14 +22,17 @@ class DataInterface(Protocol): @runtime_checkable class EventDataInterface(DataInterface, Protocol): - def __getitem__(self, item) -> Union[Tuple, Measurement]: + def __getitem__(self, item:Union[str, int]) -> Union[Tuple, Measurement]: """ If item is: - str: the value of specific measurement for all events - - int: all measurements for an specific event + - int: all measurements for an specific event (whether masked or unmasked) """ - def __iter__(self) -> Iterator[Tuple]:... + def __iter__(self) -> Iterator[Tuple]: + """ + Only loops through unmasked values + """ @property def nevents(self) -> int:... @@ -35,14 +41,31 @@ def nevents(self) -> int:... def labels(self) -> Tuple[str]:... @property - def types(self) -> Tuple[type]:... + def nmeasurements(self) -> int: + """ + Number of Measurements. Each measurement can potentially have more tha one value + --e.g. RA,Dec can be considered a single measurement + """ + + def set_selection(self, selection: Union[EventSelectorInterface, None]) -> None: + """ + None would drop the selection. Implementation might not implement the ability to drop + a selection when the underlying data was discarded for efficiency reasons. 
+ """ + + @property + def selection(self) -> Union[EventSelectorInterface, None]: + """ + The current selection set + """ @property - def nvars(self) -> int:... + def nselected(self) -> int:... + class EventData(EventDataInterface): """ - Generic event data from measurement + Generic event data from measurement set """ def __init__(self, *data:Measurement): @@ -59,12 +82,12 @@ def __init__(self, *data:Measurement): raise ValueError("All measurement arrays must have the same size") self._nevents = size + self._nselected = size self._events = data self._labels = tuple([d.label for d in data]) - self._types = tuple([type(d) for d in data]) - self._value_types = tuple([d.value_type for d in data]) + self._selection = None - def __getitem__(self, item): + def __getitem__(self, item:[Union[str, int]]) -> Union[Tuple, Measurement]: if isinstance(item, str): return self._events[self._labels.index(item)] @@ -73,8 +96,8 @@ def __getitem__(self, item): else: raise TypeError("Index must be either a measurement label or an entry position.") - def __iter__(self): - return zip(self._events) + def __iter__(self) -> Iterator[Tuple]: + return zip(*self._events) @property def nevents(self): @@ -85,16 +108,34 @@ def labels(self): return self._labels @property - def types(self): - return self._types + def nmeasurements(self) -> int: + return len(self._events) - @property - def value_types(self): - return self._value_types + def set_selection(self, selection:EventSelectorInterface) -> None: + + if selection is None: + self._selection = None + self._nselected = self._nevents + else: + + self._selection = selection + + # Signals the need to recompute this number + self._nselected = -1 @property - def nvars(self) -> int: - return len(self._events) + def selection(self) -> EventSelectorInterface: + return self._selection + + def nselected(self) -> int: + + if self._nselected == -1: + # Not yet cached since last set selection + self._nselected = sum(self._selection.select(self)) + + return self._nselected + + @runtime_checkable class BinnedDataInterface(DataInterface, Protocol): diff --git a/cosipy/interfaces/event_selection.py b/cosipy/interfaces/event_selection.py new file mode 100644 index 00000000..a2b14ed0 --- /dev/null +++ b/cosipy/interfaces/event_selection.py @@ -0,0 +1,48 @@ +from typing import Protocol, runtime_checkable, Dict, Any, Iterator, Sequence, Generator, Iterable, Union, Optional + +from typing import TYPE_CHECKING +if TYPE_CHECKING: + # Guard to prevent circular import + from .data_interface import EventDataInterface + +@runtime_checkable +class EventWeightingInterface(Protocol): + """ + 3 calling mechanism + + 1. + weights.set_data(data) + weights.weight() + + In this case weight() will call iter(data) + + 2. + weights.weight(data) + + In this case weight() will first call set_data(data) (if needed), and then iter(data). + + 3. + weights.set_data(data) + weights.weight(iterator) + + This prevents weight() from calling iter(data). However, it is assumed that + iterator is equivalent to iter(data). This allows to use cached versions + of the iterator or itertools.tee. + """ + + def set_data(self, data:'EventDataInterface'):... + + def weight(self, data: Optional[Union['EventDataInterface', Iterator]]) -> Iterable[float]:... + +@runtime_checkable +class EventSelectorInterface(EventWeightingInterface, Protocol): + + def set_data(self, data:'EventDataInterface'):... 
+ + def select(self, data: Optional[Union['EventDataInterface', Iterator]]) -> Iterable[bool]: + """ + Returns True to keep an event, False to filter it out. + """ + + def weight(self, data: Optional[Union['EventDataInterface', Iterator]]) -> Iterable[float]: + return self.select(data) diff --git a/cosipy/interfaces/expectation_interface.py b/cosipy/interfaces/expectation_interface.py index 1a0468fb..921e90f2 100644 --- a/cosipy/interfaces/expectation_interface.py +++ b/cosipy/interfaces/expectation_interface.py @@ -1,4 +1,4 @@ -from typing import Protocol, runtime_checkable, Dict, Any +from typing import Protocol, runtime_checkable, Dict, Any, Generator, Iterable, Optional, Union, Iterator import histpy import numpy as np @@ -32,18 +32,36 @@ def expectation(self, data:BinnedDataInterface, copy:bool)->histpy.Histogram: @runtime_checkable class ExpectationDensityInterface(ExpectationInterface, Protocol): - def ncounts(self) -> float:... - def expectation_density(self, data:EventDataInterface, copy:bool) -> np.ndarray: - """ + """ + 3 calling mechanisms + + 1. + expectation.set_data(data) + expectation.expectation_density() + + In this case expectation_density() will call iter(data) + + 2. + expectation.expectation_density(data) + + In this case expectation_density() will first call set_data(data) (if needed), and then iter(data). + + 3. + expectation.set_data(data) + expectation.expectation_density(iterator) + This prevents expectation_density() from calling iter(data). However, it is assumed that + iterator is equivalent to iter(data). This allows to use cached versions + of the iterator or itertools.tee. + """ + def set_data(self, data:EventDataInterface):... + def ncounts(self) -> float:... + def expectation_density(self, data: Optional[Union['EventDataInterface', Iterator]]) -> Iterable[float]: + """ Parameters ---------- data - copy: - If True, it will return an array that the user if free to modify. - Otherwise, it will result a reference, possible to the cache, that - the user should not modify Returns ------- diff --git a/cosipy/interfaces/measurements.py b/cosipy/interfaces/measurements.py index f135a5d9..2677cb51 100644 --- a/cosipy/interfaces/measurements.py +++ b/cosipy/interfaces/measurements.py @@ -1,92 +1,283 @@ +import itertools from abc import ABC, abstractmethod -from typing import Tuple, Type, TypeVar, Generic, ClassVar +from collections.abc import Iterator, Sequence +from typing import Tuple, Type, TypeVar, Generic, ClassVar, Union, Generator, Iterable import numpy as np -from astropy.coordinates import SkyCoord, Angle -from astropy.units import Quantity +from astropy.coordinates import SkyCoord, Angle, BaseCoordinateFrame, UnitSphericalRepresentation, \ + CartesianRepresentation +from astropy.units import Quantity, Unit +from numpy.typing import NDArray -class Measurement(ABC): +class MeasurementIterator(Iterator): + + def __next__(self) -> Union[int, float, Tuple[Union[int, float]]]:... + +class Measurement(Sequence): def __init__(self, label:str, *args, **kwargs): self._label = label + # Needs __len__ and either __iter__ or __getitem__ (or both) + @property def label(self) -> str: return self._label @property - @abstractmethod - def size(self) -> int:... + def size(self) -> int: + return len(self) @property @abstractmethod - def value_type(self) -> Type:... + def value_type(self) -> Union[Type, Tuple[Type]]: + """ + Types return by __iter__ and __getitem__ + """ - @abstractmethod - def __getitem__(self, item:int):... 
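Both docstrings above describe the same three calling mechanisms. A schematic sketch of how they are meant to be exercised, using a hypothetical EnergyThresholdSelector (not part of this patch) together with the EventData selection hooks added above and the CachedFloatingMeasurement helper introduced below; values are illustrative only:

import numpy as np

from cosipy.interfaces.data_interface import EventData
from cosipy.interfaces.measurements import CachedFloatingMeasurement


class EnergyThresholdSelector:
    # Hypothetical EventSelectorInterface implementation: keep events whose 'Em'
    # value exceeds a threshold, following the three calling mechanisms above.

    def __init__(self, threshold):
        self._threshold = threshold
        self._data = None

    def set_data(self, data):
        self._data = data

    def select(self, data=None):
        if data is None:
            data = iter(self._data)   # mechanism 1: use the previously bound data
        elif not hasattr(data, '__next__'):
            self.set_data(data)       # mechanism 2: an EventDataInterface was passed
            data = iter(data)
        # mechanism 3: `data` is already an iterator equivalent to iter(self._data),
        # e.g. one branch of itertools.tee(), so a single pass can be shared.
        for em, *_ in data:
            yield em > self._threshold

    def weight(self, data=None):
        # For a pure selector the weights are just the boolean selection.
        return self.select(data)


em = CachedFloatingMeasurement('Em', np.array([510.0, 661.7, 1274.5]))
phi = CachedFloatingMeasurement('Phi', np.array([0.3, 0.8, 1.1]))
events = EventData(em, phi)

selector = EnergyThresholdSelector(600.0)
events.set_selection(selector)
events.nselected()   # 2 -- computed lazily as sum(selector.select(events))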
+ @property + def nvalues(self) -> int: + if isinstance(self.value_type, tuple): + return len(self.value_type) + else: + return 1 + + def cache(self, start=None, stop=None, step=None) -> Iterable: + values = [] + for value in itertools.islice(self, start, stop, step): + values.append(value) + + return values + +class FloatingMeasurement(Measurement, ABC): + + @property + def value_type(self) -> Union[Type, Tuple[Type]]: + return float + + def cache(self, start=None, stop=None, step=None) -> NDArray[float]: + values = super().cache(start, stop, step) + return np.asarray(values) + +class CachedFloatingMeasurement(FloatingMeasurement): + + def __init__(self, label:str, array: np.ndarray[float]): + if array.ndim != 1: + raise ValueError("This class handles 1D and only 1D arrays") + + super().__init__(label) + self._array = array + + def __len__(self): + return self._array.size + + def __iter__(self): + return iter(self._array) + + def __getitem__(self, item): + return self._array[item] + + def cache(self, start=None, stop=None, step=None) -> np.ndarray[float]: + return self._array[start, stop, step] + +class QuantityMeasurement(FloatingMeasurement, ABC): + + @property @abstractmethod - def __iter__(self):... + def unit(self) -> Unit:... + + def fancy_iter(self, start = None, stop = None, step = None) -> Generator[Quantity, None, None]: + for value in itertools.islice(self, start, stop, step): + yield Quantity(value, self.unit) + + def cache(self, start = None, stop = None, step = None) -> Quantity: + return Quantity(super().cache(start, stop, step), self.unit) + + +class CachedQuantityMeasurement(CachedFloatingMeasurement, QuantityMeasurement): + + def __init__(self, label:str, array: Quantity): + if array.ndim != 1: + raise ValueError("This class handles 1D and only 1D arrays") + + super().__init__(label, array.value) + self._array = array.value + self._unit = array.unit + + def unit(self) -> Unit: + return self._unit + + def cache(self, start = None, stop = None, step = None) -> Quantity: + return Quantity(self._array[start:stop:step], self.unit) -T = TypeVar('T') -t = TypeVar('t') + def fancy_iter(self, start=None, stop=None, step=None) -> Generator[Quantity, None, None]: + return iter(self.cache) -class ArrayLikeMeasurement(Measurement, Generic[T,t]): +class AngleMeasurement(QuantityMeasurement, ABC): + + def fancy_iter(self, start=None, stop=None, step=None) -> Generator[Quantity, None, None]: + for value in itertools.islice(self, start, stop, step): + yield Angle(value, self.unit) + + def cache(self, start=None, stop=None, step=None) -> Angle: + return Angle(super().cache(start, stop, step), self.unit) + +class CachedAngleMeasurement(CachedQuantityMeasurement): + + def __init__(self, label:str, array: Angle): + super().__init__(label, Quantity(array.value, array.unit)) + + def cache(self, start = None, stop = None, step = None) -> Quantity: + return Angle(self._array[start:stop:step], self.unit) + +class SkyCoordMeasurement(Measurement, ABC): """ - Data already implements and iterable, [] and size + """ - _value_type = ClassVar[type] + @property + @abstractmethod + def frame(self) -> BaseCoordinateFrame:... + @property + @abstractmethod + def unit(self) -> Unit:... + + def as_unit_spherical(self) -> 'SkyCoordUnitSphericalMeasurement':... + def as_cartesian(self) -> 'SkyCoordCartesianMeasurement':... 
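Since a Measurement is now just a labeled Sequence, its values no longer have to sit in a numpy array. A minimal sketch of a lazily evaluated measurement (hypothetical OnTheFlyMeasurement with illustrative values) that is only materialized when cache() is requested:

from cosipy.interfaces.measurements import FloatingMeasurement


class OnTheFlyMeasurement(FloatingMeasurement):
    # Hypothetical measurement whose values are computed on demand instead of being
    # stored in an array; only __len__, __getitem__ and (optionally) __iter__ are
    # needed to satisfy the Sequence-based Measurement.

    def __init__(self, label, n):
        super().__init__(label)
        self._n = n

    def __len__(self):
        return self._n

    def __getitem__(self, item):
        return float(item) ** 2   # illustrative per-event value

    def __iter__(self):
        return (float(i) ** 2 for i in range(self._n))


m = OnTheFlyMeasurement('x', 4)

m.size          # 4
m.value_type    # float
list(m)         # [0.0, 1.0, 4.0, 9.0]
m.cache(1, 3)   # array([1., 4.]) -- only materialized when explicitly requested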
+ +class CachedSkyCoordMeasurement(SkyCoordMeasurement, ABC): - def __init__(self, data:T, label:str, *args, **kwargs): + def __init__(self, label: str, coord: SkyCoord): - self._data = data super().__init__(label) + if coord.ndim != 1: + raise ValueError("This class handles 1D and only 1D SkyCoord arrays") + + self._frame = coord.frame + + self._unit = None # Set by child class. Type Unit + self._data = None # Set by child class. array of shape (self.size,self.nvalues) + + def __len__(self): + return self._data.shape[0] + @property - def size(self) -> int: - return self._data.size + def frame(self) -> BaseCoordinateFrame: + return self._unit @property - def value_type(self) -> Type: - return self._value_type + def unit(self) -> Unit: + return self._unit + + def __iter__(self): + return iter(self._data) - def __getitem__(self, item:int) -> t: + def __getitem__(self, item): return self._data[item] - def __iter__(self) -> t: - return self._data.__iter__() + @abstractmethod + def cache(self, start=None, stop=None, step=None) -> SkyCoord:... + + def fancy_iter(self, start=None, stop=None, step=None) -> Generator[SkyCoord, None, None]: + return iter(self.cache) + + +class SkyCoordUnitSphericalMeasurement(SkyCoordMeasurement, ABC): @property - def data(self) -> T: - return self._data + def value_type(self) -> Union[Type, Tuple[Type]]: + return (float, float) + def fancy_iter(self, start=None, stop=None, step=None) -> Generator[SkyCoord, None, None]: + for lon,lat in itertools.islice(self, start, stop, step): + yield SkyCoord(lon, lat, unit = self.unit, frame = self.frame) -class QuantityMeasurement(ArrayLikeMeasurement[Quantity, Quantity]): - """ - """ - _value_type = Quantity + def cache(self, start=None, stop=None, step=None) -> SkyCoord: + lon = [] + lat = [] + for lon_i,lat_i in itertools.islice(self, start, stop, step): + lon.append(lon_i) + lat.append(lat_i) -class SkyCoordMeasurement(ArrayLikeMeasurement[SkyCoord, SkyCoord]): - """ + return SkyCoord(lon, lat, unit = self.unit, frame = self.frame) - """ - _value_type = SkyCoord + def as_unit_spherical(self) -> 'SkyCoordUnitSphericalMeasurement': + return self -class AngleMeasurement(ArrayLikeMeasurement[Angle, Angle]): - """ + def as_cartesian(self) -> 'SkyCoordCartesianMeasurement':... 
- """ - _value_type = Angle -class FloatingMeasurement(ArrayLikeMeasurement[np.ndarray, np.floating]): +class CachedSkyCoordUnitSphericalMeasurement(CachedSkyCoordMeasurement, SkyCoordUnitSphericalMeasurement): + + def __init__(self, label:str, coord: SkyCoord): + + super().__init__(label, coord) + + rep = coord.represent_as('unitspherical') + + self._unit = rep.unit + self._data = np.asarray([rep.lon.value, rep.lat.value]).transpose() + + def cache(self, start=None, stop=None, step=None) -> SkyCoord: + return SkyCoord(self._data[start:stop:step,0], self._data[start:stop:step,1], unit = self.unit, frame = self.frame) + + def as_cartesian(self) -> 'SkyCoordCartesianMeasurement': + rep = UnitSphericalRepresentation(Quantity(self._data[:,0], self.unit), Quantity(self._data[:,1], self.unit)) + cart_rep = rep.represent_as('cartesian') + + coord = SkyCoord(x = cart_rep.x, y = cart_rep.y, z = cart_rep.z, frame = self.frame, representation_type = 'cartesian') + + return CachedSkyCoordCartesianMeasurement(self.label, coord) + + +class SkyCoordCartesianMeasurement(SkyCoordMeasurement, ABC): + + @property + def value_type(self) -> Union[Type, Tuple[Type]]: + return (float, float, float) + + def fancy_iter(self, start=None, stop=None, step=None) -> Generator[SkyCoord, None, None]: + for x,y,z in itertools.islice(self, start, stop, step): + yield SkyCoord(x=x,y=y,z=z, unit = self.unit, frame = self.frame(), representation_type='cartesian') + + def cache(self, start=None, stop=None, step=None) -> SkyCoord: + x = [] + y = [] + z = [] + for x_i,y_i,z_i in itertools.islice(self, start, stop, step): + x.append(x_i) + y.append(y_i) + z.append(z_i) + + return SkyCoord(x=x,y=y,z=z, unit = self.unit, frame = self.frame(), representation_type='cartesian') + + def as_unit_spherical(self) -> 'SkyCoordUnitSphericalMeasurement':... 
+ + def as_cartesian(self) -> 'SkyCoordCartesianMeasurement': + return self + + +class CachedSkyCoordCartesianMeasurement(SkyCoordCartesianMeasurement): + + def __init__(self, label:str, coord: SkyCoord): + super().__init__(label, coord) + + rep = coord.represent_as('cartesian') + + self._unit = rep.unit + + self._data = np.asarray([rep.x.value, rep.y.value, rep.z.value]).transpose() - _value_type = np.floating + def cache(self, start=None, stop=None, step=None) -> SkyCoord: + return SkyCoord(x = self._data[start:stop:step, 0], y = self._data[start:stop:step, 1], z = self._data[start:stop:step, 2], + unit = self.unit, frame = self.frame, representation_type = 'cartesian') - def __init__(self, data: np.ndarray, label: str, *args, **kwargs): + def as_unit_spherical(self) -> 'SkyCoordUnitSphericalMeasurement': + rep = CartesianRepresentation(self._data[:,0], self._data[:,1], self._data[:,2]) + usph_rep = rep.represent_as('unitspherical') - if not np.issubdtype(data.dtype, np.floating): - raise TypeError("This class expect float or double types") + coord = SkyCoord(usph_rep.lon, usph_rep.lat, frame = self.frame) - self._data = data - super().__init__(data, label) + return CachedSkyCoordUnitSphericalMeasurement(self.label, coord) \ No newline at end of file diff --git a/cosipy/statistics/likelihood_functions.py b/cosipy/statistics/likelihood_functions.py index 05fa63e0..f2b3f519 100644 --- a/cosipy/statistics/likelihood_functions.py +++ b/cosipy/statistics/likelihood_functions.py @@ -1,4 +1,6 @@ +import itertools import logging +import operator from cosipy.interfaces.expectation_interface import ExpectationInterface, ExpectationDensityInterface @@ -32,10 +34,20 @@ def set_response(self, response: ExpectationInterface): super().set_response(response) # Checks type self._response = response + if self._data is None: + raise RuntimeError("Call set_data() before calling set_response()") + + self._response.set_data(self._data) + def set_background(self, bkg: BackgroundInterface): super().set_background(bkg) # Checks type self._bkg = bkg + if self._data is None: + raise RuntimeError("Call set_data() before calling set_background()") + + self._bkg.set_data(self._data) + @property def data (self) -> EventDataInterface: return self._data @property @@ -64,18 +76,26 @@ def get_log_like(self) -> float: ntot = self._response.ncounts() - # If we don't have background, we won't modify the expectation, so - # it's safe to use the internal cache. 
- density = self._response.expectation_density(self._data, copy = self.has_bkg) - if self.has_bkg: ntot += self._bkg.ncounts() - # We won't modify the bkg expectation, so it's safe to use the internal cache - density += self._bkg.expectation_density(self._data, copy = False) + # Prevent 2 iteration over data using tee() + data_iter_1, data_iter_2 = itertools.tee(self._data, 2) + + signal_density = self._response.expectation_density(data_iter_1) + bkg_density = self._bkg.expectation_density(data_iter_2) + + density = np.fromiter(map(operator.add, signal_density, bkg_density), dtype=float) + + # signal_density = np.fromiter(self._response.expectation_density(), dtype=float) + # bkg_density = np.fromiter(self._bkg.expectation_density(), dtype=float) + # + # density = signal_density + bkg_density + + else: + density = np.fromiter(self._response.expectation_density(), dtype=float) - # Compute the log-likelihood: log_like = np.sum(np.log(density)) - ntot return log_like diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py new file mode 100644 index 00000000..831d9b88 --- /dev/null +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py @@ -0,0 +1,440 @@ +#!/usr/bin/env python +# coding: utf-8 + +# # Spectral fitting example (Crab) + +# **To run this, you need the following files, which can be downloaded using the first few cells of this notebook:** +# - orientation file (20280301_3_month_with_orbital_info.ori) +# - binned data (crab_bkg_binned_data.hdf5, crab_binned_data.hdf5, & bkg_binned_data.hdf5) +# - detector response (SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5) +# +# **The binned data are simulations of the Crab Nebula and albedo photon background produced using the COSI SMEX mass model. The detector response needs to be unzipped before running the notebook.** + +# This notebook fits the spectrum of a Crab simulated using MEGAlib and combined with background. +# +# [3ML](https://threeml.readthedocs.io/) is a high-level interface that allows multiple datasets from different instruments to be used coherently to fit the parameters of source model. A source model typically consists of a list of sources with parametrized spectral shapes, sky locations and, for extended sources, shape. Polarization is also possible. A "coherent" analysis, in this context, means that the source model parameters are fitted using all available datasets simultanously, rather than performing individual fits and finding a well-suited common model a posteriori. +# +# In order for a dataset to be included in 3ML, each instrument needs to provide a "plugin". Each plugin is responsible for reading the data, convolving the source model (provided by 3ML) with the instrument response, and returning a likelihood. In our case, we'll compute a binned Poisson likelihood: +# +# $$ +# \log \mathcal{L}(\mathbf{x}) = \sum_i \log \frac{\lambda_i(\mathbf{x})^{d_i} \exp (-\lambda_i)}{d_i!} +# $$ +# +# where $d_i$ are the counts on each bin and $\lambda_i$ are the expected counts given a source model with parameters $\mathbf{x}$. +# +# In this example, we will fit a single point source with a known location. We'll assume the background is known and fixed up to a scaling factor. 
Finally, we will fit a Band function: +# +# $$ +# f(x) = K \begin{cases} \left(\frac{x}{E_{piv}}\right)^{\alpha} \exp \left(-\frac{(2+\alpha) +# * x}{x_{p}}\right) & x \leq (\alpha-\beta) \frac{x_{p}}{(\alpha+2)} \\ \left(\frac{x}{E_{piv}}\right)^{\beta} +# * \exp (\beta-\alpha)\left[\frac{(\alpha-\beta) x_{p}}{E_{piv}(2+\alpha)}\right]^{\alpha-\beta} +# * &x>(\alpha-\beta) \frac{x_{p}}{(\alpha+2)} \end{cases} +# $$ +# +# where $K$ (normalization), $\alpha$ & $\beta$ (spectral indeces), and $x_p$ (peak energy) are the free parameters, while $E_{piv}$ is the pivot energy which is fixed (and arbitrary). +# +# Considering these assumptions: +# +# $$ +# \lambda_i(\mathbf{x}) = B*b_i + s_i(\mathbf{x}) +# $$ +# +# where $B*b_i$ are the estimated counts due to background in each bin with $B$ the amplitude and $b_i$ the shape of the background, and $s_i$ are the corresponding expected counts from the source, the goal is then to find the values of $\mathbf{x} = [K, \alpha, \beta, x_p]$ and $B$ that maximize $\mathcal{L}$. These are the best estimations of the parameters. +# +# The final module needs to also fit the time-dependent background, handle multiple point-like and extended sources, as well as all the spectral models supported by 3ML. Eventually, it will also fit the polarization angle. However, this simple example already contains all the necessary pieces to do a fit. + +# In[1]: + + +from cosipy import test_data, BinnedData, UnBinnedData +from cosipy.spacecraftfile import SpacecraftHistory +from cosipy.response.FullDetectorResponse import FullDetectorResponse +from cosipy.util import fetch_wasabi_file + +from cosipy.statistics import PoissonLikelihood +from cosipy.background_estimation import FreeNormBinnedBackground +from cosipy.interfaces import ThreeMLPluginInterface +from cosipy.response import BinnedThreeMLModelFolding, BinnedInstrumentResponse, BinnedThreeMLPointSourceResponse + +import sys + +from scoords import SpacecraftFrame + +from astropy.time import Time +import astropy.units as u +from astropy.coordinates import SkyCoord, Galactic + +import numpy as np +import matplotlib.pyplot as plt + +from threeML import Band, PointSource, Model, JointLikelihood, DataList +from astromodels import Parameter + +from pathlib import Path + +import os + +def main(): + + # ## Download and read in binned data + + # Define the path to the directory containing the data, detector response, orientation file, and yaml files if they have already been downloaded, or the directory to download the files into + + # In[2]: + + + data_path = Path("") # /path/to/files. Current dir by default + + + # Download the orientation file + + # In[ ]: + + fetch_wasabi_file('COSI-SMEX/DC3/Data/Orientation/DC3_final_530km_3_month_with_slew_15sbins_GalacticEarth_SAA.ori', + output=str(data_path / 'DC3_final_530km_3_month_with_slew_15sbins_GalacticEarth_SAA.ori'), + checksum='e5e71e3528e39b855b0e4f74a1a2eebe') + + # Download the unbinned Crab data + + # In[5]: + + + fetch_wasabi_file('COSI-SMEX/DC3/Data/Sources/crab_standard_3months_unbinned_data_filtered_with_SAAcut.fits.gz', output=str(data_path / 'crab_standard_3months_unbinned_data_filtered_with_SAAcut.fits.gz'), checksum = '1d73e7b9e46e51215738075e91a52632') + + + + # Read in the spacecraft orientation file + + # In[4]: + + + sc_orientation = SpacecraftHistory.open(data_path / "DC3_final_530km_3_month_with_slew_15sbins_GalacticEarth_SAA.ori") + + + + # Read data + + UnBinnedData + + # Create BinnedData objects for the Crab only, Crab+background, and background only. 
The Crab only simulation is not used for the spectral fit, but can be used to compare the fitted spectrum to the source simulation + + # In[5]: + + + crab = BinnedData(data_path / "crab.yaml") + crab_bkg = BinnedData(data_path / "crab.yaml") + bkg = BinnedData(data_path / "background.yaml") + + + # Load binned .hdf5 files + + # In[6]: + + + crab.load_binned_data_from_hdf5(binned_data=data_path / "crab_binned_data.hdf5") + crab_bkg.load_binned_data_from_hdf5(binned_data=data_path / "crab_bkg_binned_data.hdf5") + bkg.load_binned_data_from_hdf5(binned_data=data_path / "bkg_binned_data.hdf5") + + + # Define the path to the detector response + + # In[7]: + + + # Before and after Jeremy's changes + dr = str(data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5") # path to detector response + #dr = str(data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5") # path to detector response + + + # ## Perform spectral fit + + # ============ Interfaces ============== + + output_suffix = 'interfaces' + + dr = FullDetectorResponse.open(dr) + instrument_response = BinnedInstrumentResponse(dr) + + # Set background parameter, which is used to fit the amplitude of the background, and instantiate the COSI 3ML plugin + + # In[8]: + + bkg_dist = bkg.binned_data.project('Em', 'Phi', 'PsiChi') + + # Workaround to avoid inf values. Out bkg should be smooth, but currently it's not. + # Reproduces results before refactoring. It's not _exactly_ the same, since this fudge value was 1e-12, and + # it was added to the expectation, not the normalized bkg + bkg_dist += sys.float_info.min + + data = crab_bkg.get_em_cds() + + bkg = FreeNormBinnedBackground(bkg_dist) + + # Currently using the same NnuLambda, Ei and Pol axes as the underlying FullDetectorResponse, + # matching the behavior of v0.3. This is all the current BinnedInstrumentResponse can do. + # In principle, this can be decoupled, and a BinnedInstrumentResponseInterface implementation + # can provide the response for an arbitrary directions, Ei and Pol values. + # NOTE: this is currently only implemented for data in local coords + psr = BinnedThreeMLPointSourceResponse(instrument_response, + sc_history=sc_orientation, + energy_axis = dr.axes['Ei'], + polarization_axis = dr.axes['Pol'] if 'Pol' in dr.axes.labels else None, + nside = 2*data.axes['PsiChi'].nside) + + ##==== + + + response = BinnedThreeMLModelFolding(point_source_response = psr) + + like_fun = PoissonLikelihood() + like_fun.set_data(data) + like_fun.set_response(response) + like_fun.set_background(bkg) + + cosi = ThreeMLPluginInterface('cosi', like_fun) + + # Nuisance parameter guess, bounds, etc. + cosi.bkg_parameter['bkg_norm'] = Parameter("bkg_norm", # background parameter + 1, # initial value of parameter + min_value=0, # minimum value of parameter + max_value=5, # maximum value of parameter + delta=0.05, # initial step used by fitting engine + ) + + + # ======== Interfaces end ========== + + # Define a point source at the known location with a Band function spectrum and add it to the model. The initial values of the Band function parameters are set to the true values used to simulate the source + + + # In[9]: + + + l = 184.56 + b = -5.78 + + alpha = -1.99 + beta = -2.32 + E0 = 531. * (alpha - beta) * u.keV + xp = E0 * (alpha + 2) / (alpha - beta) + piv = 500. 
* u.keV + K = 3.07e-5 / u.cm / u.cm / u.s / u.keV + + spectrum = Band() + + spectrum.alpha.min_value = -2.14 + spectrum.alpha.max_value = 3.0 + spectrum.beta.min_value = -5.0 + spectrum.beta.max_value = -2.15 + spectrum.xp.min_value = 1.0 + + spectrum.alpha.value = alpha + spectrum.beta.value = beta + spectrum.xp.value = xp.value + spectrum.K.value = K.value + spectrum.piv.value = piv.value + + spectrum.xp.unit = xp.unit + spectrum.K.unit = K.unit + spectrum.piv.unit = piv.unit + + spectrum.alpha.delta = 0.01 + spectrum.beta.delta = 0.01 + + source = PointSource("source", # Name of source (arbitrary, but needs to be unique) + l = l, # Longitude (deg) + b = b, # Latitude (deg) + spectral_shape = spectrum) # Spectral model + + # Optional: free the position parameters + #source.position.l.free = True + #source.position.b.free = True + + model = Model(source) # Model with single source. If we had multiple sources, we would do Model(source1, source2, ...) + + # Optional: if you want to call get_log_like manually, then you also need to set the model manually + # 3ML does this internally during the fit though + cosi.set_model(model) + + + # Gather all plugins and combine with the model in a JointLikelihood object, then perform maximum likelihood fit + + # In[10]: + + + plugins = DataList(cosi) # If we had multiple instruments, we would do e.g. DataList(cosi, lat, hawc, ...) + + like = JointLikelihood(model, plugins, verbose = False) + + like.fit() + + + # ## Error propagation and plotting (Band function) + + # Define Band function spectrum injected into MEGAlib + + # In[11]: + + ## Injected + + l = 184.56 + b = -5.78 + + alpha_inj = -1.99 + beta_inj = -2.32 + E0_inj = 531. * (alpha_inj - beta_inj) * u.keV + xp_inj = E0_inj * (alpha_inj + 2) / (alpha_inj - beta_inj) + piv_inj = 100. * u.keV + K_inj = 7.56e-4 / u.cm / u.cm / u.s / u.keV + + spectrum_inj = Band() + + spectrum_inj.alpha.min_value = -2.14 + spectrum_inj.alpha.max_value = 3.0 + spectrum_inj.beta.min_value = -5.0 + spectrum_inj.beta.max_value = -2.15 + spectrum_inj.xp.min_value = 1.0 + + spectrum_inj.alpha.value = alpha_inj + spectrum_inj.beta.value = beta_inj + spectrum_inj.xp.value = xp_inj.value + spectrum_inj.K.value = K_inj.value + spectrum_inj.piv.value = piv_inj.value + + spectrum_inj.xp.unit = xp_inj.unit + spectrum_inj.K.unit = K_inj.unit + spectrum_inj.piv.unit = piv_inj.unit + + # Expectation for injected source + source_inj = PointSource("source", # Name of source (arbitrary, but needs to be unique) + l=l, # Longitude (deg) + b=b, # Latitude (deg) + spectral_shape=spectrum_inj) # Spectral model + + psr.set_source(source_inj) + expectation_inj = psr.expectation(data, copy=True) + + + # The summary of the results above tell you the optimal values of the parameters, as well as the errors. 
Propogate the errors to the "evaluate_at" method of the spectrum + + # In[12]: + + + results = like.results + + + print(results.display()) + + parameters = {par.name:results.get_variates(par.path) + for par in results.optimized_model["source"].parameters.values() + if par.free} + + results_err = results.propagate(results.optimized_model["source"].spectrum.main.shape.evaluate_at, **parameters) + + print(results.optimized_model["source"]) + + # Evaluate the flux and errors at a range of energies for the fitted and injected spectra, and the simulated source flux + + # In[13]: + + + energy = np.geomspace(100*u.keV,10*u.MeV).to_value(u.keV) + + flux_lo = np.zeros_like(energy) + flux_median = np.zeros_like(energy) + flux_hi = np.zeros_like(energy) + flux_inj = np.zeros_like(energy) + + for i, e in enumerate(energy): + flux = results_err(e) + flux_median[i] = flux.median + flux_lo[i], flux_hi[i] = flux.equal_tail_interval(cl=0.68) + flux_inj[i] = spectrum_inj.evaluate_at(e) + + binned_energy_edges = crab.binned_data.axes['Em'].edges.value + binned_energy = np.array([]) + bin_sizes = np.array([]) + + for i in range(len(binned_energy_edges)-1): + binned_energy = np.append(binned_energy, (binned_energy_edges[i+1] + binned_energy_edges[i]) / 2) + bin_sizes = np.append(bin_sizes, binned_energy_edges[i+1] - binned_energy_edges[i]) + + expectation = response.expectation(data, copy = True) + + + # Plot the fitted and injected spectra + + # In[14]: + + + fig,ax = plt.subplots() + + ax.plot(energy, energy*energy*flux_median, label = "Best fit") + ax.fill_between(energy, energy*energy*flux_lo, energy*energy*flux_hi, alpha = .5, label = "Best fit (errors)") + ax.plot(energy, energy*energy*flux_inj, color = 'black', ls = ":", label = "Injected") + + ax.set_xscale("log") + ax.set_yscale("log") + + ax.set_xlabel("Energy (keV)") + ax.set_ylabel(r"$E^2 \frac{dN}{dE}$ (keV cm$^{-2}$ s$^{-1}$)") + + ax.legend() + + plt.show() + + # Plot the fitted spectrum convolved with the response, as well as the simulated source counts + + # In[15]: + + + fig,ax = plt.subplots() + + ax.stairs(expectation.project('Em').todense().contents, binned_energy_edges, color='purple', label = "Best fit convolved with response") + ax.stairs(expectation_inj.project('Em').todense().contents, binned_energy_edges, color='blue', label = "Injected spectrum convolved with response") + ax.errorbar(binned_energy, expectation.project('Em').todense().contents, yerr=np.sqrt(expectation.project('Em').todense().contents), color='purple', linewidth=0, elinewidth=1) + ax.stairs(crab.binned_data.project('Em').todense().contents, binned_energy_edges, color = 'black', ls = ":", label = "Source counts") + ax.errorbar(binned_energy, crab.binned_data.project('Em').todense().contents, yerr=np.sqrt(crab.binned_data.project('Em').todense().contents), color='black', linewidth=0, elinewidth=1) + + ax.set_xscale("log") + ax.set_yscale("log") + + ax.set_xlabel("Energy (keV)") + ax.set_ylabel("Counts") + + ax.legend() + + plt.show() + + + # Plot the fitted spectrum convolved with the response plus the fitted background, as well as the simulated source+background counts + + # In[16]: + + expectation_bkg = bkg.expectation(data, copy = True) + + fig,ax = plt.subplots() + + ax.stairs(expectation.project('Em').todense().contents + expectation_bkg.project('Em').todense().contents, binned_energy_edges, color='purple', label = "Best fit convolved with response plus background") + ax.errorbar(binned_energy, 
expectation.project('Em').todense().contents+expectation_bkg.project('Em').todense().contents, yerr=np.sqrt(expectation.project('Em').todense().contents+expectation_bkg.project('Em').todense().contents), color='purple', linewidth=0, elinewidth=1) + ax.stairs(crab_bkg.binned_data.project('Em').todense().contents, binned_energy_edges, color = 'black', ls = ":", label = "Total counts") + ax.errorbar(binned_energy, crab_bkg.binned_data.project('Em').todense().contents, yerr=np.sqrt(crab_bkg.binned_data.project('Em').todense().contents), color='black', linewidth=0, elinewidth=1) + + ax.set_xscale("log") + ax.set_yscale("log") + + ax.set_xlabel("Energy (keV)") + ax.set_ylabel("Counts") + + ax.legend() + + plt.show() + + +if __name__ == "__main__": + + import cProfile + cProfile.run('main()', filename = "prof_interfaces.prof") + exit() + + main() \ No newline at end of file diff --git a/docs/api/interfaces/examples/toy/toy_interfaces_example.py b/docs/api/interfaces/examples/toy/toy_interfaces_example.py index 124e45e6..4f4494fe 100644 --- a/docs/api/interfaces/examples/toy/toy_interfaces_example.py +++ b/docs/api/interfaces/examples/toy/toy_interfaces_example.py @@ -1,4 +1,5 @@ -from typing import Dict, Any +import itertools +from typing import Dict, Any, Generator, Iterator, Iterable, Optional, Union from astromodels.sources import Source from astromodels import LinearPolarization, SpectralComponent, Parameter @@ -46,6 +47,54 @@ toy_axis = Axis(np.linspace(-5, 5)) nevents_signal = 1000 nevents_bkg = 1000 +nevents_tot = nevents_signal + nevents_bkg + +class ToyMeasurementIterator(Iterator): + # Random data. Normal signal on top of uniform bkg + # Keeps track of initial random seed + + def __init__(self, iterable: 'ToyMeasurement'): + self._iter = iterable + self._rng = np.random.default_rng() + + # Restart + self._rng.__setstate__(self._iter._rng_init) + self._pos = 0 + + def __next__(self): + if self._pos >= nevents_tot: + raise StopIteration + + self._pos += 1 + + if self._rng.uniform(0, nevents_tot) < nevents_signal: + return self._rng.normal() + else: + return self._rng.uniform(toy_axis.lo_lim, toy_axis.hi_lim) + + +class ToyMeasurement(FloatingMeasurement): + + def __init__(self, label): + + super().__init__(label) + + # Keep track of init seed to allow iterator + self._rng_init = np.random.default_rng().__getstate__() + + def __iter__(self): + return ToyMeasurementIterator(self) + + def __getitem__(self, item): + # This is inefficient unless the implementation caches the values + with iter(self) as i: + for _ in range(item): + next(i) + + return next(i) + + def __len__(self): + return nevents_tot class ToyData(BinnedDataInterface, EventData): # Random data. Normal signal on top of uniform bkg @@ -54,23 +103,13 @@ class ToyData(BinnedDataInterface, EventData): # code readability, especially if you use an IDE. 
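    # (Aside on the ToyMeasurement defined above, purely illustrative: because the
    #  initial numpy Generator state is stored, every iterator created from it
    #  replays the same random stream, e.g.
    #
    #      m = ToyMeasurement('x')
    #      a = list(itertools.islice(iter(m), 3))
    #      b = list(itertools.islice(iter(m), 3))   # identical to a
    #
    #  so the unbinned toy events are reproducible without keeping them in memory.)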
def __init__(self): - self._data = Histogram(toy_axis) - - # Signal - event_data = norm.rvs(size=nevents_signal) - - # Bkg - bkg_event_data = uniform.rvs(toy_axis.lo_lim, toy_axis.hi_lim-toy_axis.lo_lim, size=nevents_bkg) - - # Join - event_data = np.append(event_data, bkg_event_data) + # Unbinned + measurements = ToyMeasurement('x') + EventData.__init__(self, measurements) # Binned - self._data.fill(event_data) - - #Unbinned - measurements = FloatingMeasurement(event_data, 'x') - EventData.__init__(self, measurements) + self._data = Histogram(toy_axis) + self._data.fill(np.asarray(measurements)) @property def data(self) -> Histogram: @@ -90,8 +129,10 @@ def __init__(self): self._unit_expectation[:] = 1 / self._unit_expectation.nbins self._norm = 1 + self._event_data = None + # Doesn't need to be normalized - self._unit_expectation_density = np.broadcast_to(1/(toy_axis.hi_lim - toy_axis.lo_lim), data.nevents) + self._unit_expectation_density = 1/(toy_axis.hi_lim - toy_axis.lo_lim) def set_parameters(self, **parameters:u.Quantity) -> None: self._norm = parameters['norm'].value @@ -99,9 +140,29 @@ def set_parameters(self, **parameters:u.Quantity) -> None: def ncounts(self) -> float: return self._norm - def expectation_density(self, data: EventDataInterface, copy: bool = True) -> np.ndarray: - #Always a copy - return self._norm*self._unit_expectation_density + def set_data(self, data: EventDataInterface) -> None: + if not isinstance(data, ToyData): + raise TypeError(f"This class only support data of type {ToyData}") + + self._event_data = data + + def expectation_density(self, data: Optional[Union['EventDataInterface', Iterator]] = None) -> Iterable[float]: + if data is None: + + if self._event_data is None: + raise RuntimeError("You need to either provide the data or call set_data() first.") + + data = self._event_data + + elif isinstance(data, EventDataInterface): + + # Runs some checks + self.set_data(data) + + density = self._norm * self._unit_expectation_density + + for _ in data: + yield density @property def parameters(self) -> Dict[str, u.Quantity]: @@ -139,16 +200,33 @@ def ncounts(self) -> float: ns_events = self._source.spectrum.main.shape.k.value return ns_events - def expectation_density(self, data:EventDataInterface, copy:bool) -> np.ndarray: - + def set_data(self, data:EventDataInterface): if not isinstance(data, ToyData): raise TypeError(f"This class only support data of type {ToyData}") + self._event_data = data + + def expectation_density(self, data:Optional[Union[EventDataInterface, Iterator]] = None) -> Iterable[float]: + + if data is None: + + if self._event_data is None: + raise RuntimeError("You need to either provide the data or call set_data() first.") + + data = self._event_data + + elif isinstance(data, EventDataInterface): + + # Runs some checks + self.set_data(data) + # I expect in the real case it'll be more efficient to compute # (ncounts, ncounts*prob) than (ncounts, prob) - # Always copies - return self.ncounts()*norm.pdf(data['x'].data) + cache = self.ncounts()*norm.pdf([x for x, in data]) + + for n in cache: + yield n def set_source(self, source: Source): @@ -199,18 +277,28 @@ def ncounts(self) -> float: return ncounts - def expectation_density(self, data: EventDataInterface, copy:bool = True) -> np.ndarray: - + def set_data(self, data:EventDataInterface): if not isinstance(data, ToyData): raise TypeError(f"This class only support data of type {ToyData}") - expectation = np.zeros(data.nevents) + self._event_data = data - for source_name, psr in 
self._psr_copies.items(): - expectation += psr.expectation_density(data, copy=False) + def expectation_density(self, data: EventDataInterface = None) -> Iterable[float]: - # Always a copy - return expectation + if data is None: + + if self._event_data is None: + raise RuntimeError("You need to either provide the data or call set_data() first.") + + data = self._event_data + + elif isinstance(data, EventDataInterface): + + # Runs some checks + self.set_data(data) + + for expectations in zip(*[p.expectation_density(d) for p,d in zip(self._psr_copies.values(), itertools.tee(data))]): + yield np.sum(expectations) def set_model(self, model: Model): @@ -278,14 +366,15 @@ def expectation(self, data: BinnedDataInterface, copy = True) -> Histogram: like_fun.set_response(response) like_fun.set_background(bkg) cosi = ThreeMLPluginInterface('cosi', like_fun) -plugins = DataList(cosi) -like = JointLikelihood(model, plugins) # Before the fit, you can set the parameters initial values, bounds, etc. # This is passed to the minimizer. # In addition to model. Nuisance. cosi.bkg_parameter['norm'].value = 1 +plugins = DataList(cosi) +like = JointLikelihood(model, plugins) + # Run minimizer like.fit() print(like.minimizer) From 217979b4ce95a9b5b31f58110393cae573c75399 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Thu, 25 Sep 2025 16:05:53 -0400 Subject: [PATCH 069/133] Use set_data() instead of expectations(data). The main motivation is to allow easier caching in the unbinned case while keeping the same API as the binned. It's also nice that all interface input now start with set_, while the implementation inputs are part of the init Signed-off-by: Israel Martinez --- .../free_norm_threeml_binned_bkg.py | 12 ++- cosipy/interfaces/background_interface.py | 4 +- cosipy/interfaces/expectation_interface.py | 11 +- .../response/threeml_point_source_response.py | 44 ++++---- cosipy/response/threeml_response.py | 102 +++++++++++------- cosipy/statistics/likelihood_functions.py | 9 +- ...mple_crab_fit_threeml_plugin_interfaces.py | 4 +- .../examples/toy/toy_interfaces_example.py | 99 ++++++++++++----- 8 files changed, 184 insertions(+), 101 deletions(-) diff --git a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py index 6cb0fc1d..a53cd6c1 100644 --- a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py +++ b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py @@ -7,7 +7,7 @@ from astropy import units as u -from cosipy.interfaces import BinnedBackgroundInterface, BinnedDataInterface +from cosipy.interfaces import BinnedBackgroundInterface, BinnedDataInterface, DataInterface __all__ = ["FreeNormBinnedBackground"] @@ -116,12 +116,16 @@ def set_parameters(self, **parameters:Dict[str, u.Quantity]) -> None: def parameters(self) -> Dict[str, u.Quantity]: return {l:u.Quantity(n) for l,n in self.norms.items()} - def expectation(self, data:BinnedDataInterface, copy:bool = True)->Histogram: + def set_data(self, data: DataInterface): + + if data.axes != self.meausured_axes: + raise ValueError("Requested axes do not match the background component axes") + + def expectation(self, copy:bool = True)->Histogram: """ Parameters ---------- - data copy: If True, it will return an array that the user if free to modify. 
Otherwise, it will result a reference, possible to the cache, that @@ -132,8 +136,6 @@ def expectation(self, data:BinnedDataInterface, copy:bool = True)->Histogram: """ - if data.axes != self.meausured_axes: - raise ValueError("Requested axes do not match the background component axes") # Check if we can use the cache if self._expectation is None: diff --git a/cosipy/interfaces/background_interface.py b/cosipy/interfaces/background_interface.py index 8c8cf060..434163d0 100644 --- a/cosipy/interfaces/background_interface.py +++ b/cosipy/interfaces/background_interface.py @@ -10,7 +10,7 @@ logger = logging.getLogger(__name__) -from .expectation_interface import BinnedExpectationInterface, ExpectationDensityInterface +from .expectation_interface import BinnedExpectationInterface, ExpectationDensityInterface, ExpectationInterface __all__ = [ "BackgroundInterface", @@ -19,7 +19,7 @@ ] @runtime_checkable -class BackgroundInterface(Protocol): +class BackgroundInterface(ExpectationInterface, Protocol): def set_parameters(self, **parameters:Dict[str, u.Quantity]) -> None:... @property def parameters(self) -> Dict[str, u.Quantity]:... diff --git a/cosipy/interfaces/expectation_interface.py b/cosipy/interfaces/expectation_interface.py index 921e90f2..6d670eb8 100644 --- a/cosipy/interfaces/expectation_interface.py +++ b/cosipy/interfaces/expectation_interface.py @@ -2,7 +2,7 @@ import histpy import numpy as np -from cosipy.interfaces import BinnedDataInterface, EventDataInterface +from cosipy.interfaces import BinnedDataInterface, EventDataInterface, DataInterface __all__ = [ "ExpectationDensityInterface", @@ -10,18 +10,18 @@ ] @runtime_checkable -class ExpectationInterface(Protocol):... +class ExpectationInterface(Protocol): + def set_data(self, data: DataInterface):... @runtime_checkable class BinnedExpectationInterface(ExpectationInterface, Protocol): - def expectation(self, data:BinnedDataInterface, copy:bool)->histpy.Histogram: + def expectation(self, copy: Optional[bool])->histpy.Histogram: """ Parameters ---------- - data copy: - If True, it will return an array that the user if free to modify. + If True (default), it will return an array that the user if free to modify. Otherwise, it will result a reference, possible to the cache, that the user should not modify @@ -55,7 +55,6 @@ class ExpectationDensityInterface(ExpectationInterface, Protocol): of the iterator or itertools.tee. """ - def set_data(self, data:EventDataInterface):... def ncounts(self) -> float:... 
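    # A short usage sketch of the three mechanisms described in the docstring above
    # (`exp` stands for any implementation, `data` for an EventDataInterface):
    #
    #     exp.set_data(data)
    #     densities = exp.expectation_density()               # mechanism 1
    #
    #     densities = exp.expectation_density(data)           # mechanism 2
    #
    #     exp.set_data(data)
    #     it1, it2 = itertools.tee(iter(data))
    #     densities = exp.expectation_density(it1)            # mechanism 3, shared iteration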
def expectation_density(self, data: Optional[Union['EventDataInterface', Iterator]]) -> Iterable[float]: """ diff --git a/cosipy/response/threeml_point_source_response.py b/cosipy/response/threeml_point_source_response.py index 59d5f7b1..2c46b699 100644 --- a/cosipy/response/threeml_point_source_response.py +++ b/cosipy/response/threeml_point_source_response.py @@ -16,7 +16,7 @@ from astromodels.sources import Source, PointSource from scoords import SpacecraftFrame from histpy import Axes, Histogram, Axis, HealpixAxis -from cosipy.interfaces import BinnedThreeMLSourceResponseInterface, BinnedDataInterface +from cosipy.interfaces import BinnedThreeMLSourceResponseInterface, BinnedDataInterface, DataInterface from cosipy.response import FullDetectorResponse, PointSourceResponse from cosipy.spacecraftfile import SpacecraftHistory, SpacecraftAttitudeMap @@ -68,11 +68,19 @@ def __init__(self, # TODO: FullDetectorResponse -> BinnedInstrumentResponseInterface - self._sc_ori = sc_history - # Use setters for these + # Interface inputs + self._data = None self._source = None + # Other implementation inputs + self._sc_ori = sc_history + self._response = instrument_response + self._energy_axis = energy_axis + self._polarization_axis = polarization_axis + self._nside = nside + + # Cache # Prevent unnecessary calculations and new memory allocations # See this issue for the caveats of comparing models @@ -89,12 +97,6 @@ def __init__(self, self._psr = None - self._response = instrument_response - self._energy_axis = energy_axis - self._polarization_axis = polarization_axis - - self._nside = nside - def clear_cache(self): self._source = None @@ -129,18 +131,22 @@ def set_source(self, source: Source): self._source = source - def expectation(self, data:BinnedDataInterface, copy = True)-> Histogram: - # TODO: check coordsys from axis - # TODO: Earth occ always true in this case + def set_data(self, data: DataInterface): if not isinstance(data, EmCDSBinnedData): raise TypeError(f"Wrong data type '{type(data)}', expected {EmCDSBinnedData}.") - if self._source is None: + self._data = data + + def expectation(self, copy = True)-> Histogram: + # TODO: check coordsys from axis + # TODO: Earth occ always true in this case + + if self._data is None: raise RuntimeError("Call set_source() first.") - if self._sc_ori is None: - raise RuntimeError("Call set_spacecraft_history() first.") + if self._source is None: + raise RuntimeError("Call set_source() first.") # See this issue for the caveats of comparing models # https://github.com/threeML/threeML/issues/645 @@ -161,7 +167,7 @@ def expectation(self, data:BinnedDataInterface, copy = True)-> Histogram: # are expensive if self._psr is None or coord != self._last_convolved_source_skycoord: - coordsys = data.axes["PsiChi"].coordsys + coordsys = self._data.axes["PsiChi"].coordsys logger.info("... 
Calculating point source response ...") @@ -170,7 +176,7 @@ def expectation(self, data:BinnedDataInterface, copy = True)-> Histogram: dwell_time_map = self._sc_ori.get_dwell_map(coord, nside = self._nside) - self._psr = PointSourceResponse.from_dwell_time_map(data, + self._psr = PointSourceResponse.from_dwell_time_map(self._data, self._response, dwell_time_map, self._energy_axis, @@ -185,7 +191,7 @@ def expectation(self, data:BinnedDataInterface, copy = True)-> Histogram: earth_occ=True) self._psr = PointSourceResponse.from_scatt_map(coord, - data, + self._data, self._response, scatt_map, self._energy_axis, @@ -198,7 +204,7 @@ def expectation(self, data:BinnedDataInterface, copy = True)-> Histogram: self._source.spectrum.main.polarization) # Check if axes match - if data.axes != self._expectation.axes: + if self._data.axes != self._expectation.axes: raise ValueError( "Currently, the expectation axes must exactly match the detector response measurement axes") diff --git a/cosipy/response/threeml_response.py b/cosipy/response/threeml_response.py index 2de4c0f0..e808e470 100644 --- a/cosipy/response/threeml_response.py +++ b/cosipy/response/threeml_response.py @@ -1,7 +1,7 @@ import copy from cosipy.interfaces import BinnedThreeMLModelFoldingInterface, BinnedThreeMLSourceResponseInterface, \ - BinnedDataInterface + BinnedDataInterface, DataInterface from astromodels import Model from astromodels.sources import PointSource, ExtendedSource @@ -26,22 +26,34 @@ def __init__(self, Response for :class:`astromodels.sources.ExtendedSource`s It can be None is you don't plan to use it for extended sources. """ + + # Interface inputs + self._data = None + self._model = None + + # Implementation inputs self._psr = point_source_response self._esr = extended_source_response - self._source_responses = {} # Cache # Prevent unnecessary calculations and new memory allocations - self._expectation = None - self._model = None - # See this issue for the caveats of comparing models # https://github.com/threeML/threeML/issues/645 - self._last_convolved_model_dict = None + self._cached_model_dict = None + self._source_responses = {} + self._expectation = None + + def set_data(self, data: BinnedDataInterface): + + if self._expectation is None or self._expectation.axes != data.axes: + # Needs new memory allocation, and recompute everything + self._expectation = Histogram(data.axes) + + self._data = data def set_model(self, model: Model): """ - + You need to call set_data() first. Parameters ---------- @@ -52,13 +64,26 @@ def set_model(self, model: Model): """ - if model is self._model: - # No need to do anything here - return + self._model = model + + def _cache_source_responses(self): + """ + Create a copy of the PSR and ESR for each source + Returns + ------- + + """ + + # This accounts for the possibility of some sources being added or + # removed from the model. 
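+        # Copies made on a previous call are reused for sources still in the model;
+        # sources that were removed are simply not carried over to the new dict.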
+ new_source_responses = {} - self._source_responses = {} + for name,source in self._model.sources.items(): - for name,source in model.sources.items(): + if name in self._source_responses: + # Used cache + new_source_responses[name] = self._source_responses[name] + continue if isinstance(source, PointSource): @@ -67,7 +92,8 @@ def set_model(self, model: Model): psr_copy = self._psr.copy() psr_copy.set_source(source) - self._source_responses[name] = psr_copy + psr_copy.set_data(self._data) + new_source_responses[name] = psr_copy elif isinstance(source, ExtendedSource): if self._esr is None: @@ -75,15 +101,16 @@ def set_model(self, model: Model): esr_copy = self._esr.copy() esr_copy.set_source(source) - self._source_responses[name] = esr_copy + esr_copy.set_data(self._data) + new_source_responses[name] = esr_copy else: raise RuntimeError(f"The model contains the source {name} " f"of type {type(source)}. I don't know " "how to handle it!") - self._model = model + self._source_responses = new_source_responses - def expectation(self, data:BinnedDataInterface, copy:bool = True)->Histogram: + def expectation(self, copy:bool = True)->Histogram: """ Parameters @@ -95,37 +122,40 @@ def expectation(self, data:BinnedDataInterface, copy:bool = True)->Histogram: ------- """ + + if self._data is None or self._model is None: + raise RuntimeError("Call set_data() and set_model() first") + # See this issue for the caveats of comparing models # https://github.com/threeML/threeML/issues/645 current_model_dict = self._model.to_dict() - if self._expectation is None or self._expectation.axes != data.axes: - # Needs new memory allocation, and recompute everything - self._expectation = Histogram(data.axes) - else: - # If nothing has changed in the model, we can use the cached expectation - # as is. - # If the model has changed but the axes haven't, we can at least reuse - # is and prevent new memory allocation, we just need to zero it out - - # TODO: currently Model.__eq__ seems broken. It returns. True even - # if the internal parameters changed. Caching the expected value - # is not implemented. Remove the "False and" when fixed - if self._last_convolved_model_dict == current_model_dict: - if copy: - return self._expectation.copy() - else: - return self._expectation + # If nothing has changed in the model, we can use the cached expectation + # as is. + # If the model has changed but the axes haven't, we can at least reuse + # it and prevent new memory allocation, we just need to zero it out + + # TODO: currently Model.__eq__ seems broken. It returns. True even + # if the internal parameters changed. Caching the expected value + # is not implemented. 
Remove the "False and" when fixed + if self._cached_model_dict is not None and self._cached_model_dict == current_model_dict: + if copy: + return self._expectation.copy() else: - self._expectation.clear() + return self._expectation + else: + self._expectation.clear() + + # Create a copy of the PSR and ESR for each source + self._cache_source_responses() # Convolve all sources with the response for source_name,psr in self._source_responses.items(): - self._expectation += psr.expectation(data) + self._expectation += psr.expectation() # See this issue for the caveats of comparing models # https://github.com/threeML/threeML/issues/645 - self._last_convolved_model_dict = current_model_dict + self._cached_model_dict = current_model_dict if copy: return self._expectation.copy() diff --git a/cosipy/statistics/likelihood_functions.py b/cosipy/statistics/likelihood_functions.py index f2b3f519..c9f2a5d5 100644 --- a/cosipy/statistics/likelihood_functions.py +++ b/cosipy/statistics/likelihood_functions.py @@ -143,14 +143,19 @@ def get_log_like(self) -> float: if self._data is None or self._response is None: raise RuntimeError("Set data and response before calling this function.") + self._response.set_data(self._data) + + if self.has_bkg: + self._bkg.set_data(self._data) + # Compute expectation including background # If we don't have background, we won't modify the expectation, so # it's safe to use the internal cache. - expectation = self._response.expectation(self._data, copy = self.has_bkg) + expectation = self._response.expectation(copy = self.has_bkg) if self.has_bkg: # We won't modify the bkg expectation, so it's safe to use the internal cache - expectation += self._bkg.expectation(self._data, copy = False) + expectation += self._bkg.expectation(copy = False) # Get the arrays expectation = expectation.contents diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py index c55dce4b..7b3a0207 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py @@ -194,7 +194,6 @@ def main(): # matching the behavior of v0.3. This is all the current BinnedInstrumentResponse can do. # In principle, this can be decoupled, and a BinnedInstrumentResponseInterface implementation # can provide the response for an arbitrary directions, Ei and Pol values. - # NOTE: this is currently only implemented for data in local coords psr = BinnedThreeMLPointSourceResponse(instrument_response, sc_history=sc_orientation, energy_axis = dr.axes['Ei'], @@ -332,7 +331,8 @@ def main(): spectral_shape=spectrum_inj) # Spectral model psr.set_source(source_inj) - expectation_inj = psr.expectation(data, copy=True) + psr.set_data(data) + expectation_inj = psr.expectation(copy=True) # The summary of the results above tell you the optimal values of the parameters, as well as the errors. 
Propogate the errors to the "evaluate_at" method of the spectrum diff --git a/docs/api/interfaces/examples/toy/toy_interfaces_example.py b/docs/api/interfaces/examples/toy/toy_interfaces_example.py index 4f4494fe..fccc3963 100644 --- a/docs/api/interfaces/examples/toy/toy_interfaces_example.py +++ b/docs/api/interfaces/examples/toy/toy_interfaces_example.py @@ -7,7 +7,7 @@ import astropy.units as u from cosipy import SpacecraftHistory from cosipy.interfaces.background_interface import BackgroundDensityInterface -from cosipy.interfaces.data_interface import EventData, EventDataInterface +from cosipy.interfaces.data_interface import EventData, EventDataInterface, DataInterface from cosipy.statistics import PoissonLikelihood, UnbinnedLikelihood @@ -129,6 +129,7 @@ def __init__(self): self._unit_expectation[:] = 1 / self._unit_expectation.nbins self._norm = 1 + self._binned_data = None self._event_data = None # Doesn't need to be normalized @@ -140,11 +141,20 @@ def set_parameters(self, **parameters:u.Quantity) -> None: def ncounts(self) -> float: return self._norm - def set_data(self, data: EventDataInterface) -> None: + def set_data(self, data: DataInterface) -> None: + if not isinstance(data, ToyData): raise TypeError(f"This class only support data of type {ToyData}") - self._event_data = data + if isinstance(data, BinnedDataInterface): + + if data.axes != self._unit_expectation.axes: + raise ValueError("Wrong axes. I have fixed axes.") + + self._binned_data = data + + if isinstance(data, EventDataInterface): + self._event_data = data def expectation_density(self, data: Optional[Union['EventDataInterface', Iterator]] = None) -> Iterable[float]: if data is None: @@ -168,13 +178,7 @@ def expectation_density(self, data: Optional[Union['EventDataInterface', Iterato def parameters(self) -> Dict[str, u.Quantity]: return {'norm': u.Quantity(self._norm)} - def expectation(self, data: BinnedDataInterface, copy = True) -> Histogram: - - if not isinstance(data, ToyData): - raise TypeError(f"Wrong data type '{type(data)}', expected {ToyData}.") - - if data.axes != self._unit_expectation.axes: - raise ValueError("Wrong axes. I have fixed axes.") + def expectation(self, copy = True) -> Histogram: # Always a copy return self._unit_expectation * self._norm @@ -190,6 +194,9 @@ def __init__(self): self._unit_expectation = Histogram(toy_axis, contents=np.diff(norm.cdf(toy_axis.edges))) + self._binned_data = None + self._event_data = None + def ncounts(self) -> float: if self._source is None: @@ -200,11 +207,20 @@ def ncounts(self) -> float: ns_events = self._source.spectrum.main.shape.k.value return ns_events - def set_data(self, data:EventDataInterface): + def set_data(self, data: DataInterface) -> None: + if not isinstance(data, ToyData): raise TypeError(f"This class only support data of type {ToyData}") - self._event_data = data + if isinstance(data, BinnedDataInterface): + + if data.axes != self._unit_expectation.axes: + raise ValueError("Wrong axes. 
I have fixed axes.") + + self._binned_data = data + + if isinstance(data, EventDataInterface): + self._event_data = data def expectation_density(self, data:Optional[Union[EventDataInterface, Iterator]] = None) -> Iterable[float]: @@ -235,13 +251,7 @@ def set_source(self, source: Source): self._source = source - def expectation(self, data: BinnedDataInterface, copy = True) -> Histogram: - - if not isinstance(data, ToyData): - raise TypeError(f"Wrong data type '{type(data)}', expected {ToyData}.") - - if data.axes != self._unit_expectation.axes: - raise ValueError("Wrong axes. I have fixed axes.") + def expectation(self, copy = True) -> Histogram: if self._source is None: raise RuntimeError("Set a source first") @@ -265,9 +275,14 @@ def __init__(self, psr: BinnedThreeMLSourceResponseInterface): if not isinstance(psr, ToyPointSourceResponse): raise TypeError(f"Wrong psr type '{type(psr)}', expected {ToyPointSourceResponse}.") + self._model = None + self._psr = psr self._psr_copies = {} + self._binned_data = None + self._event_data = None + def ncounts(self) -> float: ncounts = 0 @@ -277,14 +292,22 @@ def ncounts(self) -> float: return ncounts - def set_data(self, data:EventDataInterface): + def set_data(self, data: DataInterface) -> None: + if not isinstance(data, ToyData): raise TypeError(f"This class only support data of type {ToyData}") - self._event_data = data + if isinstance(data, BinnedDataInterface): + + self._binned_data = data + + if isinstance(data, EventDataInterface): + self._event_data = data def expectation_density(self, data: EventDataInterface = None) -> Iterable[float]: + self._cache_psr_copies() + if data is None: if self._event_data is None: @@ -302,21 +325,39 @@ def expectation_density(self, data: EventDataInterface = None) -> Iterable[float def set_model(self, model: Model): - self._psr_copies = {} + self._model = None + + def _cache_psr_copies(self): + + new_psr_copies = {} + for name,source in model.sources.items(): + + if name in self._psr_copies: + # Use cache + new_psr_copies[name] = self._psr_copies[name] + psr_copy = self._psr.copy() psr_copy.set_source(source) - self._psr_copies[name] = psr_copy - def expectation(self, data: BinnedDataInterface, copy = True) -> Histogram: + if isinstance(psr_copy, BinnedThreeMLSourceResponseInterface): + psr_copy.set_data(self._binned_data) - if not isinstance(data, ToyData): - raise TypeError(f"Wrong data type '{type(data)}', expected {ToyData}.") + if isinstance(psr_copy, UnbinnedThreeMLSourceResponseInterface): + psr_copy.set_data(self._event_data) + + new_psr_copies[name] = psr_copy + + self._psr_copies = new_psr_copies + + def expectation(self, copy = True) -> Histogram: + + self._cache_psr_copies() - expectation = Histogram(data.axes) + expectation = Histogram(self._binned_data.axes) for source_name,psr in self._psr_copies.items(): - expectation += psr.expectation(data, copy = False) + expectation += psr.expectation(copy = False) # Always a copy return expectation @@ -362,6 +403,7 @@ def expectation(self, data: BinnedDataInterface, copy = True) -> Histogram: #like_fun = PoissonLikelihood() like_fun = UnbinnedLikelihood() +# Call set_data() before set_response() and set_background() like_fun.set_data(data) like_fun.set_response(response) like_fun.set_background(bkg) @@ -396,7 +438,6 @@ def expectation(self, data: BinnedDataInterface, copy = True) -> Histogram: spectrum.k.value = s cosi.bkg_parameter['norm'].value = b - cosi._update_bkg_parameters() # Fix the need for this line loglike[i,j] = cosi.get_log_like() From 
954bd156cc4c9025af2c239aa2ec4ddf6328c02e Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Fri, 26 Sep 2025 14:09:37 -0400 Subject: [PATCH 070/133] Add tstart/tstop for all data types Signed-off-by: Israel Martinez --- cosipy/interfaces/data_interface.py | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/cosipy/interfaces/data_interface.py b/cosipy/interfaces/data_interface.py index 3acca023..37e36e16 100644 --- a/cosipy/interfaces/data_interface.py +++ b/cosipy/interfaces/data_interface.py @@ -6,6 +6,8 @@ from .measurements import Measurement +from astropy.time import Time + import histpy __all__ = ["DataInterface", @@ -15,9 +17,21 @@ @runtime_checkable class DataInterface(Protocol): - """ - Not much... - """ + + @property + def tstart(self) -> Union[Time, None]: + """ + Start time of data taking + """ + return None + + @property + def tstop(self) -> Union[Time, None]: + """ + Start time of data taking + """ + return None + @runtime_checkable class EventDataInterface(DataInterface, Protocol): From e7a100ba3fcfd83b6c0230c6ed4fd05a5b332d76 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Mon, 29 Sep 2025 09:13:00 -0400 Subject: [PATCH 071/133] Change meaning of nevents to mean events after selection Signed-off-by: Israel Martinez --- cosipy/interfaces/data_interface.py | 39 ++++++++++++++++------------- 1 file changed, 21 insertions(+), 18 deletions(-) diff --git a/cosipy/interfaces/data_interface.py b/cosipy/interfaces/data_interface.py index 37e36e16..ebf8e13a 100644 --- a/cosipy/interfaces/data_interface.py +++ b/cosipy/interfaces/data_interface.py @@ -45,11 +45,14 @@ def __getitem__(self, item:Union[str, int]) -> Union[Tuple, Measurement]: def __iter__(self) -> Iterator[Tuple]: """ - Only loops through unmasked values + Only loops through selected values """ @property - def nevents(self) -> int:... + def nevents(self) -> int: + """ + After selection + """ @property def labels(self) -> Tuple[str]:... @@ -63,8 +66,8 @@ def nmeasurements(self) -> int: def set_selection(self, selection: Union[EventSelectorInterface, None]) -> None: """ - None would drop the selection. Implementation might not implement the ability to drop - a selection when the underlying data was discarded for efficiency reasons. + None would drop the selection. Implementation might not implement the ability to change or drop + a selection --e.g. the underlying data was discarded for efficiency reasons. """ @property @@ -73,10 +76,6 @@ def selection(self) -> Union[EventSelectorInterface, None]: The current selection set """ - @property - def nselected(self) -> int:... 
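# A sketch of the selection flow under this change (`selector`, `energy` and `phi`
# stand in for an EventSelectorInterface and two Measurement objects):
#
#     data = EventData(energy, phi)     # nevents == total number of events
#     data.set_selection(selector)      # cached count is invalidated
#     data.nevents                      # lazily recomputed as sum(selector.select(data))
#     data.set_selection(None)          # back to the full, unselected set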
- - class EventData(EventDataInterface): """ Generic event data from measurement set @@ -95,8 +94,8 @@ def __init__(self, *data:Measurement): if size != data_i.size: raise ValueError("All measurement arrays must have the same size") + self._nevents_total = size self._nevents = size - self._nselected = size self._events = data self._labels = tuple([d.label for d in data]) self._selection = None @@ -115,6 +114,11 @@ def __iter__(self) -> Iterator[Tuple]: @property def nevents(self): + + if self._nevents == -1: + # Not yet cached since last set selection + self._nevents = sum(self._selection.select(self)) + return self._nevents @property @@ -129,26 +133,25 @@ def set_selection(self, selection:EventSelectorInterface) -> None: if selection is None: self._selection = None - self._nselected = self._nevents + self._nevents = self._nevents_total else: self._selection = selection # Signals the need to recompute this number - self._nselected = -1 + self._nevents = -1 @property def selection(self) -> EventSelectorInterface: return self._selection - def nselected(self) -> int: - - if self._nselected == -1: - # Not yet cached since last set selection - self._nselected = sum(self._selection.select(self)) - - return self._nselected + @property + def nevents_total(self) -> int: + """ + Before selection + """ + return self._nevents_total @runtime_checkable From 60d90287cd53382436bfb5ce8560757898b76fdb Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Mon, 29 Sep 2025 17:21:07 -0400 Subject: [PATCH 072/133] Rework unbinned data interfaces Signed-off-by: Israel Martinez --- cosipy/data_io/UnBinnedData.py | 55 +--- cosipy/interfaces/__init__.py | 2 +- cosipy/interfaces/data_interface.py | 152 ++++------ cosipy/interfaces/event.py | 83 +++++ .../event_data_processor_interface.py | 31 ++ cosipy/interfaces/event_selection.py | 45 +-- cosipy/interfaces/expectation_interface.py | 45 +-- cosipy/interfaces/measurements.py | 283 ------------------ .../examples/toy/toy_interfaces_example.py | 174 ++++++----- 9 files changed, 278 insertions(+), 592 deletions(-) create mode 100644 cosipy/interfaces/event.py create mode 100644 cosipy/interfaces/event_data_processor_interface.py delete mode 100644 cosipy/interfaces/measurements.py diff --git a/cosipy/data_io/UnBinnedData.py b/cosipy/data_io/UnBinnedData.py index 584f856c..ca6dc311 100644 --- a/cosipy/data_io/UnBinnedData.py +++ b/cosipy/data_io/UnBinnedData.py @@ -9,6 +9,7 @@ import time import cosipy from cosipy.data_io import DataIO +from cosipy.interfaces.data_interface import TimeTagEventDataInterface, EventDataWithEnergyInterface from cosipy.spacecraftfile import SpacecraftHistory import gzip import astropy.coordinates as astro_co @@ -25,10 +26,6 @@ import os import time -from cosipy.interfaces import QuantityMeasurement, AngleMeasurement, SkyCoordMeasurement, CachedQuantityMeasurement, \ - CachedAngleMeasurement, CachedSkyCoordMeasurement, CachedSkyCoordUnitSphericalMeasurement -from cosipy.interfaces.data_interface import EventData - logger = logging.getLogger(__name__) @@ -870,53 +867,3 @@ def cut_SAA_events(self, unbinned_data=None, output_name=None): return -class MeasuredEnergy(QuantityMeasurement, ABC): - """ - - """ - -class CachedMeasuredEnergy(CachedQuantityMeasurement, MeasuredEnergy): - """ - - """ - -class MeasuredScatteringAngle(AngleMeasurement, ABC): - """ - - """ - -class CachedMeasuredScatteringAngle(CachedAngleMeasurement, MeasuredScatteringAngle): - """ - - """ - - -class MeasuredScatteringDirection(SkyCoordMeasurement, ABC): - """ - - """ - 
-class CachedMeasuredScatteringDirection(CachedSkyCoordUnitSphericalMeasurement, MeasuredScatteringDirection): - """ - - """ - - -class EmCDSEventData(EventData): - - def __init__(self, - energy:MeasuredEnergy, - scattering_angle:MeasuredScatteringAngle, - scattering_direction:MeasuredScatteringDirection): - - if energy.label != "Em": - raise ValueError("The measured energy is expected to be labeled 'Em'") - - if scattering_angle.label != "Phi": - raise ValueError("The scattering angle is expected to be labeled 'Phi'") - - if scattering_direction.label != "PsiChi": - raise ValueError("The scattering direction is expected to be labeled 'PsiChi'") - - super().__init__(energy, scattering_angle, scattering_direction) - diff --git a/cosipy/interfaces/__init__.py b/cosipy/interfaces/__init__.py index b272f498..723a3e78 100644 --- a/cosipy/interfaces/__init__.py +++ b/cosipy/interfaces/__init__.py @@ -1,4 +1,4 @@ -from .measurements import * +from .event import * from .data_interface import * from .background_interface import * from .likelihood_interface import * diff --git a/cosipy/interfaces/data_interface.py b/cosipy/interfaces/data_interface.py index ebf8e13a..29aaa8f3 100644 --- a/cosipy/interfaces/data_interface.py +++ b/cosipy/interfaces/data_interface.py @@ -1,22 +1,29 @@ -from typing import Protocol, runtime_checkable, Dict, Type, Any, Tuple, Iterator, Union +import itertools +from abc import abstractmethod +from typing import Protocol, runtime_checkable, Dict, Type, Any, Tuple, Iterator, Union, Sequence, Iterable import numpy as np -from .event_selection import EventSelectorInterface -from histpy import Histogram, Axes +from astropy.units import Unit -from .measurements import Measurement +from . import EventWithEnergy +from .event import Event, FancyEnergyDataMixin, FancyTimeDataMixin, TimetaggedEvent +from histpy import Histogram, Axes from astropy.time import Time +# Guard to prevent circular import +from typing import TYPE_CHECKING +if TYPE_CHECKING: + from .event_selection import EventSelectorInterface + import histpy __all__ = ["DataInterface", "EventDataInterface", "BinnedDataInterface", - "EventData"] +] -@runtime_checkable -class DataInterface(Protocol): +class DataInterface: @property def tstart(self) -> Union[Time, None]: @@ -32,134 +39,79 @@ def tstop(self) -> Union[Time, None]: """ return None + @property + def event_type(self) -> Type[Event]: + """ + Type returned by __getitem__ + """ + +class BinnedDataInterface(DataInterface): + @property + def data(self) -> Histogram:... + @property + def axes(self) -> Axes:... + -@runtime_checkable -class EventDataInterface(DataInterface, Protocol): +class EventDataInterface(DataInterface, Iterable): - def __getitem__(self, item:Union[str, int]) -> Union[Tuple, Measurement]: + def __iter__(self) -> Iterator[Event]: """ - If item is: - - str: the value of specific measurement for all events - - int: all measurements for an specific event (whether masked or unmasked) + Return one Event at a time """ - def __iter__(self) -> Iterator[Tuple]: + def __getitem__(self, item: int) -> Event: """ - Only loops through selected values + Convenience method. Pretty slow in general. It's suggested that + the implementations override it """ + return next(itertools.islice(self, item, None)) @property + @abstractmethod def nevents(self) -> int: """ After selection """ - @property - def labels(self) -> Tuple[str]:... - - @property - def nmeasurements(self) -> int: - """ - Number of Measurements. 
Each measurement can potentially have more tha one value - --e.g. RA,Dec can be considered a single measurement - """ + def __len__(self): + return self.nevents - def set_selection(self, selection: Union[EventSelectorInterface, None]) -> None: + def set_selection(self, selection: Union["EventSelectorInterface", None]) -> None: """ None would drop the selection. Implementation might not implement the ability to change or drop a selection --e.g. the underlying data was discarded for efficiency reasons. """ @property - def selection(self) -> Union[EventSelectorInterface, None]: + def selection(self) -> Union["EventSelectorInterface", None]: """ The current selection set """ -class EventData(EventDataInterface): - """ - Generic event data from measurement set - """ - - def __init__(self, *data:Measurement): - - # Check shame - size = None - - for data_i in data: + def get_binned_data(self, *args, **kwargs) -> BinnedDataInterface: + raise NotImplementedError - if size is None: - size = data_i.size - else: - if size != data_i.size: - raise ValueError("All measurement arrays must have the same size") +class TimeTagEventDataInterface(FancyTimeDataMixin, EventDataInterface): - self._nevents_total = size - self._nevents = size - self._events = data - self._labels = tuple([d.label for d in data]) - self._selection = None - - def __getitem__(self, item:[Union[str, int]]) -> Union[Tuple, Measurement]: - - if isinstance(item, str): - return self._events[self._labels.index(item)] - elif isinstance(item, int): - return tuple([d[item] for d in self._events]) - else: - raise TypeError("Index must be either a measurement label or an entry position.") - - def __iter__(self) -> Iterator[Tuple]: - return zip(*self._events) - - @property - def nevents(self): - - if self._nevents == -1: - # Not yet cached since last set selection - self._nevents = sum(self._selection.select(self)) - - return self._nevents + def __getitem__(self, item: int) -> TimetaggedEvent:... @property - def labels(self): - return self._labels + @abstractmethod + def jd1(self) -> Iterable[float]: ... @property - def nmeasurements(self) -> int: - return len(self._events) - - def set_selection(self, selection:EventSelectorInterface) -> None: - - if selection is None: - self._selection = None - self._nevents = self._nevents_total - else: - - self._selection = selection + @abstractmethod + def jd2(self) -> Iterable[float]: ... - # Signals the need to recompute this number - self._nevents = -1 +class EventDataWithEnergyInterface(FancyEnergyDataMixin, EventDataInterface): - @property - def selection(self) -> EventSelectorInterface: - return self._selection + def __getitem__(self, item: int) -> EventWithEnergy:... @property - def nevents_total(self) -> int: - """ - Before selection - """ - - return self._nevents_total - + @abstractmethod + def energy_value(self) -> Iterable[float]:... -@runtime_checkable -class BinnedDataInterface(DataInterface, Protocol): @property - def data(self) -> Histogram:... - @property - def axes(self) -> Axes:... - - + @abstractmethod + def energy_unit(self) -> Unit:... 
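As a rough sketch of how the reworked interfaces compose, a minimal array-backed
implementation of EventDataWithEnergyInterface could look like the following. The
Sketch* names are made up for illustration, and it relies only on numpy, astropy,
and the EventWithEnergy base defined in cosipy/interfaces/event.py just below:

import numpy as np
from astropy import units as u
from astropy.units import Unit

from cosipy.interfaces import EventWithEnergy
from cosipy.interfaces.data_interface import EventDataWithEnergyInterface


class SketchEnergyEvent(EventWithEnergy):
    # A single event carrying one measured energy, stored in keV

    def __init__(self, energy_kev: float):
        self._energy_kev = energy_kev

    def __len__(self) -> int:
        return 1

    def __getitem__(self, item):
        return (self._energy_kev,)[item]

    @property
    def energy_value(self) -> float:
        return self._energy_kev

    @property
    def energy_unit(self) -> Unit:
        return u.keV


class SketchEnergyEventData(EventDataWithEnergyInterface):
    # Array-backed container; iterating yields one SketchEnergyEvent per entry

    def __init__(self, energies_kev):
        self._energies = np.asarray(energies_kev, dtype=float)

    def __iter__(self):
        return (SketchEnergyEvent(e) for e in self._energies)

    @property
    def nevents(self) -> int:
        return self._energies.size

    @property
    def energy_value(self) -> np.ndarray:
        return self._energies

    @property
    def energy_unit(self) -> Unit:
        return u.keV


events = SketchEnergyEventData([150.0, 511.0, 1157.0])
events.nevents                 # 3
events.energy                  # Quantity [150., 511., 1157.] keV, via FancyEnergyDataMixin
next(iter(events)).energy      # Quantity 150. keV, same mixin at the per-event level

The same pattern extends to TimeTagEventDataInterface, where the mixin turns the raw
jd1/jd2 columns into an astropy Time.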
diff --git a/cosipy/interfaces/event.py b/cosipy/interfaces/event.py new file mode 100644 index 00000000..e7ad6d20 --- /dev/null +++ b/cosipy/interfaces/event.py @@ -0,0 +1,83 @@ +from abc import ABC, abstractmethod +from typing import Sequence, Union + +from astropy.time import Time +from astropy.units import Quantity, Unit + +__all__ = [ + "Event", + "TimetaggedEvent", + "EventWithEnergy", +] + +class Event(Sequence, ABC): + """ + Need to implement __getitem__ + """ + + @classmethod + @abstractmethod + def size(cls) -> int: + """ + Number of values + """ + + def __len__(self): + return self.size + +class FancyTimeDataMixin(ABC): + + @property + @abstractmethod + def jd1(self) -> Union[float, Sequence[float]]:... + + @property + @abstractmethod + def jd2(self) -> Union[float, Sequence[float]]:... + + @property + def time(self) -> Time: + """ + Add fancy time + """ + return Time(self.jd1, self.jd2, format = 'jd') + +class TimetaggedEvent(FancyTimeDataMixin, Sequence): + + @property + @abstractmethod + def jd1(self) -> float:... + + @property + @abstractmethod + def jd2(self) -> float:... + + +class FancyEnergyDataMixin(ABC): + + @property + @abstractmethod + def energy_value(self) -> Union[float, Sequence[float]]:... + + @property + @abstractmethod + def energy_unit(self) -> Unit:... + + @property + def energy(self) -> Quantity: + """ + Add fancy energy quantity + """ + return Quantity(self.energy_value, self.energy_unit) + + +class EventWithEnergy(FancyEnergyDataMixin, Sequence): + + @property + @abstractmethod + def energy_value(self) -> float:... + + @property + @abstractmethod + def energy_unit(self) -> Unit:... + diff --git a/cosipy/interfaces/event_data_processor_interface.py b/cosipy/interfaces/event_data_processor_interface.py new file mode 100644 index 00000000..967d4076 --- /dev/null +++ b/cosipy/interfaces/event_data_processor_interface.py @@ -0,0 +1,31 @@ +from typing import Protocol, Optional, Iterable + +from cosipy.interfaces import EventDataInterface, Event + +class EventDataProcessorInterface(Protocol): + """ + Get a output per event + + Iterables can be anything. The implementations do not necessarily need to + process the data event by event. + """ + + def set_data(self, data: EventDataInterface):... + + def process(self, data: Optional[Iterable[Event]]) -> Iterable: + """ + 2 calling mechanisms + + 1. + processor.process() + + In this case process() will call iter(data), where data was passed though set_data() + + 2. + processor.process(data_subset: Iterable[float]) + + The implementation will use the general data properties from the set_data() call, + but will call iter(data_subset) instead. + This allows the user to cache event data, in addition to looping over only a + portion of the data + """ \ No newline at end of file diff --git a/cosipy/interfaces/event_selection.py b/cosipy/interfaces/event_selection.py index a2b14ed0..61de1e56 100644 --- a/cosipy/interfaces/event_selection.py +++ b/cosipy/interfaces/event_selection.py @@ -1,48 +1,13 @@ from typing import Protocol, runtime_checkable, Dict, Any, Iterator, Sequence, Generator, Iterable, Union, Optional -from typing import TYPE_CHECKING -if TYPE_CHECKING: - # Guard to prevent circular import - from .data_interface import EventDataInterface +from . import Event +from .event_data_processor_interface import EventDataProcessorInterface @runtime_checkable -class EventWeightingInterface(Protocol): - """ - 3 calling mechanism +class EventSelectorInterface(EventDataProcessorInterface, Protocol): - 1. 
- weights.set_data(data) - weights.weight() - - In this case weight() will call iter(data) - - 2. - weights.weight(data) - - In this case weight() will first call set_data(data) (if needed), and then iter(data). - - 3. - weights.set_data(data) - weights.weight(iterator) - - This prevents weight() from calling iter(data). However, it is assumed that - iterator is equivalent to iter(data). This allows to use cached versions - of the iterator or itertools.tee. - """ - - def set_data(self, data:'EventDataInterface'):... - - def weight(self, data: Optional[Union['EventDataInterface', Iterator]]) -> Iterable[float]:... - -@runtime_checkable -class EventSelectorInterface(EventWeightingInterface, Protocol): - - def set_data(self, data:'EventDataInterface'):... - - def select(self, data: Optional[Union['EventDataInterface', Iterator]]) -> Iterable[bool]: + def select(self, data: Optional[Iterable[Event]]) -> Iterable[bool]: """ Returns True to keep an event, False to filter it out. """ - - def weight(self, data: Optional[Union['EventDataInterface', Iterator]]) -> Iterable[float]: - return self.select(data) + return self.process(data) diff --git a/cosipy/interfaces/expectation_interface.py b/cosipy/interfaces/expectation_interface.py index 6d670eb8..d1c30916 100644 --- a/cosipy/interfaces/expectation_interface.py +++ b/cosipy/interfaces/expectation_interface.py @@ -2,13 +2,16 @@ import histpy import numpy as np -from cosipy.interfaces import BinnedDataInterface, EventDataInterface, DataInterface +from cosipy.interfaces import BinnedDataInterface, EventDataInterface, DataInterface, Event __all__ = [ "ExpectationDensityInterface", "BinnedExpectationInterface" ] +from cosipy.interfaces.event_data_processor_interface import EventDataProcessorInterface + + @runtime_checkable class ExpectationInterface(Protocol): def set_data(self, data: DataInterface):... @@ -31,41 +34,17 @@ def expectation(self, copy: Optional[bool])->histpy.Histogram: """ @runtime_checkable -class ExpectationDensityInterface(ExpectationInterface, Protocol): - """ - 3 calling mechanisms - - 1. - expectation.set_data(data) - expectation.expectation_density() - - In this case expectation_density() will call iter(data) - - 2. - expectation.expectation_density(data) - - In this case expectation_density() will first call set_data(data) (if needed), and then iter(data). - - 3. - expectation.set_data(data) - expectation.expectation_density(iterator) - - This prevents expectation_density() from calling iter(data). However, it is assumed that - iterator is equivalent to iter(data). This allows to use cached versions - of the iterator or itertools.tee. - """ +class ExpectationDensityInterface(ExpectationInterface, EventDataProcessorInterface, Protocol): - def ncounts(self) -> float:... 
- def expectation_density(self, data: Optional[Union['EventDataInterface', Iterator]]) -> Iterable[float]: + def ncounts(self) -> float: """ - Parameters - ---------- - data - - Returns - ------- - + Total expected counts """ + def expectation_density(self, data: Iterable[Event]) -> Iterable[float]: + return self.process(data) + + def get_binned_expectation(self, *args, **kwargs): + raise NotImplementedError diff --git a/cosipy/interfaces/measurements.py b/cosipy/interfaces/measurements.py deleted file mode 100644 index 2677cb51..00000000 --- a/cosipy/interfaces/measurements.py +++ /dev/null @@ -1,283 +0,0 @@ -import itertools -from abc import ABC, abstractmethod -from collections.abc import Iterator, Sequence -from typing import Tuple, Type, TypeVar, Generic, ClassVar, Union, Generator, Iterable - -import numpy as np -from astropy.coordinates import SkyCoord, Angle, BaseCoordinateFrame, UnitSphericalRepresentation, \ - CartesianRepresentation -from astropy.units import Quantity, Unit -from numpy.typing import NDArray - -class MeasurementIterator(Iterator): - - def __next__(self) -> Union[int, float, Tuple[Union[int, float]]]:... - -class Measurement(Sequence): - - def __init__(self, label:str, *args, **kwargs): - self._label = label - - # Needs __len__ and either __iter__ or __getitem__ (or both) - - @property - def label(self) -> str: - return self._label - - @property - def size(self) -> int: - return len(self) - - @property - @abstractmethod - def value_type(self) -> Union[Type, Tuple[Type]]: - """ - Types return by __iter__ and __getitem__ - """ - - @property - def nvalues(self) -> int: - if isinstance(self.value_type, tuple): - return len(self.value_type) - else: - return 1 - - def cache(self, start=None, stop=None, step=None) -> Iterable: - values = [] - for value in itertools.islice(self, start, stop, step): - values.append(value) - - return values - -class FloatingMeasurement(Measurement, ABC): - - @property - def value_type(self) -> Union[Type, Tuple[Type]]: - return float - - def cache(self, start=None, stop=None, step=None) -> NDArray[float]: - values = super().cache(start, stop, step) - return np.asarray(values) - -class CachedFloatingMeasurement(FloatingMeasurement): - - def __init__(self, label:str, array: np.ndarray[float]): - if array.ndim != 1: - raise ValueError("This class handles 1D and only 1D arrays") - - super().__init__(label) - self._array = array - - def __len__(self): - return self._array.size - - def __iter__(self): - return iter(self._array) - - def __getitem__(self, item): - return self._array[item] - - def cache(self, start=None, stop=None, step=None) -> np.ndarray[float]: - return self._array[start, stop, step] - - -class QuantityMeasurement(FloatingMeasurement, ABC): - - @property - @abstractmethod - def unit(self) -> Unit:... 
- - def fancy_iter(self, start = None, stop = None, step = None) -> Generator[Quantity, None, None]: - for value in itertools.islice(self, start, stop, step): - yield Quantity(value, self.unit) - - def cache(self, start = None, stop = None, step = None) -> Quantity: - return Quantity(super().cache(start, stop, step), self.unit) - - -class CachedQuantityMeasurement(CachedFloatingMeasurement, QuantityMeasurement): - - def __init__(self, label:str, array: Quantity): - if array.ndim != 1: - raise ValueError("This class handles 1D and only 1D arrays") - - super().__init__(label, array.value) - self._array = array.value - self._unit = array.unit - - def unit(self) -> Unit: - return self._unit - - def cache(self, start = None, stop = None, step = None) -> Quantity: - return Quantity(self._array[start:stop:step], self.unit) - - def fancy_iter(self, start=None, stop=None, step=None) -> Generator[Quantity, None, None]: - return iter(self.cache) - -class AngleMeasurement(QuantityMeasurement, ABC): - - def fancy_iter(self, start=None, stop=None, step=None) -> Generator[Quantity, None, None]: - for value in itertools.islice(self, start, stop, step): - yield Angle(value, self.unit) - - def cache(self, start=None, stop=None, step=None) -> Angle: - return Angle(super().cache(start, stop, step), self.unit) - -class CachedAngleMeasurement(CachedQuantityMeasurement): - - def __init__(self, label:str, array: Angle): - super().__init__(label, Quantity(array.value, array.unit)) - - def cache(self, start = None, stop = None, step = None) -> Quantity: - return Angle(self._array[start:stop:step], self.unit) - -class SkyCoordMeasurement(Measurement, ABC): - """ - - """ - - @property - @abstractmethod - def frame(self) -> BaseCoordinateFrame:... - @property - @abstractmethod - def unit(self) -> Unit:... - - def as_unit_spherical(self) -> 'SkyCoordUnitSphericalMeasurement':... - def as_cartesian(self) -> 'SkyCoordCartesianMeasurement':... - -class CachedSkyCoordMeasurement(SkyCoordMeasurement, ABC): - - def __init__(self, label: str, coord: SkyCoord): - - super().__init__(label) - - if coord.ndim != 1: - raise ValueError("This class handles 1D and only 1D SkyCoord arrays") - - self._frame = coord.frame - - self._unit = None # Set by child class. Type Unit - self._data = None # Set by child class. array of shape (self.size,self.nvalues) - - def __len__(self): - return self._data.shape[0] - - @property - def frame(self) -> BaseCoordinateFrame: - return self._unit - - @property - def unit(self) -> Unit: - return self._unit - - def __iter__(self): - return iter(self._data) - - def __getitem__(self, item): - return self._data[item] - - @abstractmethod - def cache(self, start=None, stop=None, step=None) -> SkyCoord:... 
- - def fancy_iter(self, start=None, stop=None, step=None) -> Generator[SkyCoord, None, None]: - return iter(self.cache) - - -class SkyCoordUnitSphericalMeasurement(SkyCoordMeasurement, ABC): - - @property - def value_type(self) -> Union[Type, Tuple[Type]]: - return (float, float) - - def fancy_iter(self, start=None, stop=None, step=None) -> Generator[SkyCoord, None, None]: - for lon,lat in itertools.islice(self, start, stop, step): - yield SkyCoord(lon, lat, unit = self.unit, frame = self.frame) - - def cache(self, start=None, stop=None, step=None) -> SkyCoord: - lon = [] - lat = [] - for lon_i,lat_i in itertools.islice(self, start, stop, step): - lon.append(lon_i) - lat.append(lat_i) - - return SkyCoord(lon, lat, unit = self.unit, frame = self.frame) - - def as_unit_spherical(self) -> 'SkyCoordUnitSphericalMeasurement': - return self - - def as_cartesian(self) -> 'SkyCoordCartesianMeasurement':... - - -class CachedSkyCoordUnitSphericalMeasurement(CachedSkyCoordMeasurement, SkyCoordUnitSphericalMeasurement): - - def __init__(self, label:str, coord: SkyCoord): - - super().__init__(label, coord) - - rep = coord.represent_as('unitspherical') - - self._unit = rep.unit - self._data = np.asarray([rep.lon.value, rep.lat.value]).transpose() - - def cache(self, start=None, stop=None, step=None) -> SkyCoord: - return SkyCoord(self._data[start:stop:step,0], self._data[start:stop:step,1], unit = self.unit, frame = self.frame) - - def as_cartesian(self) -> 'SkyCoordCartesianMeasurement': - rep = UnitSphericalRepresentation(Quantity(self._data[:,0], self.unit), Quantity(self._data[:,1], self.unit)) - cart_rep = rep.represent_as('cartesian') - - coord = SkyCoord(x = cart_rep.x, y = cart_rep.y, z = cart_rep.z, frame = self.frame, representation_type = 'cartesian') - - return CachedSkyCoordCartesianMeasurement(self.label, coord) - - -class SkyCoordCartesianMeasurement(SkyCoordMeasurement, ABC): - - @property - def value_type(self) -> Union[Type, Tuple[Type]]: - return (float, float, float) - - def fancy_iter(self, start=None, stop=None, step=None) -> Generator[SkyCoord, None, None]: - for x,y,z in itertools.islice(self, start, stop, step): - yield SkyCoord(x=x,y=y,z=z, unit = self.unit, frame = self.frame(), representation_type='cartesian') - - def cache(self, start=None, stop=None, step=None) -> SkyCoord: - x = [] - y = [] - z = [] - for x_i,y_i,z_i in itertools.islice(self, start, stop, step): - x.append(x_i) - y.append(y_i) - z.append(z_i) - - return SkyCoord(x=x,y=y,z=z, unit = self.unit, frame = self.frame(), representation_type='cartesian') - - def as_unit_spherical(self) -> 'SkyCoordUnitSphericalMeasurement':... 
- - def as_cartesian(self) -> 'SkyCoordCartesianMeasurement': - return self - - -class CachedSkyCoordCartesianMeasurement(SkyCoordCartesianMeasurement): - - def __init__(self, label:str, coord: SkyCoord): - super().__init__(label, coord) - - rep = coord.represent_as('cartesian') - - self._unit = rep.unit - - self._data = np.asarray([rep.x.value, rep.y.value, rep.z.value]).transpose() - - def cache(self, start=None, stop=None, step=None) -> SkyCoord: - return SkyCoord(x = self._data[start:stop:step, 0], y = self._data[start:stop:step, 1], z = self._data[start:stop:step, 2], - unit = self.unit, frame = self.frame, representation_type = 'cartesian') - - def as_unit_spherical(self) -> 'SkyCoordUnitSphericalMeasurement': - rep = CartesianRepresentation(self._data[:,0], self._data[:,1], self._data[:,2]) - usph_rep = rep.represent_as('unitspherical') - - coord = SkyCoord(usph_rep.lon, usph_rep.lat, frame = self.frame) - - return CachedSkyCoordUnitSphericalMeasurement(self.label, coord) \ No newline at end of file diff --git a/docs/api/interfaces/examples/toy/toy_interfaces_example.py b/docs/api/interfaces/examples/toy/toy_interfaces_example.py index fccc3963..86ec54aa 100644 --- a/docs/api/interfaces/examples/toy/toy_interfaces_example.py +++ b/docs/api/interfaces/examples/toy/toy_interfaces_example.py @@ -1,5 +1,5 @@ import itertools -from typing import Dict, Any, Generator, Iterator, Iterable, Optional, Union +from typing import Dict, Any, Generator, Iterator, Iterable, Optional, Union, Type from astromodels.sources import Source from astromodels import LinearPolarization, SpectralComponent, Parameter @@ -7,7 +7,7 @@ import astropy.units as u from cosipy import SpacecraftHistory from cosipy.interfaces.background_interface import BackgroundDensityInterface -from cosipy.interfaces.data_interface import EventData, EventDataInterface, DataInterface +from cosipy.interfaces.data_interface import EventDataInterface, DataInterface from cosipy.statistics import PoissonLikelihood, UnbinnedLikelihood @@ -16,8 +16,8 @@ BinnedBackgroundInterface, BinnedThreeMLModelFoldingInterface, BinnedThreeMLSourceResponseInterface, - ThreeMLPluginInterface, BackgroundInterface, FloatingMeasurement, - UnbinnedThreeMLSourceResponseInterface, UnbinnedThreeMLModelFoldingInterface) + ThreeMLPluginInterface, + UnbinnedThreeMLSourceResponseInterface, UnbinnedThreeMLModelFoldingInterface, Event) from histpy import Axis, Axes, Histogram import numpy as np from scipy.stats import norm, uniform @@ -37,79 +37,86 @@ to see a plot on how it looks like. It looks nothing like COSI data, but -shows how generic the interfaces can be. I'm still working -on refactoring our current code to this format. +shows how generic the interfaces can be. """ # ======== Create toy interfaces for this model =========== # Simple 1D axes. Hardcoded. -toy_axis = Axis(np.linspace(-5, 5)) +toy_axis = Axis(np.linspace(-5, 5), label = 'x') nevents_signal = 1000 nevents_bkg = 1000 nevents_tot = nevents_signal + nevents_bkg -class ToyMeasurementIterator(Iterator): - # Random data. 
Normal signal on top of uniform bkg - # Keeps track of initial random seed - - def __init__(self, iterable: 'ToyMeasurement'): - self._iter = iterable - self._rng = np.random.default_rng() +class ToyEvent(Event): + """ + Unit-less 1D data of a measurement called "x" (could be anything) + """ - # Restart - self._rng.__setstate__(self._iter._rng_init) - self._pos = 0 + def __init__(self, x): + self._x = x - def __next__(self): - if self._pos >= nevents_tot: - raise StopIteration + @classmethod + def size(cls): + return 1 - self._pos += 1 + @property + def x(self): + return self._x - if self._rng.uniform(0, nevents_tot) < nevents_signal: - return self._rng.normal() + def __getitem__(self, item): + if item is 0: + return self._x else: - return self._rng.uniform(toy_axis.lo_lim, toy_axis.hi_lim) + raise IndexError("Out of bounds. This Event type has a single value.") + +class ToyData(DataInterface): + @property + def event_type(self) -> Type[Event]: + return ToyEvent + +class ToyEventData(EventDataInterface, ToyData): + # Random data. Normal signal on top of uniform bkg -class ToyMeasurement(FloatingMeasurement): + def __init__(self): - def __init__(self, label): + rng = np.random.default_rng() - super().__init__(label) + self._events = np.append(rng.normal(size = nevents_signal), rng.uniform(toy_axis.lo_lim, toy_axis.hi_lim, size = nevents_bkg)) - # Keep track of init seed to allow iterator - self._rng_init = np.random.default_rng().__getstate__() + np.random.shuffle(self._events) - def __iter__(self): - return ToyMeasurementIterator(self) + self._nevents = nevents_tot def __getitem__(self, item): - # This is inefficient unless the implementation caches the values - with iter(self) as i: - for _ in range(item): - next(i) + return ToyEvent(self._events[item]) - return next(i) + def __iter__(self) -> Iterator[ToyEvent]: + return iter(ToyEvent(x) for x in self._events) - def __len__(self): - return nevents_tot + @property + def nevents(self) -> int: + return self._nevents -class ToyData(BinnedDataInterface, EventData): - # Random data. Normal signal on top of uniform bkg - # Since the interfaces are Protocols, they don't *have* - # to derive from the base class, but doing some helps - # code readability, especially if you use an IDE. + def get_binned_data(self) -> "ToyBinnedData": - def __init__(self): - # Unbinned - measurements = ToyMeasurement('x') - EventData.__init__(self, measurements) + binned_data = Histogram(toy_axis) + binned_data.fill(self._events) + + return ToyBinnedData(binned_data) + +class ToyBinnedData(BinnedDataInterface, ToyData): + + def __init__(self, data:Histogram): - # Binned - self._data = Histogram(toy_axis) - self._data.fill(np.asarray(measurements)) + if data.ndim != 1: + raise ValueError("ToyBinnedData only take a 1D histogram") + + if data.axis.label != 'x': + raise ValueError("ToyBinnedData requires an axis labeled 'x'") + + self._data = data @property def data(self) -> Histogram: @@ -119,9 +126,15 @@ def data(self) -> Histogram: def axes(self) -> Axes: return self._data.axes + + class ToyBkg(BinnedBackgroundInterface, BackgroundDensityInterface): """ Models a uniform background + + # Since the interfaces are Protocols, they don't *have* + # to derive from the base class, but doing some helps + # code readability, especially if you use an IDE. 
""" def __init__(self): @@ -156,19 +169,14 @@ def set_data(self, data: DataInterface) -> None: if isinstance(data, EventDataInterface): self._event_data = data - def expectation_density(self, data: Optional[Union['EventDataInterface', Iterator]] = None) -> Iterable[float]: + def expectation_density(self, data: Optional[Iterable[ToyEvent]] = None) -> Iterable[float]: if data is None: - if self._event_data is None: + if self._event_data: raise RuntimeError("You need to either provide the data or call set_data() first.") data = self._event_data - elif isinstance(data, EventDataInterface): - - # Runs some checks - self.set_data(data) - density = self._norm * self._unit_expectation_density for _ in data: @@ -222,7 +230,7 @@ def set_data(self, data: DataInterface) -> None: if isinstance(data, EventDataInterface): self._event_data = data - def expectation_density(self, data:Optional[Union[EventDataInterface, Iterator]] = None) -> Iterable[float]: + def expectation_density(self, data:Optional[Iterable[ToyEvent]] = None) -> Iterable[float]: if data is None: @@ -231,15 +239,10 @@ def expectation_density(self, data:Optional[Union[EventDataInterface, Iterator]] data = self._event_data - elif isinstance(data, EventDataInterface): - - # Runs some checks - self.set_data(data) - # I expect in the real case it'll be more efficient to compute # (ncounts, ncounts*prob) than (ncounts, prob) - cache = self.ncounts()*norm.pdf([x for x, in data]) + cache = self.ncounts()*norm.pdf([event.x for event in data]) for n in cache: yield n @@ -298,13 +301,15 @@ def set_data(self, data: DataInterface) -> None: raise TypeError(f"This class only support data of type {ToyData}") if isinstance(data, BinnedDataInterface): - self._binned_data = data if isinstance(data, EventDataInterface): self._event_data = data - def expectation_density(self, data: EventDataInterface = None) -> Iterable[float]: + def expectation_density(self, data: Optional[Iterable[ToyEvent]] = None) -> Iterable[float]: + + if self._event_data is None: + raise RuntimeError("Set data first") self._cache_psr_copies() @@ -315,11 +320,8 @@ def expectation_density(self, data: EventDataInterface = None) -> Iterable[float data = self._event_data - elif isinstance(data, EventDataInterface): - - # Runs some checks - self.set_data(data) + # One by one in this example, but they can also be done in chunks (e.g. with itertools batched or islice) for expectations in zip(*[p.expectation_density(d) for p,d in zip(self._psr_copies.values(), itertools.tee(data))]): yield np.sum(expectations) @@ -340,10 +342,10 @@ def _cache_psr_copies(self): psr_copy = self._psr.copy() psr_copy.set_source(source) - if isinstance(psr_copy, BinnedThreeMLSourceResponseInterface): + if isinstance(psr_copy, BinnedThreeMLSourceResponseInterface) and self._binned_data is not None: psr_copy.set_data(self._binned_data) - if isinstance(psr_copy, UnbinnedThreeMLSourceResponseInterface): + if isinstance(psr_copy, UnbinnedThreeMLSourceResponseInterface) and self._event_data is not None: psr_copy.set_data(self._event_data) new_psr_copies[name] = psr_copy @@ -352,6 +354,9 @@ def _cache_psr_copies(self): def expectation(self, copy = True) -> Histogram: + if self._binned_data is None: + raise RuntimeError("Set data first") + self._cache_psr_copies() expectation = Histogram(self._binned_data.axes) @@ -364,10 +369,14 @@ def expectation(self, copy = True) -> Histogram: # ======= Actual code. This is how the "tutorial" will look like ================ +# Binned or unbinned +unbinned = True + # Set the inputs. 
These will eventually open file or set specific parameters, # but since we are generating the data and models on the fly, and most parameter # are hardcoded above withing the classes, then it's not necessary here. -data = ToyData() +event_data = ToyEventData() +binned_data = event_data.get_binned_data() psr = ToyPointSourceResponse() response = ToyModelFolding(psr) bkg = ToyBkg() @@ -399,12 +408,13 @@ def expectation(self, copy = True) -> Histogram: #model = Model() # Uncomment for bkg-only hypothesis # Fit -# Uncomment one. Either one works -#like_fun = PoissonLikelihood() -like_fun = UnbinnedLikelihood() +if unbinned: + like_fun = UnbinnedLikelihood() + like_fun.set_data(event_data) +else: + like_fun = PoissonLikelihood() + like_fun.set_data(binned_data) -# Call set_data() before set_response() and set_background() -like_fun.set_data(data) like_fun.set_response(response) like_fun.set_background(bkg) cosi = ThreeMLPluginInterface('cosi', like_fun) @@ -423,10 +433,12 @@ def expectation(self, copy = True) -> Histogram: # Plot results fig, ax = plt.subplots() -data.data.plot(ax) -expectation = response.expectation(data) +binned_data.data.plot(ax) +response.set_data(binned_data) +bkg.set_data(binned_data) +expectation = response.expectation(binned_data) if bkg is not None: - expectation = expectation + bkg.expectation(data) + expectation = expectation + bkg.expectation(binned_data) expectation.plot(ax) plt.show() From d18f0d32a141b251be7084ce0b838282f2ad5b6f Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Wed, 1 Oct 2025 06:38:50 -0400 Subject: [PATCH 073/133] Simplify interfaces. Remove set_data and other methods not strictly necessary. Signed-off-by: Israel Martinez --- .../free_norm_threeml_binned_bkg.py | 10 +- cosipy/data_io/UnBinnedData.py | 2 +- cosipy/interfaces/data_interface.py | 28 +--- cosipy/interfaces/event.py | 20 +-- .../event_data_processor_interface.py | 31 ---- cosipy/interfaces/event_selection.py | 4 +- cosipy/interfaces/expectation_interface.py | 17 ++- .../response/threeml_point_source_response.py | 11 +- cosipy/response/threeml_response.py | 20 +-- cosipy/statistics/likelihood_functions.py | 19 +-- ...mple_crab_fit_threeml_plugin_interfaces.py | 8 +- ..._fit_threeml_plugin_unbinned_interfaces.py | 7 +- ...ample_grb_fit_threeml_plugin_interfaces.py | 5 +- .../examples/toy/toy_interfaces_example.py | 136 +++--------------- 14 files changed, 69 insertions(+), 249 deletions(-) delete mode 100644 cosipy/interfaces/event_data_processor_interface.py diff --git a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py index a53cd6c1..84e32de1 100644 --- a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py +++ b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py @@ -116,16 +116,12 @@ def set_parameters(self, **parameters:Dict[str, u.Quantity]) -> None: def parameters(self) -> Dict[str, u.Quantity]: return {l:u.Quantity(n) for l,n in self.norms.items()} - def set_data(self, data: DataInterface): - - if data.axes != self.meausured_axes: - raise ValueError("Requested axes do not match the background component axes") - - def expectation(self, copy:bool = True)->Histogram: + def expectation(self, axes:Axes, copy:bool = True)->Histogram: """ Parameters ---------- + axes copy: If True, it will return an array that the user if free to modify. 
Otherwise, it will result a reference, possible to the cache, that @@ -136,6 +132,8 @@ def expectation(self, copy:bool = True)->Histogram: """ + if axes != self.meausured_axes: + raise ValueError("Requested axes do not match the background component axes") # Check if we can use the cache if self._expectation is None: diff --git a/cosipy/data_io/UnBinnedData.py b/cosipy/data_io/UnBinnedData.py index ca6dc311..147e5324 100644 --- a/cosipy/data_io/UnBinnedData.py +++ b/cosipy/data_io/UnBinnedData.py @@ -9,7 +9,7 @@ import time import cosipy from cosipy.data_io import DataIO -from cosipy.interfaces.data_interface import TimeTagEventDataInterface, EventDataWithEnergyInterface +from cosipy.interfaces.data_interface import TimeTagEventData, EventDataWithEnergy from cosipy.spacecraftfile import SpacecraftHistory import gzip import astropy.coordinates as astro_co diff --git a/cosipy/interfaces/data_interface.py b/cosipy/interfaces/data_interface.py index 29aaa8f3..5b702508 100644 --- a/cosipy/interfaces/data_interface.py +++ b/cosipy/interfaces/data_interface.py @@ -6,7 +6,7 @@ from astropy.units import Unit from . import EventWithEnergy -from .event import Event, FancyEnergyDataMixin, FancyTimeDataMixin, TimetaggedEvent +from .event import Event, FancyEnergyDataMixin, FancyTimeDataMixin, TimeTagEvent from histpy import Histogram, Axes from astropy.time import Time @@ -25,24 +25,10 @@ class DataInterface: - @property - def tstart(self) -> Union[Time, None]: - """ - Start time of data taking - """ - return None - - @property - def tstop(self) -> Union[Time, None]: - """ - Start time of data taking - """ - return None - @property def event_type(self) -> Type[Event]: """ - Type returned by __getitem__ + Type returned by __iter__ in the event data case """ class BinnedDataInterface(DataInterface): @@ -88,12 +74,12 @@ def selection(self) -> Union["EventSelectorInterface", None]: The current selection set """ - def get_binned_data(self, *args, **kwargs) -> BinnedDataInterface: + def get_binned_data(self, axes:Axes, *args, **kwargs) -> BinnedDataInterface: raise NotImplementedError -class TimeTagEventDataInterface(FancyTimeDataMixin, EventDataInterface): +class TimeTagEventData(FancyTimeDataMixin, EventDataInterface): - def __getitem__(self, item: int) -> TimetaggedEvent:... + def __iter__(self) -> Iterator[TimeTagEvent]:... @property @abstractmethod @@ -103,9 +89,9 @@ def jd1(self) -> Iterable[float]: ... @abstractmethod def jd2(self) -> Iterable[float]: ... -class EventDataWithEnergyInterface(FancyEnergyDataMixin, EventDataInterface): +class EventDataWithEnergy(FancyEnergyDataMixin, EventDataInterface): - def __getitem__(self, item: int) -> EventWithEnergy:... + def __iter__(self) -> Iterator[EventWithEnergy]:... 
@property @abstractmethod diff --git a/cosipy/interfaces/event.py b/cosipy/interfaces/event.py index e7ad6d20..bb7bfa86 100644 --- a/cosipy/interfaces/event.py +++ b/cosipy/interfaces/event.py @@ -6,25 +6,15 @@ __all__ = [ "Event", - "TimetaggedEvent", + "TimeTagEvent", "EventWithEnergy", ] -class Event(Sequence, ABC): +class Event(ABC): """ - Need to implement __getitem__ + Derived classes implement all accessors """ - @classmethod - @abstractmethod - def size(cls) -> int: - """ - Number of values - """ - - def __len__(self): - return self.size - class FancyTimeDataMixin(ABC): @property @@ -42,7 +32,7 @@ def time(self) -> Time: """ return Time(self.jd1, self.jd2, format = 'jd') -class TimetaggedEvent(FancyTimeDataMixin, Sequence): +class TimeTagEvent(FancyTimeDataMixin): @property @abstractmethod @@ -71,7 +61,7 @@ def energy(self) -> Quantity: return Quantity(self.energy_value, self.energy_unit) -class EventWithEnergy(FancyEnergyDataMixin, Sequence): +class EventWithEnergy(FancyEnergyDataMixin): @property @abstractmethod diff --git a/cosipy/interfaces/event_data_processor_interface.py b/cosipy/interfaces/event_data_processor_interface.py deleted file mode 100644 index 967d4076..00000000 --- a/cosipy/interfaces/event_data_processor_interface.py +++ /dev/null @@ -1,31 +0,0 @@ -from typing import Protocol, Optional, Iterable - -from cosipy.interfaces import EventDataInterface, Event - -class EventDataProcessorInterface(Protocol): - """ - Get a output per event - - Iterables can be anything. The implementations do not necessarily need to - process the data event by event. - """ - - def set_data(self, data: EventDataInterface):... - - def process(self, data: Optional[Iterable[Event]]) -> Iterable: - """ - 2 calling mechanisms - - 1. - processor.process() - - In this case process() will call iter(data), where data was passed though set_data() - - 2. - processor.process(data_subset: Iterable[float]) - - The implementation will use the general data properties from the set_data() call, - but will call iter(data_subset) instead. - This allows the user to cache event data, in addition to looping over only a - portion of the data - """ \ No newline at end of file diff --git a/cosipy/interfaces/event_selection.py b/cosipy/interfaces/event_selection.py index 61de1e56..16d3ba0a 100644 --- a/cosipy/interfaces/event_selection.py +++ b/cosipy/interfaces/event_selection.py @@ -1,13 +1,11 @@ from typing import Protocol, runtime_checkable, Dict, Any, Iterator, Sequence, Generator, Iterable, Union, Optional from . import Event -from .event_data_processor_interface import EventDataProcessorInterface @runtime_checkable -class EventSelectorInterface(EventDataProcessorInterface, Protocol): +class EventSelectorInterface(Protocol): def select(self, data: Optional[Iterable[Event]]) -> Iterable[bool]: """ Returns True to keep an event, False to filter it out. 
""" - return self.process(data) diff --git a/cosipy/interfaces/expectation_interface.py b/cosipy/interfaces/expectation_interface.py index d1c30916..5d45ffb0 100644 --- a/cosipy/interfaces/expectation_interface.py +++ b/cosipy/interfaces/expectation_interface.py @@ -2,6 +2,8 @@ import histpy import numpy as np +from histpy import Axes + from cosipy.interfaces import BinnedDataInterface, EventDataInterface, DataInterface, Event __all__ = [ @@ -9,20 +11,20 @@ "BinnedExpectationInterface" ] -from cosipy.interfaces.event_data_processor_interface import EventDataProcessorInterface - @runtime_checkable class ExpectationInterface(Protocol): - def set_data(self, data: DataInterface):... + pass @runtime_checkable class BinnedExpectationInterface(ExpectationInterface, Protocol): - def expectation(self, copy: Optional[bool])->histpy.Histogram: + def expectation(self, axes:Axes, copy: Optional[bool])->histpy.Histogram: """ Parameters ---------- + axes: + Axes to bin the expectation into copy: If True (default), it will return an array that the user if free to modify. Otherwise, it will result a reference, possible to the cache, that @@ -34,7 +36,7 @@ def expectation(self, copy: Optional[bool])->histpy.Histogram: """ @runtime_checkable -class ExpectationDensityInterface(ExpectationInterface, EventDataProcessorInterface, Protocol): +class ExpectationDensityInterface(ExpectationInterface, Protocol): def ncounts(self) -> float: """ @@ -42,9 +44,10 @@ def ncounts(self) -> float: """ def expectation_density(self, data: Iterable[Event]) -> Iterable[float]: - return self.process(data) + """ + """ - def get_binned_expectation(self, *args, **kwargs): + def get_binned_expectation(self, axes:Axes, *args, **kwargs): raise NotImplementedError diff --git a/cosipy/response/threeml_point_source_response.py b/cosipy/response/threeml_point_source_response.py index 2c46b699..0a625257 100644 --- a/cosipy/response/threeml_point_source_response.py +++ b/cosipy/response/threeml_point_source_response.py @@ -39,6 +39,7 @@ class BinnedThreeMLPointSourceResponse(BinnedThreeMLSourceResponseInterface): """ def __init__(self, + data:EmCDSBinnedData, instrument_response: BinnedInstrumentResponseInterface, sc_history: SpacecraftHistory, energy_axis:Axis, @@ -70,10 +71,11 @@ def __init__(self, # Interface inputs - self._data = None self._source = None # Other implementation inputs + self._data = data + self._sc_ori = sc_history self._response = instrument_response self._energy_axis = energy_axis @@ -131,13 +133,6 @@ def set_source(self, source: Source): self._source = source - def set_data(self, data: DataInterface): - - if not isinstance(data, EmCDSBinnedData): - raise TypeError(f"Wrong data type '{type(data)}', expected {EmCDSBinnedData}.") - - self._data = data - def expectation(self, copy = True)-> Histogram: # TODO: check coordsys from axis # TODO: Earth occ always true in this case diff --git a/cosipy/response/threeml_response.py b/cosipy/response/threeml_response.py index e808e470..20c45c00 100644 --- a/cosipy/response/threeml_response.py +++ b/cosipy/response/threeml_response.py @@ -13,6 +13,7 @@ class BinnedThreeMLModelFolding(BinnedThreeMLModelFoldingInterface): def __init__(self, + data: BinnedDataInterface, point_source_response:BinnedThreeMLSourceResponseInterface = None, extended_source_response: BinnedThreeMLSourceResponseInterface = None): """ @@ -28,7 +29,6 @@ def __init__(self, """ # Interface inputs - self._data = None self._model = None # Implementation inputs @@ -41,15 +41,7 @@ def __init__(self, # 
https://github.com/threeML/threeML/issues/645 self._cached_model_dict = None self._source_responses = {} - self._expectation = None - - def set_data(self, data: BinnedDataInterface): - - if self._expectation is None or self._expectation.axes != data.axes: - # Needs new memory allocation, and recompute everything - self._expectation = Histogram(data.axes) - - self._data = data + self._expectation = Histogram(data.axes) def set_model(self, model: Model): """ @@ -92,7 +84,6 @@ def _cache_source_responses(self): psr_copy = self._psr.copy() psr_copy.set_source(source) - psr_copy.set_data(self._data) new_source_responses[name] = psr_copy elif isinstance(source, ExtendedSource): @@ -101,7 +92,6 @@ def _cache_source_responses(self): esr_copy = self._esr.copy() esr_copy.set_source(source) - esr_copy.set_data(self._data) new_source_responses[name] = esr_copy else: raise RuntimeError(f"The model contains the source {name} " @@ -110,7 +100,7 @@ def _cache_source_responses(self): self._source_responses = new_source_responses - def expectation(self, copy:bool = True)->Histogram: + def expectation(self, axes:Axes, copy:bool = True)->Histogram: """ Parameters @@ -123,7 +113,7 @@ def expectation(self, copy:bool = True)->Histogram: """ - if self._data is None or self._model is None: + if self._model is None: raise RuntimeError("Call set_data() and set_model() first") # See this issue for the caveats of comparing models @@ -151,7 +141,7 @@ def expectation(self, copy:bool = True)->Histogram: # Convolve all sources with the response for source_name,psr in self._source_responses.items(): - self._expectation += psr.expectation() + self._expectation += psr.expectation(axes) # See this issue for the caveats of comparing models # https://github.com/threeML/threeML/issues/645 diff --git a/cosipy/statistics/likelihood_functions.py b/cosipy/statistics/likelihood_functions.py index c9f2a5d5..ca7d598b 100644 --- a/cosipy/statistics/likelihood_functions.py +++ b/cosipy/statistics/likelihood_functions.py @@ -34,20 +34,10 @@ def set_response(self, response: ExpectationInterface): super().set_response(response) # Checks type self._response = response - if self._data is None: - raise RuntimeError("Call set_data() before calling set_response()") - - self._response.set_data(self._data) - def set_background(self, bkg: BackgroundInterface): super().set_background(bkg) # Checks type self._bkg = bkg - if self._data is None: - raise RuntimeError("Call set_data() before calling set_background()") - - self._bkg.set_data(self._data) - @property def data (self) -> EventDataInterface: return self._data @property @@ -143,19 +133,14 @@ def get_log_like(self) -> float: if self._data is None or self._response is None: raise RuntimeError("Set data and response before calling this function.") - self._response.set_data(self._data) - - if self.has_bkg: - self._bkg.set_data(self._data) - # Compute expectation including background # If we don't have background, we won't modify the expectation, so # it's safe to use the internal cache. 
- expectation = self._response.expectation(copy = self.has_bkg) + expectation = self._response.expectation(self._data.axes, copy = self.has_bkg) if self.has_bkg: # We won't modify the bkg expectation, so it's safe to use the internal cache - expectation += self._bkg.expectation(copy = False) + expectation += self._bkg.expectation(self._data.axes, copy = False) # Get the arrays expectation = expectation.contents diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py index 7b3a0207..607c8582 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py @@ -194,7 +194,8 @@ def main(): # matching the behavior of v0.3. This is all the current BinnedInstrumentResponse can do. # In principle, this can be decoupled, and a BinnedInstrumentResponseInterface implementation # can provide the response for an arbitrary directions, Ei and Pol values. - psr = BinnedThreeMLPointSourceResponse(instrument_response, + psr = BinnedThreeMLPointSourceResponse(data = data, + instrument_response = instrument_response, sc_history=sc_orientation, energy_axis = dr.axes['Ei'], polarization_axis = dr.axes['Pol'] if 'Pol' in dr.axes.labels else None, @@ -203,7 +204,7 @@ def main(): ##==== - response = BinnedThreeMLModelFolding(point_source_response = psr) + response = BinnedThreeMLModelFolding(data = data, point_source_response = psr) like_fun = PoissonLikelihood() like_fun.set_data(data) @@ -331,7 +332,6 @@ def main(): spectral_shape=spectrum_inj) # Spectral model psr.set_source(source_inj) - psr.set_data(data) expectation_inj = psr.expectation(copy=True) @@ -431,7 +431,7 @@ def main(): # In[16]: - expectation_bkg = bkg.expectation(data, copy = True) + expectation_bkg = bkg.expectation(data.axes, copy = True) fig,ax = plt.subplots() diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py index 831d9b88..23cefabc 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py @@ -175,7 +175,8 @@ def main(): # In principle, this can be decoupled, and a BinnedInstrumentResponseInterface implementation # can provide the response for an arbitrary directions, Ei and Pol values. 
# NOTE: this is currently only implemented for data in local coords - psr = BinnedThreeMLPointSourceResponse(instrument_response, + psr = BinnedThreeMLPointSourceResponse(data = data, + instrument_response = instrument_response, sc_history=sc_orientation, energy_axis = dr.axes['Ei'], polarization_axis = dr.axes['Pol'] if 'Pol' in dr.axes.labels else None, @@ -184,7 +185,7 @@ def main(): ##==== - response = BinnedThreeMLModelFolding(point_source_response = psr) + response = BinnedThreeMLModelFolding(data = data, point_source_response = psr) like_fun = PoissonLikelihood() like_fun.set_data(data) @@ -411,7 +412,7 @@ def main(): # In[16]: - expectation_bkg = bkg.expectation(data, copy = True) + expectation_bkg = bkg.expectation(data.axes, copy = True) fig,ax = plt.subplots() diff --git a/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py index 0d17edcd..e6a1f467 100755 --- a/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py @@ -110,13 +110,14 @@ def main(): # In principle, this can be decoupled, and a BinnedInstrumentResponseInterface implementation # can provide the response for an arbitrary directions, Ei and Pol values. # NOTE: this is currently only implemented for data in local coords - psr = BinnedThreeMLPointSourceResponse(instrument_response, + psr = BinnedThreeMLPointSourceResponse(data = data, + instrument_response = instrument_response, sc_history=ori, energy_axis = dr.axes['Ei'], polarization_axis = dr.axes['Pol'] if 'Pol' in dr.axes.labels else None, nside = 2*data.axes['PsiChi'].nside) - response = BinnedThreeMLModelFolding(point_source_response = psr) + response = BinnedThreeMLModelFolding(data = data, point_source_response = psr) like_fun = PoissonLikelihood() like_fun.set_data(data) diff --git a/docs/api/interfaces/examples/toy/toy_interfaces_example.py b/docs/api/interfaces/examples/toy/toy_interfaces_example.py index 86ec54aa..96965504 100644 --- a/docs/api/interfaces/examples/toy/toy_interfaces_example.py +++ b/docs/api/interfaces/examples/toy/toy_interfaces_example.py @@ -17,7 +17,8 @@ BinnedThreeMLModelFoldingInterface, BinnedThreeMLSourceResponseInterface, ThreeMLPluginInterface, - UnbinnedThreeMLSourceResponseInterface, UnbinnedThreeMLModelFoldingInterface, Event) + UnbinnedThreeMLSourceResponseInterface, UnbinnedThreeMLModelFoldingInterface, Event, + ThreeMLSourceResponseInterface) from histpy import Axis, Axes, Histogram import numpy as np from scipy.stats import norm, uniform @@ -56,20 +57,10 @@ class ToyEvent(Event): def __init__(self, x): self._x = x - @classmethod - def size(cls): - return 1 - @property def x(self): return self._x - def __getitem__(self, item): - if item is 0: - return self._x - else: - raise IndexError("Out of bounds. 
This Event type has a single value.") - class ToyData(DataInterface): @property @@ -89,9 +80,6 @@ def __init__(self): self._nevents = nevents_tot - def __getitem__(self, item): - return ToyEvent(self._events[item]) - def __iter__(self) -> Iterator[ToyEvent]: return iter(ToyEvent(x) for x in self._events) @@ -99,9 +87,9 @@ def __iter__(self) -> Iterator[ToyEvent]: def nevents(self) -> int: return self._nevents - def get_binned_data(self) -> "ToyBinnedData": + def get_binned_data(self, axes:Axes, *args, **kwargs) -> "ToyBinnedData": - binned_data = Histogram(toy_axis) + binned_data = Histogram(axes) binned_data.fill(self._events) return ToyBinnedData(binned_data) @@ -126,8 +114,6 @@ def data(self) -> Histogram: def axes(self) -> Axes: return self._data.axes - - class ToyBkg(BinnedBackgroundInterface, BackgroundDensityInterface): """ Models a uniform background @@ -142,9 +128,6 @@ def __init__(self): self._unit_expectation[:] = 1 / self._unit_expectation.nbins self._norm = 1 - self._binned_data = None - self._event_data = None - # Doesn't need to be normalized self._unit_expectation_density = 1/(toy_axis.hi_lim - toy_axis.lo_lim) @@ -154,28 +137,7 @@ def set_parameters(self, **parameters:u.Quantity) -> None: def ncounts(self) -> float: return self._norm - def set_data(self, data: DataInterface) -> None: - - if not isinstance(data, ToyData): - raise TypeError(f"This class only support data of type {ToyData}") - - if isinstance(data, BinnedDataInterface): - - if data.axes != self._unit_expectation.axes: - raise ValueError("Wrong axes. I have fixed axes.") - - self._binned_data = data - - if isinstance(data, EventDataInterface): - self._event_data = data - def expectation_density(self, data: Optional[Iterable[ToyEvent]] = None) -> Iterable[float]: - if data is None: - - if self._event_data: - raise RuntimeError("You need to either provide the data or call set_data() first.") - - data = self._event_data density = self._norm * self._unit_expectation_density @@ -186,7 +148,10 @@ def expectation_density(self, data: Optional[Iterable[ToyEvent]] = None) -> Iter def parameters(self) -> Dict[str, u.Quantity]: return {'norm': u.Quantity(self._norm)} - def expectation(self, copy = True) -> Histogram: + def expectation(self, axes:Axes, copy = True) -> Histogram: + + if axes != self._unit_expectation.axes: + raise ValueError("Wrong axes. I have fixed axes.") # Always a copy return self._unit_expectation * self._norm @@ -202,9 +167,6 @@ def __init__(self): self._unit_expectation = Histogram(toy_axis, contents=np.diff(norm.cdf(toy_axis.edges))) - self._binned_data = None - self._event_data = None - def ncounts(self) -> float: if self._source is None: @@ -215,30 +177,8 @@ def ncounts(self) -> float: ns_events = self._source.spectrum.main.shape.k.value return ns_events - def set_data(self, data: DataInterface) -> None: - - if not isinstance(data, ToyData): - raise TypeError(f"This class only support data of type {ToyData}") - - if isinstance(data, BinnedDataInterface): - - if data.axes != self._unit_expectation.axes: - raise ValueError("Wrong axes. 
I have fixed axes.") - - self._binned_data = data - - if isinstance(data, EventDataInterface): - self._event_data = data - def expectation_density(self, data:Optional[Iterable[ToyEvent]] = None) -> Iterable[float]: - if data is None: - - if self._event_data is None: - raise RuntimeError("You need to either provide the data or call set_data() first.") - - data = self._event_data - # I expect in the real case it'll be more efficient to compute # (ncounts, ncounts*prob) than (ncounts, prob) @@ -254,7 +194,10 @@ def set_source(self, source: Source): self._source = source - def expectation(self, copy = True) -> Histogram: + def expectation(self, axes:Axes, copy = True) -> Histogram: + + if axes != self._unit_expectation.axes: + raise ValueError("Wrong axes. I have fixed axes.") if self._source is None: raise RuntimeError("Set a source first") @@ -273,19 +216,13 @@ def copy(self) -> "ToyPointSourceResponse": class ToyModelFolding(BinnedThreeMLModelFoldingInterface, UnbinnedThreeMLModelFoldingInterface): - def __init__(self, psr: BinnedThreeMLSourceResponseInterface): - - if not isinstance(psr, ToyPointSourceResponse): - raise TypeError(f"Wrong psr type '{type(psr)}', expected {ToyPointSourceResponse}.") + def __init__(self, psr: ToyPointSourceResponse): self._model = None self._psr = psr self._psr_copies = {} - self._binned_data = None - self._event_data = None - def ncounts(self) -> float: ncounts = 0 @@ -295,32 +232,10 @@ def ncounts(self) -> float: return ncounts - def set_data(self, data: DataInterface) -> None: - - if not isinstance(data, ToyData): - raise TypeError(f"This class only support data of type {ToyData}") - - if isinstance(data, BinnedDataInterface): - self._binned_data = data - - if isinstance(data, EventDataInterface): - self._event_data = data - def expectation_density(self, data: Optional[Iterable[ToyEvent]] = None) -> Iterable[float]: - if self._event_data is None: - raise RuntimeError("Set data first") - self._cache_psr_copies() - if data is None: - - if self._event_data is None: - raise RuntimeError("You need to either provide the data or call set_data() first.") - - data = self._event_data - - # One by one in this example, but they can also be done in chunks (e.g. with itertools batched or islice) for expectations in zip(*[p.expectation_density(d) for p,d in zip(self._psr_copies.values(), itertools.tee(data))]): yield np.sum(expectations) @@ -342,27 +257,18 @@ def _cache_psr_copies(self): psr_copy = self._psr.copy() psr_copy.set_source(source) - if isinstance(psr_copy, BinnedThreeMLSourceResponseInterface) and self._binned_data is not None: - psr_copy.set_data(self._binned_data) - - if isinstance(psr_copy, UnbinnedThreeMLSourceResponseInterface) and self._event_data is not None: - psr_copy.set_data(self._event_data) - new_psr_copies[name] = psr_copy self._psr_copies = new_psr_copies - def expectation(self, copy = True) -> Histogram: - - if self._binned_data is None: - raise RuntimeError("Set data first") + def expectation(self, axes:Axes, copy = True) -> Histogram: self._cache_psr_copies() - expectation = Histogram(self._binned_data.axes) + expectation = Histogram(axes) for source_name,psr in self._psr_copies.items(): - expectation += psr.expectation(copy = False) + expectation += psr.expectation(Axes(toy_axis), copy = False) # Always a copy return expectation @@ -370,13 +276,13 @@ def expectation(self, copy = True) -> Histogram: # ======= Actual code. 
This is how the "tutorial" will look like ================ # Binned or unbinned -unbinned = True +unbinned = False # Set the inputs. These will eventually open file or set specific parameters, # but since we are generating the data and models on the fly, and most parameter # are hardcoded above withing the classes, then it's not necessary here. event_data = ToyEventData() -binned_data = event_data.get_binned_data() +binned_data = event_data.get_binned_data(Axes(toy_axis)) psr = ToyPointSourceResponse() response = ToyModelFolding(psr) bkg = ToyBkg() @@ -434,11 +340,9 @@ def expectation(self, copy = True) -> Histogram: # Plot results fig, ax = plt.subplots() binned_data.data.plot(ax) -response.set_data(binned_data) -bkg.set_data(binned_data) -expectation = response.expectation(binned_data) +expectation = response.expectation(binned_data.axes) if bkg is not None: - expectation = expectation + bkg.expectation(binned_data) + expectation = expectation + bkg.expectation(binned_data.axes) expectation.plot(ax) plt.show() From ee95e396328c297b9b3a09b99a8041d2814b738c Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Wed, 1 Oct 2025 09:00:07 -0400 Subject: [PATCH 074/133] Attempt to make crab example reproducible. - Use same path variable for inputs and fetch_wasabi. - Commit dataio yaml config files. Signed-off-by: Israel Martinez --- .../interfaces/examples/crab/background.yaml | 14 +++++ docs/api/interfaces/examples/crab/crab.yaml | 14 +++++ ...mple_crab_fit_threeml_plugin_interfaces.py | 53 ++++++++++--------- 3 files changed, 55 insertions(+), 26 deletions(-) create mode 100644 docs/api/interfaces/examples/crab/background.yaml create mode 100644 docs/api/interfaces/examples/crab/crab.yaml diff --git a/docs/api/interfaces/examples/crab/background.yaml b/docs/api/interfaces/examples/crab/background.yaml new file mode 100644 index 00000000..526971dd --- /dev/null +++ b/docs/api/interfaces/examples/crab/background.yaml @@ -0,0 +1,14 @@ +#----------# +# Data I/O: + +data_file: "/path/to/background/tra/file" # full path +ori_file: "/path/to/ori/file" # full path to orientation file +unbinned_output: 'fits' # 'fits' or 'hdf5' +time_bins: 3600 # time bin size in seconds. Takes int or list of bin edges. +energy_bins: [100., 158.489, 251.189, 398.107, 630.957, 1000., 1584.89, 2511.89, 3981.07, 6309.57, 10000.] # Takes list. Needs to match response. +phi_pix_size: 5 # binning of Compton scattering angle [deg] +nside: 8 # healpix binning of psi chi local +scheme: 'ring' # healpix binning of psi chi local +tmin: 1835487300.0 # Min time cut in seconds. +tmax: 1843467255.0 # Max time cut in seconds. +#----------# diff --git a/docs/api/interfaces/examples/crab/crab.yaml b/docs/api/interfaces/examples/crab/crab.yaml new file mode 100644 index 00000000..da2a86af --- /dev/null +++ b/docs/api/interfaces/examples/crab/crab.yaml @@ -0,0 +1,14 @@ +#----------# +# Data I/O: + +data_file: "/path/to/crab/tra/file" # full path +ori_file: "/path/to/ori/file" +unbinned_output: 'fits' # 'fits' or 'hdf5' +time_bins: 3600 # time bin size in seconds. Takes int or list of bin edges. +energy_bins: [100., 158.489, 251.189, 398.107, 630.957, 1000., 1584.89, 2511.89, 3981.07, 6309.57, 10000.] # Takes list. Needs to match response. +phi_pix_size: 5 # binning of Compton scattering angle [deg] +nside: 8 # healpix binning of psi chi local +scheme: 'ring' # healpix binning of psi chi local +tmin: 1835487300.0 # Min time cut in seconds. +tmax: 1843467255.0 # Max time cut in seconds. 
+#----------# diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py index 607c8582..1d6993f5 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py @@ -74,6 +74,7 @@ import os + def main(): # ## Download and read in binned data @@ -88,44 +89,51 @@ def main(): # Download the orientation file (684.38 MB) - # In[ ]: - - fetch_wasabi_file('COSI-SMEX/DC2/Data/Orientation/20280301_3_month_with_orbital_info.ori', output=str(data_path / '20280301_3_month_with_orbital_info.ori'), checksum = '416fcc296fc37a056a069378a2d30cb2') + # In[ ]: + sc_orientation_path = data_path / "20280301_3_month_with_orbital_info.ori" + fetch_wasabi_file('COSI-SMEX/DC2/Data/Orientation/20280301_3_month_with_orbital_info.ori', + output=str(sc_orientation_path), checksum = '416fcc296fc37a056a069378a2d30cb2') # Download the binned Crab+background data (99.16 MB) # In[5]: - - fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/crab_bkg_binned_data.hdf5', output=str(data_path / 'crab_bkg_binned_data.hdf5'), checksum = '85658e102414c4f746e64a7d29c607a4') - + crab_bkg_data_path = data_path / "crab_bkg_binned_data.hdf5" + fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/crab_bkg_binned_data.hdf5', + output=str(crab_bkg_data_path), checksum = '85658e102414c4f746e64a7d29c607a4') # Download the binned Crab data (13.16 MB) # In[7]: - - fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/crab_binned_data.hdf5', output=str(data_path / 'crab_binned_data.hdf5'), checksum = '6e5bccb48556bdbd259519c52dec9dcb') + crab_data_path = data_path / "crab_binned_data.hdf5" + fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/crab_binned_data.hdf5', + output=str(crab_data_path), checksum = '6e5bccb48556bdbd259519c52dec9dcb') # Download the binned background data (89.10 MB) # In[9]: - - fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/bkg_binned_data.hdf5', output=str(data_path / 'bkg_binned_data.hdf5'), checksum = '54221d8556eb4ef520ef61da8083e7f4') - + bkg_data_path = data_path / "bkg_binned_data.hdf5" + fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/bkg_binned_data.hdf5', + output=str(bkg_data_path), checksum = '54221d8556eb4ef520ef61da8083e7f4') # Download the response file (596.06 MB) # In[10]: - # Before and after Jeremy's changes - fetch_wasabi_file('COSI-SMEX/DC2/Responses/SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip', output=str(data_path / 'SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip'), unzip = True, checksum = 'e8ff763c5d9e63d3797567a4a51d9eda') - #fetch_wasabi_file('COSI-SMEX/develop/Data/Responses/SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5', output=str(data_path / 'SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5'), checksum = 'eb72400a1279325e9404110f909c7785') + dr_path_zip = data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip" # path to detector response + dr_path = dr_path_zip.with_suffix('') + 
fetch_wasabi_file('COSI-SMEX/DC2/Responses/SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip', + output = str(dr_path_zip), unzip = True, checksum = 'e8ff763c5d9e63d3797567a4a51d9eda') + + # dr_path = str(data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5") # path to detector response + # fetch_wasabi_file('COSI-SMEX/develop/Data/Responses/SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5', + # output=str(dr_path), checksum = 'eb72400a1279325e9404110f909c7785') # Read in the spacecraft orientation file @@ -133,7 +141,7 @@ def main(): # In[4]: - sc_orientation = SpacecraftHistory.open(data_path / "20280301_3_month_with_orbital_info.ori") + sc_orientation = SpacecraftHistory.open(sc_orientation_path) # Create BinnedData objects for the Crab only, Crab+background, and background only. The Crab only simulation is not used for the spectral fit, but can be used to compare the fitted spectrum to the source simulation @@ -150,29 +158,22 @@ def main(): # In[6]: - - crab.load_binned_data_from_hdf5(binned_data=data_path / "crab_binned_data.hdf5") - crab_bkg.load_binned_data_from_hdf5(binned_data=data_path / "crab_bkg_binned_data.hdf5") - bkg.load_binned_data_from_hdf5(binned_data=data_path / "bkg_binned_data.hdf5") + crab.load_binned_data_from_hdf5(binned_data=crab_data_path) + crab_bkg.load_binned_data_from_hdf5(binned_data=crab_bkg_data_path) + bkg.load_binned_data_from_hdf5(binned_data=bkg_data_path) # Define the path to the detector response # In[7]: - - # Before and after Jeremy's changes - dr = str(data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5") # path to detector response - #dr = str(data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5") # path to detector response - - # ## Perform spectral fit # ============ Interfaces ============== output_suffix = 'interfaces' - dr = FullDetectorResponse.open(dr) + dr = FullDetectorResponse.open(dr_path) instrument_response = BinnedInstrumentResponse(dr) # Set background parameter, which is used to fit the amplitude of the background, and instantiate the COSI 3ML plugin From 98eeb139b2a4aa56f5622c0f6ab0f660d93d66d2 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Wed, 1 Oct 2025 09:37:04 -0400 Subject: [PATCH 075/133] Keep the simplification by removing set_data/response/bkg from likelihood and moving to implementation's __init__ Signed-off-by: Israel Martinez --- cosipy/interfaces/likelihood_interface.py | 35 +------------ cosipy/statistics/likelihood_functions.py | 51 ++----------------- ...mple_crab_fit_threeml_plugin_interfaces.py | 5 +- ...ample_grb_fit_threeml_plugin_interfaces.py | 5 +- .../examples/toy/toy_interfaces_example.py | 10 ++-- 5 files changed, 11 insertions(+), 95 deletions(-) diff --git a/cosipy/interfaces/likelihood_interface.py b/cosipy/interfaces/likelihood_interface.py index 249099cc..4c5e4fcb 100644 --- a/cosipy/interfaces/likelihood_interface.py +++ b/cosipy/interfaces/likelihood_interface.py @@ -14,47 +14,14 @@ def get_log_like(self) -> float:... @property def nobservations(self) -> int: """For BIC and other statistics""" - def set_data(self, data: DataInterface):... - def set_response(self, response: ExpectationInterface): ... - def set_background(self, bkg: BackgroundInterface): ... - @property - def data (self) -> DataInterface: ... 
- @property - def response(self) -> ExpectationInterface: ... - @property - def bkg (self) -> BackgroundInterface: ... @runtime_checkable class BinnedLikelihoodInterface(LikelihoodInterface, Protocol): """ - Needs to check that data, response and bkg are compatible - """ - def set_data(self, data: DataInterface): - if not isinstance(data, BinnedDataInterface): - raise TypeError("Incorrect data type for binned likelihood.") - - def set_response(self, response: ExpectationInterface): - if not isinstance(response, BinnedExpectationInterface): - raise TypeError("Incorrect data type for binned likelihood.") - def set_background(self, bkg: BackgroundInterface): - if not isinstance(bkg, BinnedBackgroundInterface): - raise TypeError("Incorrect background type for binned likelihood.") + """ @runtime_checkable class UnbinnedLikelihoodInterface(LikelihoodInterface, Protocol): """ - Needs to check that data, response and bkg are compatible """ - def set_data(self, data: DataInterface): - if not isinstance(data, EventDataInterface): - raise TypeError("Incorrect data type for unbinned likelihood.") - - def set_response(self, response: ExpectationInterface): - if not isinstance(response, ExpectationDensityInterface): - raise TypeError("Incorrect data type for unbinned likelihood.") - - def set_background(self, bkg: BackgroundInterface): - if not isinstance(bkg, BackgroundDensityInterface): - raise TypeError("Incorrect background type for unbinned likelihood.") - diff --git a/cosipy/statistics/likelihood_functions.py b/cosipy/statistics/likelihood_functions.py index ca7d598b..413fe9b5 100644 --- a/cosipy/statistics/likelihood_functions.py +++ b/cosipy/statistics/likelihood_functions.py @@ -2,6 +2,7 @@ import logging import operator +from cosipy import UnBinnedData from cosipy.interfaces.expectation_interface import ExpectationInterface, ExpectationDensityInterface logger = logging.getLogger(__name__) @@ -20,23 +21,11 @@ 'PoissonLikelihood'] class UnbinnedLikelihood(UnbinnedLikelihoodInterface): - def __init__(self): + def __init__(self, data:EventDataInterface, response:ExpectationDensityInterface, bkg:BackgroundDensityInterface = None): - self._data = None - self._bkg = None - self._response = None - - def set_data(self, data: DataInterface): - super().set_data(data) # Checks type self._data = data - - def set_response(self, response: ExpectationInterface): - super().set_response(response) # Checks type - self._response = response - - def set_background(self, bkg: BackgroundInterface): - super().set_background(bkg) # Checks type self._bkg = bkg + self._response = response @property def data (self) -> EventDataInterface: return self._data @@ -51,17 +40,10 @@ def has_bkg(self): @property def nobservations(self) -> int: - - if self._data is None: - raise RuntimeError("Set the data before calling this function.") - return self._data.nevents def get_log_like(self) -> float: - if self._data is None or self._response is None: - raise RuntimeError("Set data and response before calling this function.") - # Compute expectation including background ntot = self._response.ncounts() @@ -78,11 +60,6 @@ def get_log_like(self) -> float: density = np.fromiter(map(operator.add, signal_density, bkg_density), dtype=float) - # signal_density = np.fromiter(self._response.expectation_density(), dtype=float) - # bkg_density = np.fromiter(self._bkg.expectation_density(), dtype=float) - # - # density = signal_density + bkg_density - else: density = np.fromiter(self._response.expectation_density(), dtype=float) @@ -92,23 +69,11 @@ 
def get_log_like(self) -> float: class PoissonLikelihood(BinnedLikelihoodInterface): - def __init__(self): - - self._data = None - self._bkg = None - self._response = None + def __init__(self, data:BinnedDataInterface, response:BinnedExpectationInterface, bkg:BinnedBackgroundInterface = None): - def set_data(self, data: DataInterface): - super().set_data(data) # Checks type self._data = data - - def set_response(self, response: ExpectationInterface): - super().set_response(response) # Checks type - self._response = response - - def set_background(self, bkg: BackgroundInterface): - super().set_background(bkg) # Checks type self._bkg = bkg + self._response = response @property def data (self) -> BinnedDataInterface: return self._data @@ -123,16 +88,10 @@ def has_bkg(self): @property def nobservations(self) -> int: - if self._data is None: - raise RuntimeError("Set the data before calling this function.") - return self._data.data.contents.size def get_log_like(self) -> float: - if self._data is None or self._response is None: - raise RuntimeError("Set data and response before calling this function.") - # Compute expectation including background # If we don't have background, we won't modify the expectation, so # it's safe to use the internal cache. diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py index 1d6993f5..6d28b7a2 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py @@ -207,10 +207,7 @@ def main(): response = BinnedThreeMLModelFolding(data = data, point_source_response = psr) - like_fun = PoissonLikelihood() - like_fun.set_data(data) - like_fun.set_response(response) - like_fun.set_background(bkg) + like_fun = PoissonLikelihood(data, response, bkg) cosi = ThreeMLPluginInterface('cosi', like_fun) diff --git a/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py index e6a1f467..cb8c2fcd 100755 --- a/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py @@ -119,10 +119,7 @@ def main(): response = BinnedThreeMLModelFolding(data = data, point_source_response = psr) - like_fun = PoissonLikelihood() - like_fun.set_data(data) - like_fun.set_response(response) - like_fun.set_background(bkg) + like_fun = PoissonLikelihood(data, response, bkg) cosi = ThreeMLPluginInterface('cosi', like_fun) diff --git a/docs/api/interfaces/examples/toy/toy_interfaces_example.py b/docs/api/interfaces/examples/toy/toy_interfaces_example.py index 96965504..88f29bc6 100644 --- a/docs/api/interfaces/examples/toy/toy_interfaces_example.py +++ b/docs/api/interfaces/examples/toy/toy_interfaces_example.py @@ -276,7 +276,7 @@ def expectation(self, axes:Axes, copy = True) -> Histogram: # ======= Actual code. This is how the "tutorial" will look like ================ # Binned or unbinned -unbinned = False +unbinned = True # Set the inputs. 
These will eventually open file or set specific parameters, # but since we are generating the data and models on the fly, and most parameter @@ -315,14 +315,10 @@ def expectation(self, axes:Axes, copy = True) -> Histogram: # Fit if unbinned: - like_fun = UnbinnedLikelihood() - like_fun.set_data(event_data) + like_fun = UnbinnedLikelihood(event_data, response, bkg) else: - like_fun = PoissonLikelihood() - like_fun.set_data(binned_data) + like_fun = PoissonLikelihood(binned_data, response, bkg) -like_fun.set_response(response) -like_fun.set_background(bkg) cosi = ThreeMLPluginInterface('cosi', like_fun) # Before the fit, you can set the parameters initial values, bounds, etc. From edf1157a3509aed9e70a66b965c59ba1f3707ea3 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Wed, 1 Oct 2025 14:23:51 -0400 Subject: [PATCH 076/133] Improve event selector and event metadata interface Signed-off-by: Israel Martinez --- cosipy/interfaces/data_interface.py | 63 ++++++++--------- cosipy/interfaces/event.py | 67 ++++++++++--------- cosipy/interfaces/event_selection.py | 29 +++++++- cosipy/interfaces/expectation_interface.py | 4 +- .../examples/toy/toy_interfaces_example.py | 19 ++++-- 5 files changed, 110 insertions(+), 72 deletions(-) diff --git a/cosipy/interfaces/data_interface.py b/cosipy/interfaces/data_interface.py index 5b702508..46fcae99 100644 --- a/cosipy/interfaces/data_interface.py +++ b/cosipy/interfaces/data_interface.py @@ -1,12 +1,11 @@ import itertools -from abc import abstractmethod from typing import Protocol, runtime_checkable, Dict, Type, Any, Tuple, Iterator, Union, Sequence, Iterable import numpy as np -from astropy.units import Unit +from astropy.units import Unit, Quantity from . import EventWithEnergy -from .event import Event, FancyEnergyDataMixin, FancyTimeDataMixin, TimeTagEvent +from .event import Event, TimeTagEvent from histpy import Histogram, Axes from astropy.time import Time @@ -21,9 +20,12 @@ __all__ = ["DataInterface", "EventDataInterface", "BinnedDataInterface", -] + "TimeTagEventData", + "EventDataWithEnergy" + ] -class DataInterface: +@runtime_checkable +class DataInterface(Protocol): @property def event_type(self) -> Type[Event]: @@ -31,14 +33,15 @@ def event_type(self) -> Type[Event]: Type returned by __iter__ in the event data case """ -class BinnedDataInterface(DataInterface): +@runtime_checkable +class BinnedDataInterface(DataInterface, Protocol): @property def data(self) -> Histogram:... @property def axes(self) -> Axes:... - -class EventDataInterface(DataInterface, Iterable): +@runtime_checkable +class EventDataInterface(DataInterface, Protocol): def __iter__(self) -> Iterator[Event]: """ @@ -53,51 +56,51 @@ def __getitem__(self, item: int) -> Event: return next(itertools.islice(self, item, None)) @property - @abstractmethod def nevents(self) -> int: """ - After selection - """ - - def __len__(self): - return self.nevents + Total number of events yielded by __iter__ - def set_selection(self, selection: Union["EventSelectorInterface", None]) -> None: - """ - None would drop the selection. Implementation might not implement the ability to change or drop - a selection --e.g. the underlying data was discarded for efficiency reasons. - """ - - @property - def selection(self) -> Union["EventSelectorInterface", None]: - """ - The current selection set + Convenience method. Pretty slow in general. 
It's suggested that + the implementations override it """ + return sum(1 for _ in iter(self)) def get_binned_data(self, axes:Axes, *args, **kwargs) -> BinnedDataInterface: raise NotImplementedError -class TimeTagEventData(FancyTimeDataMixin, EventDataInterface): +@runtime_checkable +class TimeTagEventData(EventDataInterface, Protocol): def __iter__(self) -> Iterator[TimeTagEvent]:... @property - @abstractmethod def jd1(self) -> Iterable[float]: ... @property - @abstractmethod def jd2(self) -> Iterable[float]: ... -class EventDataWithEnergy(FancyEnergyDataMixin, EventDataInterface): + @property + def time(self) -> Time: + """ + Add fancy time + """ + return Time(self.jd1, self.jd2, format = 'jd') + +@runtime_checkable +class EventDataWithEnergy(EventDataInterface, Protocol): def __iter__(self) -> Iterator[EventWithEnergy]:... @property - @abstractmethod def energy_value(self) -> Iterable[float]:... @property - @abstractmethod def energy_unit(self) -> Unit:... + @property + def energy(self) -> Quantity: + """ + Add fancy energy quantity + """ + return Quantity(self.energy_value, self.energy_unit) + diff --git a/cosipy/interfaces/event.py b/cosipy/interfaces/event.py index bb7bfa86..a0bcb698 100644 --- a/cosipy/interfaces/event.py +++ b/cosipy/interfaces/event.py @@ -1,5 +1,6 @@ from abc import ABC, abstractmethod -from typing import Sequence, Union +from typing import Sequence, Union, Protocol +from typing_extensions import runtime_checkable from astropy.time import Time from astropy.units import Quantity, Unit @@ -10,20 +11,43 @@ "EventWithEnergy", ] -class Event(ABC): +class EventMetadata: + + def __init__(self): + self._metadata = {} + + def __getitem__(self, key): + return self._metadata[key] + + def __setitem__(self, key, value): + self._metadata[key] = value + setattr(self, key, value) + + def __delitem__(self, key): + if key in self._metadata: + del self._metadata[key] + delattr(self, key) + + def __repr__(self): + return f"{self.__class__.__name__}({self._metadata})" + +@runtime_checkable +class Event(Protocol): """ Derived classes implement all accessors """ -class FancyTimeDataMixin(ABC): + @property + def metadata(self) -> EventMetadata:... + +@runtime_checkable +class TimeTagEvent(Event, Protocol): @property - @abstractmethod - def jd1(self) -> Union[float, Sequence[float]]:... + def jd1(self) -> float:... @property - @abstractmethod - def jd2(self) -> Union[float, Sequence[float]]:... + def jd2(self) -> float:... @property def time(self) -> Time: @@ -32,25 +56,13 @@ def time(self) -> Time: """ return Time(self.jd1, self.jd2, format = 'jd') -class TimeTagEvent(FancyTimeDataMixin): +@runtime_checkable +class EventWithEnergy(Event, Protocol): @property - @abstractmethod - def jd1(self) -> float:... - - @property - @abstractmethod - def jd2(self) -> float:... - - -class FancyEnergyDataMixin(ABC): - - @property - @abstractmethod - def energy_value(self) -> Union[float, Sequence[float]]:... + def energy_value(self) -> float:... @property - @abstractmethod def energy_unit(self) -> Unit:... @property @@ -60,14 +72,3 @@ def energy(self) -> Quantity: """ return Quantity(self.energy_value, self.energy_unit) - -class EventWithEnergy(FancyEnergyDataMixin): - - @property - @abstractmethod - def energy_value(self) -> float:... - - @property - @abstractmethod - def energy_unit(self) -> Unit:... 
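
The EventMetadata container added above mirrors every stored key as an attribute. A minimal usage sketch (not from this patch series; the 'detector_id' key is just a placeholder, not something defined elsewhere in cosipy):

from cosipy.interfaces.event import EventMetadata

meta = EventMetadata()

# __setitem__ stores the value and also mirrors it as an attribute
meta['detector_id'] = 3
assert meta['detector_id'] == 3
assert meta.detector_id == 3

print(meta)   # EventMetadata({'detector_id': 3})

# __delitem__ removes both the dictionary entry and the attribute
del meta['detector_id']
assert not hasattr(meta, 'detector_id')
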
- diff --git a/cosipy/interfaces/event_selection.py b/cosipy/interfaces/event_selection.py index 16d3ba0a..8e8a97dc 100644 --- a/cosipy/interfaces/event_selection.py +++ b/cosipy/interfaces/event_selection.py @@ -1,11 +1,34 @@ -from typing import Protocol, runtime_checkable, Dict, Any, Iterator, Sequence, Generator, Iterable, Union, Optional +import itertools +from typing import Protocol, runtime_checkable, Dict, Any, Iterator, Sequence, Generator, Iterable, Union, Optional, \ + Tuple from . import Event @runtime_checkable class EventSelectorInterface(Protocol): - def select(self, data: Optional[Iterable[Event]]) -> Iterable[bool]: + def select(self, event:Union[Event, Iterable[Event]]) -> Union[bool, Iterable[bool]]: """ - Returns True to keep an event, False to filter it out. + True to keep an event + + Return a single value for a single Event. + As many values for an Iterable of events + """ + + def mask(self, events: Iterable[Event]) -> Iterable[Tuple[bool,Event]]: + """ + Returns an iterable of tuples. Each tuple has 2 elements: + - First: True to keep an event, False to filter it out. + - Second: the event itself. + """ + events1, events2 = itertools.tee(events, 2) + for selected, event in zip(self.select(events1), events2): + yield selected, event + + def __call__(self, events: Iterable[Event]) -> Iterable[Event]: + """ + Skips events that were not selected """ + for selected,event in self.mask(events): + if selected: + yield event \ No newline at end of file diff --git a/cosipy/interfaces/expectation_interface.py b/cosipy/interfaces/expectation_interface.py index 5d45ffb0..1dfe1bcd 100644 --- a/cosipy/interfaces/expectation_interface.py +++ b/cosipy/interfaces/expectation_interface.py @@ -43,8 +43,10 @@ def ncounts(self) -> float: Total expected counts """ - def expectation_density(self, data: Iterable[Event]) -> Iterable[float]: + def expectation_density(self, events: Union[Event, Iterable[Event]]) -> Union[Event, Iterable[float]]: """ + Return a single value for a single Event. 
+ As many values for an Iterable of events """ def get_binned_expectation(self, axes:Axes, *args, **kwargs): diff --git a/docs/api/interfaces/examples/toy/toy_interfaces_example.py b/docs/api/interfaces/examples/toy/toy_interfaces_example.py index 88f29bc6..8f62f7a9 100644 --- a/docs/api/interfaces/examples/toy/toy_interfaces_example.py +++ b/docs/api/interfaces/examples/toy/toy_interfaces_example.py @@ -137,12 +137,15 @@ def set_parameters(self, **parameters:u.Quantity) -> None: def ncounts(self) -> float: return self._norm - def expectation_density(self, data: Optional[Iterable[ToyEvent]] = None) -> Iterable[float]: + def expectation_density(self, data: Union[ToyEvent, Iterable[ToyEvent]] = None) -> Iterable[float]: density = self._norm * self._unit_expectation_density - for _ in data: - yield density + if isinstance(data, Event): + return density + else: + for _ in data: + yield density @property def parameters(self) -> Dict[str, u.Quantity]: @@ -177,7 +180,10 @@ def ncounts(self) -> float: ns_events = self._source.spectrum.main.shape.k.value return ns_events - def expectation_density(self, data:Optional[Iterable[ToyEvent]] = None) -> Iterable[float]: + def expectation_density(self, data:Union[ToyEvent, Iterable[ToyEvent]] = None) -> Iterable[float]: + + if isinstance(data, Event): + return next(iter(self.expectation_density([data]))) # I expect in the real case it'll be more efficient to compute # (ncounts, ncounts*prob) than (ncounts, prob) @@ -232,10 +238,13 @@ def ncounts(self) -> float: return ncounts - def expectation_density(self, data: Optional[Iterable[ToyEvent]] = None) -> Iterable[float]: + def expectation_density(self, data: Union[ToyEvent, Iterable[ToyEvent]] = None) -> Iterable[float]: self._cache_psr_copies() + if isinstance(data, Event): + return next(iter(self.expectation_density([data]))) + # One by one in this example, but they can also be done in chunks (e.g. 
with itertools batched or islice) for expectations in zip(*[p.expectation_density(d) for p,d in zip(self._psr_copies.values(), itertools.tee(data))]): yield np.sum(expectations) From 443571c38fb5c9dd82b844472d70a0abfcb70532 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Thu, 2 Oct 2025 10:32:00 -0400 Subject: [PATCH 077/133] work out toy example time selection - remove get_bin_data from interfaces - add event metadata Signed-off-by: Israel Martinez --- cosipy/interfaces/data_interface.py | 12 +- cosipy/interfaces/event_selection.py | 4 +- cosipy/interfaces/expectation_interface.py | 3 - .../examples/toy/toy_interfaces_example.py | 144 ++++++++++++++---- 4 files changed, 120 insertions(+), 43 deletions(-) diff --git a/cosipy/interfaces/data_interface.py b/cosipy/interfaces/data_interface.py index 46fcae99..bb21521b 100644 --- a/cosipy/interfaces/data_interface.py +++ b/cosipy/interfaces/data_interface.py @@ -1,5 +1,5 @@ import itertools -from typing import Protocol, runtime_checkable, Dict, Type, Any, Tuple, Iterator, Union, Sequence, Iterable +from typing import Protocol, runtime_checkable, Dict, Type, Any, Tuple, Iterator, Union, Sequence, Iterable, ClassVar import numpy as np from astropy.units import Unit, Quantity @@ -27,11 +27,8 @@ @runtime_checkable class DataInterface(Protocol): - @property - def event_type(self) -> Type[Event]: - """ - Type returned by __iter__ in the event data case - """ + # Type returned by __iter__ in the event data case + event_type = ClassVar[Type] @runtime_checkable class BinnedDataInterface(DataInterface, Protocol): @@ -65,9 +62,6 @@ def nevents(self) -> int: """ return sum(1 for _ in iter(self)) - def get_binned_data(self, axes:Axes, *args, **kwargs) -> BinnedDataInterface: - raise NotImplementedError - @runtime_checkable class TimeTagEventData(EventDataInterface, Protocol): diff --git a/cosipy/interfaces/event_selection.py b/cosipy/interfaces/event_selection.py index 8e8a97dc..f7b80ecb 100644 --- a/cosipy/interfaces/event_selection.py +++ b/cosipy/interfaces/event_selection.py @@ -31,4 +31,6 @@ def __call__(self, events: Iterable[Event]) -> Iterable[Event]: """ for selected,event in self.mask(events): if selected: - yield event \ No newline at end of file + yield event + + diff --git a/cosipy/interfaces/expectation_interface.py b/cosipy/interfaces/expectation_interface.py index 1dfe1bcd..9c062185 100644 --- a/cosipy/interfaces/expectation_interface.py +++ b/cosipy/interfaces/expectation_interface.py @@ -49,7 +49,4 @@ def expectation_density(self, events: Union[Event, Iterable[Event]]) -> Union[Ev As many values for an Iterable of events """ - def get_binned_expectation(self, axes:Axes, *args, **kwargs): - raise NotImplementedError - diff --git a/docs/api/interfaces/examples/toy/toy_interfaces_example.py b/docs/api/interfaces/examples/toy/toy_interfaces_example.py index 8f62f7a9..0da4a6d3 100644 --- a/docs/api/interfaces/examples/toy/toy_interfaces_example.py +++ b/docs/api/interfaces/examples/toy/toy_interfaces_example.py @@ -5,9 +5,14 @@ from astromodels import LinearPolarization, SpectralComponent, Parameter from astromodels.core.polarization import Polarization import astropy.units as u +from astropy.time import Time +from astropy.units import Quantity + from cosipy import SpacecraftHistory from cosipy.interfaces.background_interface import BackgroundDensityInterface from cosipy.interfaces.data_interface import EventDataInterface, DataInterface +from cosipy.interfaces.event import EventMetadata +from cosipy.interfaces.event_selection import 
EventSelectorInterface from cosipy.statistics import PoissonLikelihood, UnbinnedLikelihood @@ -18,7 +23,7 @@ BinnedThreeMLSourceResponseInterface, ThreeMLPluginInterface, UnbinnedThreeMLSourceResponseInterface, UnbinnedThreeMLModelFoldingInterface, Event, - ThreeMLSourceResponseInterface) + ThreeMLSourceResponseInterface, TimeTagEvent) from histpy import Axis, Axes, Histogram import numpy as np from scipy.stats import norm, uniform @@ -49,50 +54,90 @@ nevents_bkg = 1000 nevents_tot = nevents_signal + nevents_bkg -class ToyEvent(Event): +class ToyEvent(TimeTagEvent, Event): """ Unit-less 1D data of a measurement called "x" (could be anything) """ - def __init__(self, x): + def __init__(self, x, time:Time): self._x = x + self._jd1 = time.jd1 + self._jd2 = time.jd2 + self._metadata = EventMetadata() + + @property + def metadata(self) -> EventMetadata: + return self._metadata @property def x(self): return self._x -class ToyData(DataInterface): + @property + def jd1(self): + return self._jd1 @property - def event_type(self) -> Type[Event]: - return ToyEvent + def jd2(self): + return self._jd2 + +class ToyData(DataInterface): + + event_type = ToyEvent class ToyEventData(EventDataInterface, ToyData): # Random data. Normal signal on top of uniform bkg - def __init__(self): + def __init__(self, selector:EventSelectorInterface = None): rng = np.random.default_rng() - self._events = np.append(rng.normal(size = nevents_signal), rng.uniform(toy_axis.lo_lim, toy_axis.hi_lim, size = nevents_bkg)) + self._x = np.append(rng.normal(size = nevents_signal), rng.uniform(toy_axis.lo_lim, toy_axis.hi_lim, size = nevents_bkg)) + + self._tstart = Time("2000-01-01T00:00:00") + self._tstop = Time("2000-01-02T00:00:00") + + self._timestamps = self._tstart + np.random.uniform(size = nevents_tot)*u.day - np.random.shuffle(self._events) + np.random.shuffle(self._x) - self._nevents = nevents_tot + self._nevents = None # After selection + + self._selector = selector + + @property + def tsart(self): + return self._tstart + + @property + def tstop(self): + return self._tstop + + def _iter_all(self) -> Iterator[ToyEvent]: + + for x,t in zip(self._x, self._timestamps): + yield ToyEvent(x,t) def __iter__(self) -> Iterator[ToyEvent]: - return iter(ToyEvent(x) for x in self._events) + + nselected = 0 + for event in self._selector(self._iter_all()): + nselected += 1 + yield event + + self._nevents = nselected @property def nevents(self) -> int: - return self._nevents + if self._nevents is None: + # Not cached yet + self._nevents = sum(1 for _ in self) - def get_binned_data(self, axes:Axes, *args, **kwargs) -> "ToyBinnedData": - - binned_data = Histogram(axes) - binned_data.fill(self._events) + return self._nevents - return ToyBinnedData(binned_data) + @property + def x(self): + return np.asarray([x for sel,x in zip(self._selector.select(self), self._x) if sel]) class ToyBinnedData(BinnedDataInterface, ToyData): @@ -123,19 +168,20 @@ class ToyBkg(BinnedBackgroundInterface, BackgroundDensityInterface): # code readability, especially if you use an IDE. 
""" - def __init__(self): + def __init__(self, duration:Quantity): + self._unit_expectation = Histogram(toy_axis) self._unit_expectation[:] = 1 / self._unit_expectation.nbins self._norm = 1 - # Doesn't need to be normalized - self._unit_expectation_density = 1/(toy_axis.hi_lim - toy_axis.lo_lim) + self._sel_fraction = (duration/(1*u.day)).to_value('') + self._unit_expectation_density = self._sel_fraction/(toy_axis.hi_lim - toy_axis.lo_lim) def set_parameters(self, **parameters:u.Quantity) -> None: self._norm = parameters['norm'].value def ncounts(self) -> float: - return self._norm + return self._norm * self._sel_fraction def expectation_density(self, data: Union[ToyEvent, Iterable[ToyEvent]] = None) -> Iterable[float]: @@ -157,7 +203,7 @@ def expectation(self, axes:Axes, copy = True) -> Histogram: raise ValueError("Wrong axes. I have fixed axes.") # Always a copy - return self._unit_expectation * self._norm + return self._unit_expectation * self._norm * self._sel_fraction class ToyPointSourceResponse(BinnedThreeMLSourceResponseInterface, UnbinnedThreeMLSourceResponseInterface): """ @@ -165,10 +211,11 @@ class ToyPointSourceResponse(BinnedThreeMLSourceResponseInterface, UnbinnedThree The normalization --the "flux"-- is the only free parameters """ - def __init__(self): + def __init__(self, duration:Quantity): self._source = None + self._sel_fraction = (duration/(1*u.day)).to_value('') self._unit_expectation = Histogram(toy_axis, - contents=np.diff(norm.cdf(toy_axis.edges))) + contents= self._sel_fraction * np.diff(norm.cdf(toy_axis.edges))) def ncounts(self) -> float: @@ -177,7 +224,7 @@ def ncounts(self) -> float: # Get the latest values of the flux # Remember that _model can be modified externally between calls. - ns_events = self._source.spectrum.main.shape.k.value + ns_events = self._sel_fraction * self._source.spectrum.main.shape.k.value return ns_events def expectation_density(self, data:Union[ToyEvent, Iterable[ToyEvent]] = None) -> Iterable[float]: @@ -282,19 +329,56 @@ def expectation(self, axes:Axes, copy = True) -> Histogram: # Always a copy return expectation +def get_binned_data(event_data:ToyEventData, axis:Axis) -> ToyBinnedData: + """ + Only bins x axis + """ + + binned_data = Histogram(axis) + binned_data.fill(event_data.x) + + return ToyBinnedData(binned_data) + +class ToyTimeSelector(EventSelectorInterface): + + def __init__(self, tstart:Time = None, tstop:Time = None): + self._tstart = tstart + self._tstop = tstop + + def _select(self, event:TimeTagEvent) -> bool: + # Single event + return (self._tstart is None or event.time > self._tstart) and (self._tstop is None or event.time <= self._tstop) + + def select(self, events:Union[TimeTagEvent, Iterable[TimeTagEvent]]) -> Union[bool, Iterable[bool]]: + if isinstance(events, Event): + # Single event + return self._select(events) + else: + # Multiple + for event in events: + yield self._select(event) + # ======= Actual code. This is how the "tutorial" will look like ================ # Binned or unbinned -unbinned = True +unbinned = False # Set the inputs. These will eventually open file or set specific parameters, # but since we are generating the data and models on the fly, and most parameter # are hardcoded above withing the classes, then it's not necessary here. 
-event_data = ToyEventData() -binned_data = event_data.get_binned_data(Axes(toy_axis)) -psr = ToyPointSourceResponse() +tstart = Time("2000-01-01T01:00:00") +tstop = Time("2000-01-01T10:00:00") +duration = tstop - tstart +selector = ToyTimeSelector(tstart = tstart, tstop = tstop) + +event_data = ToyEventData(selector=selector) + +print(sum(1 for _ in event_data), nevents_tot) + +binned_data = get_binned_data(event_data, toy_axis) +psr = ToyPointSourceResponse(duration = duration) response = ToyModelFolding(psr) -bkg = ToyBkg() +bkg = ToyBkg(duration = duration) ## Source model ## We'll just use the K value in u.cm / u.cm / u.s / u.keV From d6d3974b4228c18a4964c6adf44f87407a1a22a5 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Thu, 2 Oct 2025 10:43:16 -0400 Subject: [PATCH 078/133] Use main() in toy - add profiles - fix hidden issue before separating main() Signed-off-by: Israel Martinez --- .../examples/toy/toy_interfaces_example.py | 149 ++++++++++-------- 1 file changed, 81 insertions(+), 68 deletions(-) diff --git a/docs/api/interfaces/examples/toy/toy_interfaces_example.py b/docs/api/interfaces/examples/toy/toy_interfaces_example.py index 0da4a6d3..e30713f9 100644 --- a/docs/api/interfaces/examples/toy/toy_interfaces_example.py +++ b/docs/api/interfaces/examples/toy/toy_interfaces_example.py @@ -298,13 +298,13 @@ def expectation_density(self, data: Union[ToyEvent, Iterable[ToyEvent]] = None) def set_model(self, model: Model): - self._model = None + self._model = model def _cache_psr_copies(self): new_psr_copies = {} - for name,source in model.sources.items(): + for name,source in self._model.sources.items(): if name in self._psr_copies: # Use cache @@ -360,92 +360,105 @@ def select(self, events:Union[TimeTagEvent, Iterable[TimeTagEvent]]) -> Union[bo # ======= Actual code. This is how the "tutorial" will look like ================ -# Binned or unbinned -unbinned = False +def main(): + + # Binned or unbinned + unbinned = False + + # Set the inputs. These will eventually open file or set specific parameters, + # but since we are generating the data and models on the fly, and most parameter + # are hardcoded above withing the classes, then it's not necessary here. + tstart = Time("2000-01-01T01:00:00") + tstop = Time("2000-01-01T10:00:00") + duration = tstop - tstart + selector = ToyTimeSelector(tstart = tstart, tstop = tstop) + + event_data = ToyEventData(selector=selector) + + print(sum(1 for _ in event_data), nevents_tot) + + binned_data = get_binned_data(event_data, toy_axis) + psr = ToyPointSourceResponse(duration = duration) + response = ToyModelFolding(psr) + bkg = ToyBkg(duration = duration) -# Set the inputs. These will eventually open file or set specific parameters, -# but since we are generating the data and models on the fly, and most parameter -# are hardcoded above withing the classes, then it's not necessary here. -tstart = Time("2000-01-01T01:00:00") -tstop = Time("2000-01-01T10:00:00") -duration = tstop - tstart -selector = ToyTimeSelector(tstart = tstart, tstop = tstop) + ## Source model + ## We'll just use the K value in u.cm / u.cm / u.s / u.keV + spectrum = Constant() + spectrum.k.value = 1 -event_data = ToyEventData(selector=selector) + polarized = False -print(sum(1 for _ in event_data), nevents_tot) + if polarized: + polarization = LinearPolarization(10, 10) + polarization.degree.value = 0. 
+ polarization.angle.value = 10 -binned_data = get_binned_data(event_data, toy_axis) -psr = ToyPointSourceResponse(duration = duration) -response = ToyModelFolding(psr) -bkg = ToyBkg(duration = duration) + spectral_component = SpectralComponent('arbitrary_spectrum_name', spectrum, polarization) + source = PointSource('arbitrary_source_name', 0, 0, components=[spectral_component]) + else: -## Source model -## We'll just use the K value in u.cm / u.cm / u.s / u.keV -spectrum = Constant() -spectrum.k.value = 1 + source = PointSource("arbitrary_source_name", + l=0, b=0, # Doesn't matter + spectral_shape=spectrum) -polarized = False + model = Model(source) -if polarized: - polarization = LinearPolarization(10, 10) - polarization.degree.value = 0. - polarization.angle.value = 10 + # Optional: Perform a background-only or a null-background fit + #bkg = None # Uncomment for no bkg + #model = Model() # Uncomment for bkg-only hypothesis - spectral_component = SpectralComponent('arbitrary_spectrum_name', spectrum, polarization) - source = PointSource('arbitrary_source_name', 0, 0, components=[spectral_component]) -else: + # Fit + if unbinned: + like_fun = UnbinnedLikelihood(event_data, response, bkg) + else: + like_fun = PoissonLikelihood(binned_data, response, bkg) - source = PointSource("arbitrary_source_name", - l=0, b=0, # Doesn't matter - spectral_shape=spectrum) + cosi = ThreeMLPluginInterface('cosi', like_fun) -model = Model(source) + # Before the fit, you can set the parameters initial values, bounds, etc. + # This is passed to the minimizer. + # In addition to model. Nuisance. + cosi.bkg_parameter['norm'].value = 1 -# Optional: Perform a background-only or a null-background fit -#bkg = None # Uncomment for no bkg -#model = Model() # Uncomment for bkg-only hypothesis + plugins = DataList(cosi) + like = JointLikelihood(model, plugins) -# Fit -if unbinned: - like_fun = UnbinnedLikelihood(event_data, response, bkg) -else: - like_fun = PoissonLikelihood(binned_data, response, bkg) + # Run minimizer + like.fit() + print(like.minimizer) -cosi = ThreeMLPluginInterface('cosi', like_fun) + # Plot results + plot = False + if plot: -# Before the fit, you can set the parameters initial values, bounds, etc. -# This is passed to the minimizer. -# In addition to model. Nuisance. 
-cosi.bkg_parameter['norm'].value = 1 + fig, ax = plt.subplots() + binned_data.data.plot(ax) + expectation = response.expectation(binned_data.axes) + if bkg is not None: + expectation = expectation + bkg.expectation(binned_data.axes) + expectation.plot(ax) + plt.show() -plugins = DataList(cosi) -like = JointLikelihood(model, plugins) + # Grid + loglike = Histogram([np.linspace(.9*nevents_signal, 1.1*nevents_signal, 30), np.linspace(.9*nevents_bkg, 1.1*nevents_bkg, 31)], labels = ['s', 'b']) -# Run minimizer -like.fit() -print(like.minimizer) + for i,s in enumerate(loglike.axes['s'].centers): + for j,b in enumerate(loglike.axes['b'].centers): -# Plot results -fig, ax = plt.subplots() -binned_data.data.plot(ax) -expectation = response.expectation(binned_data.axes) -if bkg is not None: - expectation = expectation + bkg.expectation(binned_data.axes) -expectation.plot(ax) -plt.show() + spectrum.k.value = s + cosi.bkg_parameter['norm'].value = b -# Grid -loglike = Histogram([np.linspace(.9*nevents_signal, 1.1*nevents_signal, 30), np.linspace(.9*nevents_bkg, 1.1*nevents_bkg, 31)], labels = ['s', 'b']) + loglike[i,j] = cosi.get_log_like() -for i,s in enumerate(loglike.axes['s'].centers): - for j,b in enumerate(loglike.axes['b'].centers): + loglike.plot() - spectrum.k.value = s - cosi.bkg_parameter['norm'].value = b + plt.show() - loglike[i,j] = cosi.get_log_like() +if __name__ == "__main__": -loglike.plot() + import cProfile + cProfile.run('main()', filename = "prof_toy.prof") + exit() -plt.show() \ No newline at end of file + main() \ No newline at end of file From 63a948de4df2b8945cf1abc51aa7364ed2df4c86 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Thu, 2 Oct 2025 11:03:34 -0400 Subject: [PATCH 079/133] Optimize the selection of toy example Signed-off-by: Israel Martinez --- .../examples/toy/toy_interfaces_example.py | 62 ++++++++++++++----- 1 file changed, 47 insertions(+), 15 deletions(-) diff --git a/docs/api/interfaces/examples/toy/toy_interfaces_example.py b/docs/api/interfaces/examples/toy/toy_interfaces_example.py index e30713f9..abab2a0a 100644 --- a/docs/api/interfaces/examples/toy/toy_interfaces_example.py +++ b/docs/api/interfaces/examples/toy/toy_interfaces_example.py @@ -7,10 +7,11 @@ import astropy.units as u from astropy.time import Time from astropy.units import Quantity +from numpy.ma.core import logical_or from cosipy import SpacecraftHistory from cosipy.interfaces.background_interface import BackgroundDensityInterface -from cosipy.interfaces.data_interface import EventDataInterface, DataInterface +from cosipy.interfaces.data_interface import EventDataInterface, DataInterface, TimeTagEventData from cosipy.interfaces.event import EventMetadata from cosipy.interfaces.event_selection import EventSelectorInterface @@ -85,7 +86,7 @@ class ToyData(DataInterface): event_type = ToyEvent -class ToyEventData(EventDataInterface, ToyData): +class ToyEventData(TimeTagEventData, ToyData): # Random data. 
Normal signal on top of uniform bkg def __init__(self, selector:EventSelectorInterface = None): @@ -103,7 +104,19 @@ def __init__(self, selector:EventSelectorInterface = None): self._nevents = None # After selection - self._selector = selector + # Filter the events once and for all + # It can also be done on the fly if needed + new_x = [] + new_jd1 = [] + new_jd2 = [] + + for event in selector(self): + new_x.append(event.x) + new_jd1.append(event.jd1) + new_jd2.append(event.jd2) + + self._x = np.asarray(new_x) + self._timestamps = Time(new_jd1, new_jd2, format = 'jd') @property def tsart(self): @@ -113,17 +126,11 @@ def tsart(self): def tstop(self): return self._tstop - def _iter_all(self) -> Iterator[ToyEvent]: - - for x,t in zip(self._x, self._timestamps): - yield ToyEvent(x,t) - def __iter__(self) -> Iterator[ToyEvent]: - nselected = 0 - for event in self._selector(self._iter_all()): + for x,t in zip(self._x, self._timestamps): nselected += 1 - yield event + yield ToyEvent(x,t) self._nevents = nselected @@ -137,7 +144,15 @@ def nevents(self) -> int: @property def x(self): - return np.asarray([x for sel,x in zip(self._selector.select(self), self._x) if sel]) + return self._x + + @property + def jd1(self) -> Iterable[float]: + return self._timestamps.jd1 + + @property + def jd2(self) -> Iterable[float]: + return self._timestamps.jd2 class ToyBinnedData(BinnedDataInterface, ToyData): @@ -347,16 +362,33 @@ def __init__(self, tstart:Time = None, tstop:Time = None): def _select(self, event:TimeTagEvent) -> bool: # Single event - return (self._tstart is None or event.time > self._tstart) and (self._tstop is None or event.time <= self._tstop) + return next(iter(self.select([event]))) def select(self, events:Union[TimeTagEvent, Iterable[TimeTagEvent]]) -> Union[bool, Iterable[bool]]: + if isinstance(events, Event): # Single event return self._select(events) else: # Multiple + + # Caching results optimizes the result sometimes + # The user can pass the iterable in chunks + jd1 = [] + jd2 = [] + for event in events: - yield self._select(event) + jd1.append(event.jd1) + jd2.append(event.jd2) + + time = Time(jd1, jd2, format = 'jd') + + selected = np.logical_and(np.logical_or(self._tstart is None, time > self._tstart), + np.logical_or(self._tstop is None, time <= self._tstop)) + + for sel in selected: + yield sel + # ======= Actual code. 
This is how the "tutorial" will look like ================ @@ -429,7 +461,7 @@ def main(): print(like.minimizer) # Plot results - plot = False + plot = True if plot: fig, ax = plt.subplots() From b65695a6ca1c709d3602a28a75297b271c4bafb4 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Fri, 3 Oct 2025 15:47:00 -0400 Subject: [PATCH 080/133] Rework of events stream to make it possible to cache and add DC3 data implementation Signed-off-by: Israel Martinez --- cosipy/data_io/EmCDSUnbinnedData.py | 207 +++++++++ cosipy/data_io/UnBinnedData.py | 2 +- cosipy/interfaces/data_interface.py | 83 +++- cosipy/interfaces/event.py | 84 +++- cosipy/interfaces/event_selection.py | 10 +- cosipy/interfaces/expectation_interface.py | 15 +- .../instrument_response_interface.py | 58 ++- cosipy/interfaces/photon_list.py | 84 ++++ cosipy/interfaces/photon_parameters.py | 57 +++ cosipy/interfaces/threeml_plugin_interface.py | 27 +- cosipy/statistics/likelihood_functions.py | 79 ++-- ...mple_crab_fit_threeml_plugin_interfaces.py | 5 +- ..._fit_threeml_plugin_unbinned_interfaces.py | 404 +----------------- ...ample_grb_fit_threeml_plugin_interfaces.py | 5 +- .../examples/toy/toy_interfaces_example.py | 200 +++++---- 15 files changed, 762 insertions(+), 558 deletions(-) create mode 100644 cosipy/data_io/EmCDSUnbinnedData.py create mode 100644 cosipy/interfaces/photon_list.py create mode 100644 cosipy/interfaces/photon_parameters.py diff --git a/cosipy/data_io/EmCDSUnbinnedData.py b/cosipy/data_io/EmCDSUnbinnedData.py new file mode 100644 index 00000000..763eb774 --- /dev/null +++ b/cosipy/data_io/EmCDSUnbinnedData.py @@ -0,0 +1,207 @@ +from pathlib import Path +from typing import Iterable, Iterator, Optional + +import numpy as np +from astropy.coordinates import BaseCoordinateFrame, Angle, SkyCoord, UnitSphericalRepresentation +from astropy.time import Time +from astropy.units import Quantity +from numpy._typing import ArrayLike +from scoords import SpacecraftFrame + +from cosipy import UnBinnedData +from cosipy.interfaces import EventWithEnergyInterface, EventDataInterface, EventDataWithEnergyInterface +from cosipy.interfaces.data_interface import ComptonDataSpaceEventDataInterface, TimeTagEmCDSEventDataInSCFrameInterface +from cosipy.interfaces.event import ComptonDataSpaceEventInterface, TimeTagEmCDSEventInSCFrameInterface + +import astropy.units as u + +from cosipy.interfaces.event_selection import EventSelectorInterface + + +class TimeTagEmCDSEventInSCFrame(TimeTagEmCDSEventInSCFrameInterface): + + _frame = SpacecraftFrame() + + def __init__(self, id, jd1, jd2, energy, phi, psi, chi): + """ + Parameters + ---------- + jd1: julian days + jd2: julian days + energy: keV + phi: scattering angle radians + psi: scattering latitude radians + chi: scattering longitude radians + """ + self._id = id + self._jd1 = jd1 + self._jd2 = jd2 + self._energy = energy + self._phi = phi + self._psi = psi + self._chi = chi + + @property + def id(self) -> int: + return self._id + + @property + def frame(self): + return self._frame + + @property + def energy_keV(self) -> float: + return self._energy + + @property + def scattering_angle_rad(self) -> float: + return self._phi + + @property + def scattered_lon_rad(self) -> float: + return self._chi + + @property + def scattered_lat_radians(self) -> float: + return self._psi + +class TimeTagEmCDSEventDataInSCFrameFromArrays(TimeTagEmCDSEventDataInSCFrameInterface): + """ + + """ + + _frame = SpacecraftFrame() + event_type = TimeTagEmCDSEventInSCFrame + + def __init__(self, + 
time:Time, + energy:Quantity, + scattering_angle:Angle, + scattered_direction:SkyCoord, + event_id:Optional[Iterable[int]] = None, + selection:EventSelectorInterface = None): + """ + + Parameters + ---------- + time + energy: keV + scattering_angle: scattering angle radians + psi: scattering latitude radians + chi: scattering longitude radians + id: range(size) by default + selection: Optional selection for TimeTagEmCDSEventInSCFrame events + """ + self._jd1 = time.jd1 + self._jd2 = time.jd2 + self._energy = energy.to_value(u.keV) + self._phi = scattering_angle.to_value(u.rad) + + if not isinstance(scattered_direction.frame, SpacecraftFrame): + raise ValueError("Coordinates need to be in SC frame") + + scattered_direction = scattered_direction.represent_as(UnitSphericalRepresentation) + + self._psi = scattered_direction.lat.rad + self._chi = scattered_direction.lon.rad + if event_id is None: + self._id = np.arange(self._jd1.size) + else: + self._id = np.asarray(event_id) + + # Check size + self._id, self._jd1, self._jd2, self._energy, self._phi, self._psi, self._chi = np.broadcast_arrays(self._id, self._jd1, self._jd2, self._energy, self._phi, self._psi, self._chi) + + self._nevents = self._id.size + + if selection is not None: + # Apply selection once and for all + new_id = [] + new_jd1 = [] + new_jd2 = [] + new_energy = [] + new_phi = [] + new_psi = [] + new_chi = [] + + nevents = 0 + for event in selection(self): + new_id.append(event.id) + new_jd1.append(event.jd1) + new_jd2.append(event.jd2) + new_energy.append(event.energy) + new_phi.append(event.phi) + new_psi.append(event.psi) + new_chi.append(event.chi) + nevents += 1 + + self._nevents = nevents + + self._id = np.asarray(new_id) + self._jd1 = np.asarray(new_jd1) + self._jd2 = np.asarray(new_jd2) + self._energy = np.asarray(new_energy) + self._phi = np.asarray(new_phi) + self._psi = np.asarray(new_psi) + self._chi = np.asarray(new_chi) + + def __getitem__(self, i: int) -> TimeTagEmCDSEventInSCFrameInterface: + return TimeTagEmCDSEventInSCFrame(self._id[i], self._jd1[i], self._jd2[i], self._energy[i], self._phi[i], self._psi[i], self._chi[i]) + + @property + def nevents(self) -> int: + return self._nevents + + def __iter__(self) -> Iterator[TimeTagEmCDSEventInSCFrameInterface]: + for id, jd1, jd2, energy, phi, psi, chi in zip(self._id, self._jd1, self._jd2, self._energy, self._phi, self._psi, self._chi): + yield TimeTagEmCDSEventInSCFrame(id, jd1, jd2, energy, phi, psi, chi) + + @property + def frame(self) -> SpacecraftFrame: + return self._frame + + @property + def ids(self) -> Iterable[int]: + return self._id + + @property + def jd1(self) -> Iterable[float]: + return self._jd1 + + @property + def jd2(self) -> Iterable[float]: + return self._jd1 + + @property + def energy_rad(self) -> Iterable[float]: + return self._energy + + @property + def scattering_angle_rad(self) -> Iterable[float]: + return self._phi + + @property + def scattered_lon_rad(self) -> Iterable[float]: + return self._chi + + @property + def scattered_lat_rad(self) -> Iterable[float]: + return self._phi + +class TimeTagEmCDSEventDataInSCFrameFromDC3Fits(TimeTagEmCDSEventDataInSCFrameFromArrays): + + def __init__(self, data_path: Path): + + # get_dict_from_fits is really a static method, no config file needed + data_dict = UnBinnedData.get_dict_from_fits(None, data_path) + time = Time(data_dict['TimeTags'], format='unix') + energy = u.Quantity(data_dict['Energies'], u.keV) + phi = Angle(data_dict['Phi'], u.rad) + psichi = SkyCoord(data_dict['Chi local'], np.pi / 2 - 
data_dict['Psi local'], unit=u.rad, + frame=SpacecraftFrame()) + + super().__init__(time, energy, phi, psichi) + + + + diff --git a/cosipy/data_io/UnBinnedData.py b/cosipy/data_io/UnBinnedData.py index 147e5324..ca6dc311 100644 --- a/cosipy/data_io/UnBinnedData.py +++ b/cosipy/data_io/UnBinnedData.py @@ -9,7 +9,7 @@ import time import cosipy from cosipy.data_io import DataIO -from cosipy.interfaces.data_interface import TimeTagEventData, EventDataWithEnergy +from cosipy.interfaces.data_interface import TimeTagEventDataInterface, EventDataWithEnergyInterface from cosipy.spacecraftfile import SpacecraftHistory import gzip import astropy.coordinates as astro_co diff --git a/cosipy/interfaces/data_interface.py b/cosipy/interfaces/data_interface.py index bb21521b..4e7b616d 100644 --- a/cosipy/interfaces/data_interface.py +++ b/cosipy/interfaces/data_interface.py @@ -2,10 +2,14 @@ from typing import Protocol, runtime_checkable, Dict, Type, Any, Tuple, Iterator, Union, Sequence, Iterable, ClassVar import numpy as np +from astropy.coordinates import BaseCoordinateFrame, Angle, SkyCoord from astropy.units import Unit, Quantity +import astropy.units as u +from scoords import SpacecraftFrame -from . import EventWithEnergy -from .event import Event, TimeTagEvent +from . import EventWithEnergyInterface +from .event import EventInterface, TimeTagEventInterface, ComptonDataSpaceEventInterface, \ + ComptonDataSpaceInSCFrameEventInterface, TimeTagEmCDSEventInSCFrameInterface from histpy import Histogram, Axes from astropy.time import Time @@ -20,8 +24,8 @@ __all__ = ["DataInterface", "EventDataInterface", "BinnedDataInterface", - "TimeTagEventData", - "EventDataWithEnergy" + "TimeTagEventDataInterface", + "EventDataWithEnergyInterface" ] @runtime_checkable @@ -40,12 +44,12 @@ def axes(self) -> Axes:... @runtime_checkable class EventDataInterface(DataInterface, Protocol): - def __iter__(self) -> Iterator[Event]: + def __iter__(self) -> Iterator[EventInterface]: """ Return one Event at a time """ - def __getitem__(self, item: int) -> Event: + def __getitem__(self, item: int) -> EventInterface: """ Convenience method. Pretty slow in general. It's suggested that the implementations override it @@ -62,10 +66,14 @@ def nevents(self) -> int: """ return sum(1 for _ in iter(self)) + @property + def ids(self) -> Iterable[int]: + return [e.id for e in self] + @runtime_checkable -class TimeTagEventData(EventDataInterface, Protocol): +class TimeTagEventDataInterface(EventDataInterface, Protocol): - def __iter__(self) -> Iterator[TimeTagEvent]:... + def __iter__(self) -> Iterator[TimeTagEventInterface]:... @property def jd1(self) -> Iterable[float]: ... @@ -81,20 +89,67 @@ def time(self) -> Time: return Time(self.jd1, self.jd2, format = 'jd') @runtime_checkable -class EventDataWithEnergy(EventDataInterface, Protocol): +class EventDataWithEnergyInterface(EventDataInterface, Protocol): + + def __iter__(self) -> Iterator[EventWithEnergyInterface]:... + + @property + def energy_rad(self) -> Iterable[float]:... + + @property + def energy(self) -> Quantity: + """ + Add fancy energy quantity + """ + return Quantity(self.energy_rad, u.rad) + +@runtime_checkable +class ComptonDataSpaceEventDataInterface(EventDataInterface, Protocol): - def __iter__(self) -> Iterator[EventWithEnergy]:... + def __iter__(self) -> Iterator[ComptonDataSpaceEventInterface]:... @property - def energy_value(self) -> Iterable[float]:... + def frame(self) -> BaseCoordinateFrame: ... @property - def energy_unit(self) -> Unit:... 
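
As a usage sketch of the stream-style event data added in this commit (hedged: the file name below is a placeholder for any DC3 unbinned FITS file readable by UnBinnedData.get_dict_from_fits, not a file shipped with cosipy):

import itertools
from pathlib import Path

from cosipy.data_io.EmCDSUnbinnedData import TimeTagEmCDSEventDataInSCFrameFromDC3Fits

# "unbinned_events.fits.gz" is a placeholder path
data = TimeTagEmCDSEventDataInSCFrameFromDC3Fits(Path("unbinned_events.fits.gz"))

print(data.nevents, "events in", data.frame.__class__.__name__)

# Events stream out one at a time and expose the per-event accessors
# (id, energy in keV, Compton scattering angle in radians, ...)
for event in itertools.islice(data, 3):
    print(event.id, event.energy_keV, event.scattering_angle_rad)
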
+ def scattering_angle_rad(self) -> Iterable[float]:... @property - def energy(self) -> Quantity: + def scattering_angle(self) -> Angle: """ Add fancy energy quantity """ - return Quantity(self.energy_value, self.energy_unit) + return Angle(self.scattering_angle_rad, u.rad) + @property + def scattered_lon_rad(self) -> Iterable[float]: ... + + @property + def scattered_lat_rad(self) -> Iterable[float]: ... + + @property + def scattered_direction(self) -> SkyCoord: + """ + Add fancy energy quantity + """ + return SkyCoord(self.scattered_lon_rad, + np.pi/2 - self.scattered_lat_rad, + unit = u.rad, + frame = self.frame) + +@runtime_checkable +class EventDataInSCFrameInterface(EventDataInterface, Protocol): + + @property + def frame(self) -> SpacecraftFrame:... + +@runtime_checkable +class ComptonDataSpaceInSCFrameEventDataInterface(EventDataInSCFrameInterface, + ComptonDataSpaceEventDataInterface, + Protocol): + def __iter__(self) -> Iterator[ComptonDataSpaceInSCFrameEventInterface]:... + +class TimeTagEmCDSEventDataInSCFrameInterface(TimeTagEventDataInterface, + EventDataWithEnergyInterface, + ComptonDataSpaceInSCFrameEventDataInterface): + def __iter__(self) -> Iterator[TimeTagEmCDSEventInSCFrameInterface]:... diff --git a/cosipy/interfaces/event.py b/cosipy/interfaces/event.py index a0bcb698..f93742c7 100644 --- a/cosipy/interfaces/event.py +++ b/cosipy/interfaces/event.py @@ -1,14 +1,19 @@ from abc import ABC, abstractmethod from typing import Sequence, Union, Protocol + +import numpy as np +from astropy.coordinates import Angle, SkyCoord, BaseCoordinateFrame +from scoords import SpacecraftFrame from typing_extensions import runtime_checkable from astropy.time import Time from astropy.units import Quantity, Unit +import astropy.units as u __all__ = [ - "Event", - "TimeTagEvent", - "EventWithEnergy", + "EventInterface", + "TimeTagEventInterface", + "EventWithEnergyInterface", ] class EventMetadata: @@ -32,16 +37,24 @@ def __repr__(self): return f"{self.__class__.__name__}({self._metadata})" @runtime_checkable -class Event(Protocol): +class EventInterface(Protocol): """ Derived classes implement all accessors """ + @property + def id(self) -> int: + """ + Typically set by the main data loader or source. + + No necessarily in sequential order + """ + @property def metadata(self) -> EventMetadata:... @runtime_checkable -class TimeTagEvent(Event, Protocol): +class TimeTagEventInterface(EventInterface, Protocol): @property def jd1(self) -> float:... @@ -57,18 +70,69 @@ def time(self) -> Time: return Time(self.jd1, self.jd2, format = 'jd') @runtime_checkable -class EventWithEnergy(Event, Protocol): +class EventWithEnergyInterface(EventInterface, Protocol): @property - def energy_value(self) -> float:... + def energy_keV(self) -> float:... @property - def energy_unit(self) -> Unit:... + def energy(self) -> Quantity: + """ + Add fancy energy quantity + """ + return Quantity(self.energy_keV, u.keV) + +@runtime_checkable +class ComptonDataSpaceEventInterface(EventInterface, Protocol): @property - def energy(self) -> Quantity: + def frame(self) -> BaseCoordinateFrame:... + + @property + def scattering_angle_rad(self) -> float: ... + + @property + def scattering_angle(self) -> Angle: + """ + Add fancy energy quantity + """ + return Angle(self.scattering_angle_rad, u.rad) + + @property + def scattered_lon_rad(self) -> float: ... + + @property + def scattered_lat_radians(self) -> float: ... 
+ + @property + def scattered_direction(self) -> SkyCoord: """ Add fancy energy quantity """ - return Quantity(self.energy_value, self.energy_unit) + return SkyCoord(self.scattered_lon_rad, + np.pi/2 - self.scattered_lat_radians, + unit=u.rad, + frame=self.frame) + + +@runtime_checkable +class EventInSCFrameInterface(EventInterface, Protocol): + + @property + def frame(self) -> SpacecraftFrame:... + +@runtime_checkable +class ComptonDataSpaceInSCFrameEventInterface(EventInSCFrameInterface, + ComptonDataSpaceEventInterface, + Protocol): + pass + +class TimeTagEmCDSEventInSCFrameInterface(TimeTagEventInterface, + EventWithEnergyInterface, + ComptonDataSpaceInSCFrameEventInterface): + pass + + + + diff --git a/cosipy/interfaces/event_selection.py b/cosipy/interfaces/event_selection.py index f7b80ecb..81734e77 100644 --- a/cosipy/interfaces/event_selection.py +++ b/cosipy/interfaces/event_selection.py @@ -2,12 +2,12 @@ from typing import Protocol, runtime_checkable, Dict, Any, Iterator, Sequence, Generator, Iterable, Union, Optional, \ Tuple -from . import Event +from . import EventInterface @runtime_checkable class EventSelectorInterface(Protocol): - def select(self, event:Union[Event, Iterable[Event]]) -> Union[bool, Iterable[bool]]: + def select(self, event:Union[EventInterface, Iterable[EventInterface]]) -> Union[bool, Iterable[bool]]: """ True to keep an event @@ -15,7 +15,7 @@ def select(self, event:Union[Event, Iterable[Event]]) -> Union[bool, Iterable[bo As many values for an Iterable of events """ - def mask(self, events: Iterable[Event]) -> Iterable[Tuple[bool,Event]]: + def mask(self, events: Iterable[EventInterface]) -> Iterable[Tuple[bool,EventInterface]]: """ Returns an iterable of tuples. Each tuple has 2 elements: - First: True to keep an event, False to filter it out. @@ -25,9 +25,11 @@ def mask(self, events: Iterable[Event]) -> Iterable[Tuple[bool,Event]]: for selected, event in zip(self.select(events1), events2): yield selected, event - def __call__(self, events: Iterable[Event]) -> Iterable[Event]: + def __call__(self, events: Iterable[EventInterface]) -> Union[Iterable[EventInterface], None]: """ Skips events that were not selected + + Returning None raises StopIteration """ for selected,event in self.mask(events): if selected: diff --git a/cosipy/interfaces/expectation_interface.py b/cosipy/interfaces/expectation_interface.py index 9c062185..b7bc11e7 100644 --- a/cosipy/interfaces/expectation_interface.py +++ b/cosipy/interfaces/expectation_interface.py @@ -4,7 +4,7 @@ import numpy as np from histpy import Axes -from cosipy.interfaces import BinnedDataInterface, EventDataInterface, DataInterface, Event +from cosipy.interfaces import BinnedDataInterface, EventDataInterface, DataInterface, EventInterface __all__ = [ "ExpectationDensityInterface", @@ -43,10 +43,17 @@ def ncounts(self) -> float: Total expected counts """ - def expectation_density(self, events: Union[Event, Iterable[Event]]) -> Union[Event, Iterable[float]]: + def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: """ - Return a single value for a single Event. - As many values for an Iterable of events + Return the expected number of counts density from the start-th event + to the stop-th event. 
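
One way a consumer could use the new start/stop signature is to stream the per-event densities in chunks. This is a hedged sketch, not code from this patch: `response` stands for any ExpectationDensityInterface implementation, `nevents` for the number of events in the matching data set, and it is assumed that a `stop` past the last event is simply clipped. The last line is the generic unbinned extended Poisson log-likelihood combination (sum of log densities minus the total expectation), up to a data-only constant.

import numpy as np

chunk = 10_000
log_density_sum = 0.0

for start in range(0, nevents, chunk):
    # Assumption: a stop index beyond the last event is clipped by the implementation
    block = np.fromiter(response.expectation_density(start, start + chunk), dtype=float)
    log_density_sum += np.sum(np.log(block))

log_like = log_density_sum - response.ncounts()
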
+ + Parameters + ---------- + start : None | int + From beginning by default + stop: None|int + Until the end by default """ diff --git a/cosipy/interfaces/instrument_response_interface.py b/cosipy/interfaces/instrument_response_interface.py index 61adc065..1c0bd4b3 100644 --- a/cosipy/interfaces/instrument_response_interface.py +++ b/cosipy/interfaces/instrument_response_interface.py @@ -1,18 +1,21 @@ -from typing import Protocol, Union +from typing import Protocol, Union, Optional, Iterable, Tuple, runtime_checkable from astropy.coordinates import SkyCoord +from astropy.time import Time from astropy.units import Quantity from histpy import Axes, Histogram from astropy import units as u from scoords import Attitude -from cosipy.interfaces import BinnedDataInterface +from cosipy.interfaces import BinnedDataInterface, ExpectationDensityInterface, BinnedExpectationInterface, EventInterface +from cosipy.interfaces.photon_list import PhotonListWithDirectionInterface +from cosipy.interfaces.photon_parameters import PhotonInterface from cosipy.polarization import PolarizationAngle __all__ = ["BinnedInstrumentResponseInterface"] -class BinnedInstrumentResponseInterface(Protocol): +class BinnedInstrumentResponseInterface(BinnedExpectationInterface, Protocol): def differential_effective_area(self, data: BinnedDataInterface, @@ -53,3 +56,52 @@ def differential_effective_area(self, The effective area times the event measurement probability distribution integrated on each of the bins of the provided axes. It has the shape (direction.shape, energy.shape, polarization.shape, axes.shape) """ + +@runtime_checkable +class InstrumentResponseFunctionInterface(Protocol): + + def event_probability(self, query: Iterable[Tuple[PhotonInterface, EventInterface]]) -> Iterable[float]: + """ + Return the probability density of measuring a given event given a photon. + """ + + def random_events(self, photons:Iterable[PhotonInterface]) -> Iterable[EventInterface]: + """ + Return a stream of random events, one per photon + """ + +@runtime_checkable +class NearFieldInstrumentResponseFunctionInterface(InstrumentResponseFunctionInterface, Protocol): + + def effective_area_cm2(self, photons: PhotonListWithDirectionInterface) -> Iterable[float]: + """ + + """ + + def effective_area(self, photons: PhotonListWithDirectionInterface) -> Iterable[u.Quantity]: + """ + Convenience function + """ + for area_cm2 in self.effective_area_cm2(photons): + yield u.Quantity(area_cm2, u.cm2) + + + + + + + + + + + + + + + + + + + + + diff --git a/cosipy/interfaces/photon_list.py b/cosipy/interfaces/photon_list.py new file mode 100644 index 00000000..a0e1ff5a --- /dev/null +++ b/cosipy/interfaces/photon_list.py @@ -0,0 +1,84 @@ +import itertools +from typing import Protocol, ClassVar, Type, Iterator, runtime_checkable, Iterable + +from astropy.coordinates import BaseCoordinateFrame, SkyCoord +from scoords import SpacecraftFrame + +from .photon_parameters import PhotonInterface, PhotonWithEnergyInterface + +import astropy.units as u + +@runtime_checkable +class PhotonListInterface(Protocol): + + # Type returned by __iter__ + photon_type = ClassVar[Type] + + def __iter__(self) -> Iterator[PhotonInterface]: + """ + Return one Event at a time + """ + def __getitem__(self, item: int) -> PhotonInterface: + """ + Convenience method. Pretty slow in general. 
It's suggested that + the implementations override it + """ + return next(itertools.islice(self, item, None)) + + @property + def nphotons(self) -> int: + """ + Total number of events yielded by __iter__ + + Convenience method. Pretty slow in general. It's suggested that + the implementations override it + """ + return sum(1 for _ in iter(self)) + +@runtime_checkable +class EventDataWithEnergyInterface(PhotonListInterface, Protocol): + + def __iter__(self) -> Iterator[PhotonWithEnergyInterface]:... + + @property + def energy_radians(self) -> Iterable[float]:... + + @property + def energy(self) -> u.Quantity: + """ + Add fancy energy quantity + """ + return u.Quantity(self.energy_radians, u.radians) + +@runtime_checkable +class PhotonListWithDirectionInterface(PhotonListInterface, Protocol): + + @property + def frame(self) -> BaseCoordinateFrame:... + + @property + def direction_lon_radians(self) -> Iterable[float]: ... + + @property + def direction_lat_radians(self) -> Iterable[float]: ... + + @property + def direction_direction(self) -> SkyCoord: + """ + Add fancy energy quantity + """ + return SkyCoord(self.direction_lon_radians, + self.direction_lat_radians, + unit=u.rad, + frame=self.frame) + +@runtime_checkable +class PhotonListInSCFrameInterface(PhotonListInterface, Protocol): + + @property + def frame(self) -> SpacecraftFrame:... + +class PhotonWithDirectionInSCFrameInterface(PhotonListWithDirectionInterface, + PhotonListInSCFrameInterface): + pass + diff --git a/cosipy/interfaces/photon_parameters.py b/cosipy/interfaces/photon_parameters.py new file mode 100644 index 00000000..b8570ca9 --- /dev/null +++ b/cosipy/interfaces/photon_parameters.py @@ -0,0 +1,57 @@ +from typing import Protocol, runtime_checkable + +from astropy import units as u +from astropy.coordinates import BaseCoordinateFrame, SkyCoord +from scoords import SpacecraftFrame + + +@runtime_checkable +class PhotonInterface(Protocol): + """ + Derived classes have all access methods + """ + +@runtime_checkable +class PhotonWithEnergyInterface(PhotonInterface, Protocol): + + @property + def energy_keV(self) -> float:... + + @property + def energy(self) -> u.Quantity: + """ + Add fancy energy quantity + """ + return u.Quantity(self.energy_keV, u.keV) + +@runtime_checkable +class PhotonWithDirectionInterface(PhotonInterface, Protocol): + + @property + def frame(self) -> BaseCoordinateFrame:... + + @property + def direction_lon_radians(self) -> float: ... + + @property + def direction_lat_radians(self) -> float: ... + + @property + def direction_direction(self) -> SkyCoord: + """ + Add fancy energy quantity + """ + return SkyCoord(self.direction_lon_radians, + self.direction_lat_radians, + unit=u.rad, + frame=self.frame) + +@runtime_checkable +class PhotonInSCFrameInterface(PhotonInterface, Protocol): + + @property + def frame(self) -> SpacecraftFrame:... 
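# --- Editor's sketch (illustrative only, not part of this patch) ---------------
# A concrete class only has to supply the bare float property; the Quantity
# convenience (``energy``) is inherited from the protocol defined above.
# ``MonochromaticPhoton`` is a hypothetical name used purely for illustration;
# the import path follows the photon_parameters module added in this patch.
import astropy.units as u

from cosipy.interfaces.photon_parameters import PhotonWithEnergyInterface

class MonochromaticPhoton(PhotonWithEnergyInterface):

    def __init__(self, energy_keV: float):
        self._energy_keV = energy_keV

    @property
    def energy_keV(self) -> float:
        return self._energy_keV

photon = MonochromaticPhoton(511.0)
assert isinstance(photon, PhotonWithEnergyInterface)  # holds nominally and via the runtime_checkable structural check
assert photon.energy == u.Quantity(511.0, u.keV)      # convenience property inherited from the protocol
# -------------------------------------------------------------------------------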
+ +class PhotonWithDirectionInSCFrameInterface(PhotonWithDirectionInterface, + PhotonInSCFrameInterface): + pass \ No newline at end of file diff --git a/cosipy/interfaces/threeml_plugin_interface.py b/cosipy/interfaces/threeml_plugin_interface.py index eb5f15cf..f1590592 100644 --- a/cosipy/interfaces/threeml_plugin_interface.py +++ b/cosipy/interfaces/threeml_plugin_interface.py @@ -1,6 +1,6 @@ -from typing import Dict +from typing import Dict, Optional -from cosipy.interfaces import ThreeMLModelFoldingInterface +from cosipy.interfaces import ThreeMLModelFoldingInterface, BackgroundInterface from cosipy.interfaces.likelihood_interface import LikelihoodInterface from threeML import PluginPrototype, Parameter @@ -9,7 +9,10 @@ class ThreeMLPluginInterface(PluginPrototype): def __init__(self, - name: str, likelihood: LikelihoodInterface): + name: str, + likelihood: LikelihoodInterface, + response:ThreeMLModelFoldingInterface, + bkg:Optional[BackgroundInterface] = None,): """ Parameters @@ -26,20 +29,18 @@ def __init__(self, super().__init__(name, {}) self._like = likelihood - - # Check we can use this likelihood - if not isinstance(self._like.response, ThreeMLModelFoldingInterface): - raise TypeError("ThreeMLPluginInterface needs a LikelihoodInterface using a response of type ThreeMLModelResponseInterface") + self._response = response + self._bkg = bkg # Currently, the only nuisance parameters are the ones for the bkg # We could have systematics here as well - if self._like.bkg is None: + if self._bkg is None: self._threeml_bkg_parameters = {} else: # 1. Adds plugin name, required by 3ML code # See https://github.com/threeML/threeML/blob/7a16580d9d5ed57166e3b1eec3d4fccd3eeef1eb/threeML/classicMLE/joint_likelihood.py#L131 # 2. Translation to bkg bare parameters. 3ML "Parameter" keeps track of a few more things than a "bare" (Quantity) parameter. - self._threeml_bkg_parameters = {self._add_prefix_name(label): Parameter(label, param.value, unit=param.unit) for label, param in self._like.bkg.parameters.items()} + self._threeml_bkg_parameters = {self._add_prefix_name(label): Parameter(label, param.value, unit=param.unit) for label, param in self._bkg.parameters.items()} # Allows idiom plugin.bkg_parameters["bkg_param_name"] to get 3ML parameter self.bkg_parameter = ThreeMLPluginInterface._Bkg_parameter(self) @@ -67,14 +68,14 @@ def update_nuisance_parameters(self, new_nuisance_parameters: Dict[str, Paramete def _update_bkg_parameters(self, name = None): # 1. Remove plugin name. Opposite of the nuisance_parameters property # 2. 
Convert to "bare" Quantity value - if self._like.bkg is not None: + if self._bkg is not None: if name is None: #Update all - self._like.bkg.set_parameters(**{self._remove_prefix_name(label): parameter.as_quantity for label, parameter in + self._bkg.set_parameters(**{self._remove_prefix_name(label): parameter.as_quantity for label, parameter in self._threeml_bkg_parameters.items()}) else: # Only specific value - self._like.bkg.set_parameters(**{name:self._threeml_bkg_parameters[self._add_prefix_name(name)].as_quantity}) + self._bkg.set_parameters(**{name:self._threeml_bkg_parameters[self._add_prefix_name(name)].as_quantity}) class _Bkg_parameter: # Allows idiom plugin.bkg_parameters["bkg_param_name"] to get 3ML parameter @@ -94,7 +95,7 @@ def get_number_of_data_points(self) -> int: return self._like.nobservations def set_model(self, model): - self._like.response.set_model(model) + self._response.set_model(model) def get_log_like(self): # Update underlying background object in case the Parameter objects changed internally diff --git a/cosipy/statistics/likelihood_functions.py b/cosipy/statistics/likelihood_functions.py index 413fe9b5..21e68769 100644 --- a/cosipy/statistics/likelihood_functions.py +++ b/cosipy/statistics/likelihood_functions.py @@ -21,18 +21,19 @@ 'PoissonLikelihood'] class UnbinnedLikelihood(UnbinnedLikelihoodInterface): - def __init__(self, data:EventDataInterface, response:ExpectationDensityInterface, bkg:BackgroundDensityInterface = None): + def __init__(self, response:ExpectationDensityInterface, bkg:BackgroundDensityInterface = None): + """ + Will get the number of events from the response and bkg expectation_density iterators + + Parameters + ---------- + response + bkg + """ - self._data = data self._bkg = bkg self._response = response - - @property - def data (self) -> EventDataInterface: return self._data - @property - def response(self) -> ExpectationDensityInterface: return self._response - @property - def bkg (self) -> BackgroundDensityInterface: return self._bkg + self._nobservations = None @property def has_bkg(self): @@ -40,7 +41,28 @@ def has_bkg(self): @property def nobservations(self) -> int: - return self._data.nevents + """ + Calling get_log_like first is faster, since we don't need to loop though the + events + """ + + if self._nobservations is None: + self._nobservations = sum(1 for _ in self._get_density_iter()) + + return self._nobservations + + def _get_density_iter(self): + + if self.has_bkg: + + signal_density = self._response.expectation_density() + bkg_density = self._bkg.expectation_density() + + return map(operator.add, signal_density, bkg_density) + + else: + + return self._response.expectation_density() def get_log_like(self) -> float: @@ -49,39 +71,42 @@ def get_log_like(self) -> float: ntot = self._response.ncounts() if self.has_bkg: - ntot += self._bkg.ncounts() - # Prevent 2 iteration over data using tee() - data_iter_1, data_iter_2 = itertools.tee(self._data, 2) + # It's faster to compute all log values at once, but requires keeping them in memory + # Doing it by chunk is a compromise. 
We might need to adjust the chunk_size + # Based on the system + nobservations = 0 + density_log_sum = 0 - signal_density = self._response.expectation_density(data_iter_1) - bkg_density = self._bkg.expectation_density(data_iter_2) + def chunks(): + chunk_size = 100000 + it = iter(self._get_density_iter()) + while chunk := tuple(itertools.islice(it, chunk_size)): + yield chunk - density = np.fromiter(map(operator.add, signal_density, bkg_density), dtype=float) + for density_iter_chunk in chunks(): - else: - density = np.fromiter(self._response.expectation_density(), dtype=float) + density = np.fromiter(density_iter_chunk, dtype=float) + density_log_sum += np.sum(np.log(density)) + nobservations += density.size - log_like = np.sum(np.log(density)) - ntot + self._nobservations = nobservations + + log_like = density_log_sum - ntot return log_like class PoissonLikelihood(BinnedLikelihoodInterface): - def __init__(self, data:BinnedDataInterface, response:BinnedExpectationInterface, bkg:BinnedBackgroundInterface = None): + def __init__(self, data:BinnedDataInterface, + response:BinnedExpectationInterface, + bkg:BinnedBackgroundInterface = None): self._data = data self._bkg = bkg self._response = response - @property - def data (self) -> BinnedDataInterface: return self._data - @property - def response(self) -> BinnedExpectationInterface: return self._response - @property - def bkg (self) -> BinnedBackgroundInterface: return self._bkg - @property def has_bkg(self): return self._bkg is not None diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py index 6d28b7a2..6088baf2 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py @@ -209,7 +209,10 @@ def main(): like_fun = PoissonLikelihood(data, response, bkg) - cosi = ThreeMLPluginInterface('cosi', like_fun) + cosi = ThreeMLPluginInterface('cosi', + like_fun, + response, + bkg) # Nuisance parameter guess, bounds, etc. cosi.bkg_parameter['bkg_norm'] = Parameter("bkg_norm", # background parameter diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py index 23cefabc..eb1e5efa 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py @@ -1,52 +1,12 @@ #!/usr/bin/env python # coding: utf-8 -# # Spectral fitting example (Crab) -# **To run this, you need the following files, which can be downloaded using the first few cells of this notebook:** -# - orientation file (20280301_3_month_with_orbital_info.ori) -# - binned data (crab_bkg_binned_data.hdf5, crab_binned_data.hdf5, & bkg_binned_data.hdf5) -# - detector response (SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5) -# -# **The binned data are simulations of the Crab Nebula and albedo photon background produced using the COSI SMEX mass model. The detector response needs to be unzipped before running the notebook.** - -# This notebook fits the spectrum of a Crab simulated using MEGAlib and combined with background. 
-# -# [3ML](https://threeml.readthedocs.io/) is a high-level interface that allows multiple datasets from different instruments to be used coherently to fit the parameters of source model. A source model typically consists of a list of sources with parametrized spectral shapes, sky locations and, for extended sources, shape. Polarization is also possible. A "coherent" analysis, in this context, means that the source model parameters are fitted using all available datasets simultanously, rather than performing individual fits and finding a well-suited common model a posteriori. -# -# In order for a dataset to be included in 3ML, each instrument needs to provide a "plugin". Each plugin is responsible for reading the data, convolving the source model (provided by 3ML) with the instrument response, and returning a likelihood. In our case, we'll compute a binned Poisson likelihood: -# -# $$ -# \log \mathcal{L}(\mathbf{x}) = \sum_i \log \frac{\lambda_i(\mathbf{x})^{d_i} \exp (-\lambda_i)}{d_i!} -# $$ -# -# where $d_i$ are the counts on each bin and $\lambda_i$ are the expected counts given a source model with parameters $\mathbf{x}$. -# -# In this example, we will fit a single point source with a known location. We'll assume the background is known and fixed up to a scaling factor. Finally, we will fit a Band function: -# -# $$ -# f(x) = K \begin{cases} \left(\frac{x}{E_{piv}}\right)^{\alpha} \exp \left(-\frac{(2+\alpha) -# * x}{x_{p}}\right) & x \leq (\alpha-\beta) \frac{x_{p}}{(\alpha+2)} \\ \left(\frac{x}{E_{piv}}\right)^{\beta} -# * \exp (\beta-\alpha)\left[\frac{(\alpha-\beta) x_{p}}{E_{piv}(2+\alpha)}\right]^{\alpha-\beta} -# * &x>(\alpha-\beta) \frac{x_{p}}{(\alpha+2)} \end{cases} -# $$ -# -# where $K$ (normalization), $\alpha$ & $\beta$ (spectral indeces), and $x_p$ (peak energy) are the free parameters, while $E_{piv}$ is the pivot energy which is fixed (and arbitrary). -# -# Considering these assumptions: -# -# $$ -# \lambda_i(\mathbf{x}) = B*b_i + s_i(\mathbf{x}) -# $$ -# -# where $B*b_i$ are the estimated counts due to background in each bin with $B$ the amplitude and $b_i$ the shape of the background, and $s_i$ are the corresponding expected counts from the source, the goal is then to find the values of $\mathbf{x} = [K, \alpha, \beta, x_p]$ and $B$ that maximize $\mathcal{L}$. These are the best estimations of the parameters. -# -# The final module needs to also fit the time-dependent background, handle multiple point-like and extended sources, as well as all the spectral models supported by 3ML. Eventually, it will also fit the polarization angle. However, this simple example already contains all the necessary pieces to do a fit. 
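For concreteness, a minimal sketch of the binned Poisson log-likelihood written out above
(all numbers below are made-up toy values, not COSI data; scipy's ``gammaln`` supplies the
log-factorial term):

    import numpy as np
    from scipy.special import gammaln

    d = np.array([12, 30, 51, 24, 7])             # observed counts d_i (toy values)
    b = np.array([10.0, 25.0, 40.0, 20.0, 5.0])   # background shape b_i (toy values)
    B = 1.1                                       # background amplitude
    s = np.array([1.5, 4.0, 8.0, 3.0, 1.0])       # source expectation s_i(x) (toy values)

    lam = B * b + s                               # lambda_i = B*b_i + s_i(x)

    # log L = sum_i [ d_i log(lambda_i) - lambda_i - log(d_i!) ]
    log_like = np.sum(d * np.log(lam) - lam - gammaln(d + 1))
    print(log_like)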
- -# In[1]: from cosipy import test_data, BinnedData, UnBinnedData +from cosipy.data_io.EmCDSUnbinnedData import TimeTagEmCDSEventDataInSCFrameFromArrays, \ + TimeTagEmCDSEventDataInSCFrameFromDC3Fits from cosipy.spacecraftfile import SpacecraftHistory from cosipy.response.FullDetectorResponse import FullDetectorResponse from cosipy.util import fetch_wasabi_file @@ -62,7 +22,7 @@ from astropy.time import Time import astropy.units as u -from astropy.coordinates import SkyCoord, Galactic +from astropy.coordinates import SkyCoord, Galactic, Angle import numpy as np import matplotlib.pyplot as plt @@ -76,360 +36,16 @@ def main(): - # ## Download and read in binned data - - # Define the path to the directory containing the data, detector response, orientation file, and yaml files if they have already been downloaded, or the directory to download the files into - - # In[2]: - - - data_path = Path("") # /path/to/files. Current dir by default - - - # Download the orientation file - - # In[ ]: - - fetch_wasabi_file('COSI-SMEX/DC3/Data/Orientation/DC3_final_530km_3_month_with_slew_15sbins_GalacticEarth_SAA.ori', - output=str(data_path / 'DC3_final_530km_3_month_with_slew_15sbins_GalacticEarth_SAA.ori'), - checksum='e5e71e3528e39b855b0e4f74a1a2eebe') - - # Download the unbinned Crab data - - # In[5]: - - - fetch_wasabi_file('COSI-SMEX/DC3/Data/Sources/crab_standard_3months_unbinned_data_filtered_with_SAAcut.fits.gz', output=str(data_path / 'crab_standard_3months_unbinned_data_filtered_with_SAAcut.fits.gz'), checksum = '1d73e7b9e46e51215738075e91a52632') - - - - # Read in the spacecraft orientation file - - # In[4]: - - - sc_orientation = SpacecraftHistory.open(data_path / "DC3_final_530km_3_month_with_slew_15sbins_GalacticEarth_SAA.ori") - - - - # Read data - - UnBinnedData - - # Create BinnedData objects for the Crab only, Crab+background, and background only. The Crab only simulation is not used for the spectral fit, but can be used to compare the fitted spectrum to the source simulation - - # In[5]: - - - crab = BinnedData(data_path / "crab.yaml") - crab_bkg = BinnedData(data_path / "crab.yaml") - bkg = BinnedData(data_path / "background.yaml") - - - # Load binned .hdf5 files - - # In[6]: - - - crab.load_binned_data_from_hdf5(binned_data=data_path / "crab_binned_data.hdf5") - crab_bkg.load_binned_data_from_hdf5(binned_data=data_path / "crab_bkg_binned_data.hdf5") - bkg.load_binned_data_from_hdf5(binned_data=data_path / "bkg_binned_data.hdf5") - - - # Define the path to the detector response - - # In[7]: - - - # Before and after Jeremy's changes - dr = str(data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5") # path to detector response - #dr = str(data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5") # path to detector response - - - # ## Perform spectral fit - - # ============ Interfaces ============== - - output_suffix = 'interfaces' - - dr = FullDetectorResponse.open(dr) - instrument_response = BinnedInstrumentResponse(dr) - - # Set background parameter, which is used to fit the amplitude of the background, and instantiate the COSI 3ML plugin - - # In[8]: - - bkg_dist = bkg.binned_data.project('Em', 'Phi', 'PsiChi') - - # Workaround to avoid inf values. Out bkg should be smooth, but currently it's not. - # Reproduces results before refactoring. 
It's not _exactly_ the same, since this fudge value was 1e-12, and - # it was added to the expectation, not the normalized bkg - bkg_dist += sys.float_info.min - - data = crab_bkg.get_em_cds() - - bkg = FreeNormBinnedBackground(bkg_dist) - - # Currently using the same NnuLambda, Ei and Pol axes as the underlying FullDetectorResponse, - # matching the behavior of v0.3. This is all the current BinnedInstrumentResponse can do. - # In principle, this can be decoupled, and a BinnedInstrumentResponseInterface implementation - # can provide the response for an arbitrary directions, Ei and Pol values. - # NOTE: this is currently only implemented for data in local coords - psr = BinnedThreeMLPointSourceResponse(data = data, - instrument_response = instrument_response, - sc_history=sc_orientation, - energy_axis = dr.axes['Ei'], - polarization_axis = dr.axes['Pol'] if 'Pol' in dr.axes.labels else None, - nside = 2*data.axes['PsiChi'].nside) - - ##==== - - - response = BinnedThreeMLModelFolding(data = data, point_source_response = psr) - - like_fun = PoissonLikelihood() - like_fun.set_data(data) - like_fun.set_response(response) - like_fun.set_background(bkg) - - cosi = ThreeMLPluginInterface('cosi', like_fun) - - # Nuisance parameter guess, bounds, etc. - cosi.bkg_parameter['bkg_norm'] = Parameter("bkg_norm", # background parameter - 1, # initial value of parameter - min_value=0, # minimum value of parameter - max_value=5, # maximum value of parameter - delta=0.05, # initial step used by fitting engine - ) - - - # ======== Interfaces end ========== - - # Define a point source at the known location with a Band function spectrum and add it to the model. The initial values of the Band function parameters are set to the true values used to simulate the source - - - # In[9]: - - - l = 184.56 - b = -5.78 - - alpha = -1.99 - beta = -2.32 - E0 = 531. * (alpha - beta) * u.keV - xp = E0 * (alpha + 2) / (alpha - beta) - piv = 500. * u.keV - K = 3.07e-5 / u.cm / u.cm / u.s / u.keV - - spectrum = Band() - - spectrum.alpha.min_value = -2.14 - spectrum.alpha.max_value = 3.0 - spectrum.beta.min_value = -5.0 - spectrum.beta.max_value = -2.15 - spectrum.xp.min_value = 1.0 - - spectrum.alpha.value = alpha - spectrum.beta.value = beta - spectrum.xp.value = xp.value - spectrum.K.value = K.value - spectrum.piv.value = piv.value - - spectrum.xp.unit = xp.unit - spectrum.K.unit = K.unit - spectrum.piv.unit = piv.unit - - spectrum.alpha.delta = 0.01 - spectrum.beta.delta = 0.01 - - source = PointSource("source", # Name of source (arbitrary, but needs to be unique) - l = l, # Longitude (deg) - b = b, # Latitude (deg) - spectral_shape = spectrum) # Spectral model - - # Optional: free the position parameters - #source.position.l.free = True - #source.position.b.free = True - - model = Model(source) # Model with single source. If we had multiple sources, we would do Model(source1, source2, ...) - - # Optional: if you want to call get_log_like manually, then you also need to set the model manually - # 3ML does this internally during the fit though - cosi.set_model(model) - - - # Gather all plugins and combine with the model in a JointLikelihood object, then perform maximum likelihood fit - - # In[10]: - - - plugins = DataList(cosi) # If we had multiple instruments, we would do e.g. DataList(cosi, lat, hawc, ...) 
- - like = JointLikelihood(model, plugins, verbose = False) - - like.fit() - - - # ## Error propagation and plotting (Band function) - - # Define Band function spectrum injected into MEGAlib - - # In[11]: - - ## Injected - - l = 184.56 - b = -5.78 - - alpha_inj = -1.99 - beta_inj = -2.32 - E0_inj = 531. * (alpha_inj - beta_inj) * u.keV - xp_inj = E0_inj * (alpha_inj + 2) / (alpha_inj - beta_inj) - piv_inj = 100. * u.keV - K_inj = 7.56e-4 / u.cm / u.cm / u.s / u.keV - - spectrum_inj = Band() - - spectrum_inj.alpha.min_value = -2.14 - spectrum_inj.alpha.max_value = 3.0 - spectrum_inj.beta.min_value = -5.0 - spectrum_inj.beta.max_value = -2.15 - spectrum_inj.xp.min_value = 1.0 - - spectrum_inj.alpha.value = alpha_inj - spectrum_inj.beta.value = beta_inj - spectrum_inj.xp.value = xp_inj.value - spectrum_inj.K.value = K_inj.value - spectrum_inj.piv.value = piv_inj.value - - spectrum_inj.xp.unit = xp_inj.unit - spectrum_inj.K.unit = K_inj.unit - spectrum_inj.piv.unit = piv_inj.unit - - # Expectation for injected source - source_inj = PointSource("source", # Name of source (arbitrary, but needs to be unique) - l=l, # Longitude (deg) - b=b, # Latitude (deg) - spectral_shape=spectrum_inj) # Spectral model - - psr.set_source(source_inj) - expectation_inj = psr.expectation(data, copy=True) - - - # The summary of the results above tell you the optimal values of the parameters, as well as the errors. Propogate the errors to the "evaluate_at" method of the spectrum - - # In[12]: - - - results = like.results - - - print(results.display()) - - parameters = {par.name:results.get_variates(par.path) - for par in results.optimized_model["source"].parameters.values() - if par.free} - - results_err = results.propagate(results.optimized_model["source"].spectrum.main.shape.evaluate_at, **parameters) - - print(results.optimized_model["source"]) - - # Evaluate the flux and errors at a range of energies for the fitted and injected spectra, and the simulated source flux - - # In[13]: - - - energy = np.geomspace(100*u.keV,10*u.MeV).to_value(u.keV) - - flux_lo = np.zeros_like(energy) - flux_median = np.zeros_like(energy) - flux_hi = np.zeros_like(energy) - flux_inj = np.zeros_like(energy) - - for i, e in enumerate(energy): - flux = results_err(e) - flux_median[i] = flux.median - flux_lo[i], flux_hi[i] = flux.equal_tail_interval(cl=0.68) - flux_inj[i] = spectrum_inj.evaluate_at(e) - - binned_energy_edges = crab.binned_data.axes['Em'].edges.value - binned_energy = np.array([]) - bin_sizes = np.array([]) - - for i in range(len(binned_energy_edges)-1): - binned_energy = np.append(binned_energy, (binned_energy_edges[i+1] + binned_energy_edges[i]) / 2) - bin_sizes = np.append(bin_sizes, binned_energy_edges[i+1] - binned_energy_edges[i]) - - expectation = response.expectation(data, copy = True) - - - # Plot the fitted and injected spectra - - # In[14]: - - - fig,ax = plt.subplots() - - ax.plot(energy, energy*energy*flux_median, label = "Best fit") - ax.fill_between(energy, energy*energy*flux_lo, energy*energy*flux_hi, alpha = .5, label = "Best fit (errors)") - ax.plot(energy, energy*energy*flux_inj, color = 'black', ls = ":", label = "Injected") - - ax.set_xscale("log") - ax.set_yscale("log") - - ax.set_xlabel("Energy (keV)") - ax.set_ylabel(r"$E^2 \frac{dN}{dE}$ (keV cm$^{-2}$ s$^{-1}$)") - - ax.legend() - - plt.show() - - # Plot the fitted spectrum convolved with the response, as well as the simulated source counts - - # In[15]: - - - fig,ax = plt.subplots() - - ax.stairs(expectation.project('Em').todense().contents, 
binned_energy_edges, color='purple', label = "Best fit convolved with response") - ax.stairs(expectation_inj.project('Em').todense().contents, binned_energy_edges, color='blue', label = "Injected spectrum convolved with response") - ax.errorbar(binned_energy, expectation.project('Em').todense().contents, yerr=np.sqrt(expectation.project('Em').todense().contents), color='purple', linewidth=0, elinewidth=1) - ax.stairs(crab.binned_data.project('Em').todense().contents, binned_energy_edges, color = 'black', ls = ":", label = "Source counts") - ax.errorbar(binned_energy, crab.binned_data.project('Em').todense().contents, yerr=np.sqrt(crab.binned_data.project('Em').todense().contents), color='black', linewidth=0, elinewidth=1) - - ax.set_xscale("log") - ax.set_yscale("log") - - ax.set_xlabel("Energy (keV)") - ax.set_ylabel("Counts") - - ax.legend() - - plt.show() - - - # Plot the fitted spectrum convolved with the response plus the fitted background, as well as the simulated source+background counts - - # In[16]: - - expectation_bkg = bkg.expectation(data.axes, copy = True) - - fig,ax = plt.subplots() - - ax.stairs(expectation.project('Em').todense().contents + expectation_bkg.project('Em').todense().contents, binned_energy_edges, color='purple', label = "Best fit convolved with response plus background") - ax.errorbar(binned_energy, expectation.project('Em').todense().contents+expectation_bkg.project('Em').todense().contents, yerr=np.sqrt(expectation.project('Em').todense().contents+expectation_bkg.project('Em').todense().contents), color='purple', linewidth=0, elinewidth=1) - ax.stairs(crab_bkg.binned_data.project('Em').todense().contents, binned_energy_edges, color = 'black', ls = ":", label = "Total counts") - ax.errorbar(binned_energy, crab_bkg.binned_data.project('Em').todense().contents, yerr=np.sqrt(crab_bkg.binned_data.project('Em').todense().contents), color='black', linewidth=0, elinewidth=1) - - ax.set_xscale("log") - ax.set_yscale("log") + # Download all data + data_path = Path("") # /path/to/files. Current dir by default - ax.set_xlabel("Energy (keV)") - ax.set_ylabel("Counts") + crab_data_path = data_path / "crab_standard_3months_unbinned_data_filtered_with_SAAcut.fits.gz" + fetch_wasabi_file('COSI-SMEX/DC3/Data/Sources/crab_standard_3months_unbinned_data_filtered_with_SAAcut.fits.gz', + output=str(crab_data_path), checksum='1d73e7b9e46e51215738075e91a52632') - ax.legend() + data = TimeTagEmCDSEventDataInSCFrameFromDC3Fits(crab_data_path) - plt.show() + return if __name__ == "__main__": diff --git a/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py index cb8c2fcd..7da237c4 100755 --- a/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py @@ -121,7 +121,10 @@ def main(): like_fun = PoissonLikelihood(data, response, bkg) - cosi = ThreeMLPluginInterface('cosi', like_fun) + cosi = ThreeMLPluginInterface('cosi', + like_fun, + response, + bkg) # Nuisance parameter guess, bounds, etc. 
cosi.bkg_parameter['bkg_norm'] = Parameter("bkg_norm", # background parameter diff --git a/docs/api/interfaces/examples/toy/toy_interfaces_example.py b/docs/api/interfaces/examples/toy/toy_interfaces_example.py index abab2a0a..8d21f5a2 100644 --- a/docs/api/interfaces/examples/toy/toy_interfaces_example.py +++ b/docs/api/interfaces/examples/toy/toy_interfaces_example.py @@ -11,7 +11,7 @@ from cosipy import SpacecraftHistory from cosipy.interfaces.background_interface import BackgroundDensityInterface -from cosipy.interfaces.data_interface import EventDataInterface, DataInterface, TimeTagEventData +from cosipy.interfaces.data_interface import EventDataInterface, DataInterface, TimeTagEventDataInterface from cosipy.interfaces.event import EventMetadata from cosipy.interfaces.event_selection import EventSelectorInterface @@ -23,8 +23,8 @@ BinnedThreeMLModelFoldingInterface, BinnedThreeMLSourceResponseInterface, ThreeMLPluginInterface, - UnbinnedThreeMLSourceResponseInterface, UnbinnedThreeMLModelFoldingInterface, Event, - ThreeMLSourceResponseInterface, TimeTagEvent) + UnbinnedThreeMLSourceResponseInterface, UnbinnedThreeMLModelFoldingInterface, EventInterface, + ThreeMLSourceResponseInterface, TimeTagEventInterface) from histpy import Axis, Axes, Histogram import numpy as np from scipy.stats import norm, uniform @@ -55,17 +55,22 @@ nevents_bkg = 1000 nevents_tot = nevents_signal + nevents_bkg -class ToyEvent(TimeTagEvent, Event): +class ToyEvent(TimeTagEventInterface, EventInterface): """ Unit-less 1D data of a measurement called "x" (could be anything) """ - def __init__(self, x, time:Time): + def __init__(self, index:int, x:float, time:Time): + self._id = index self._x = x self._jd1 = time.jd1 self._jd2 = time.jd2 self._metadata = EventMetadata() + @property + def id(self): + return self._id + @property def metadata(self) -> EventMetadata: return self._metadata @@ -86,53 +91,50 @@ class ToyData(DataInterface): event_type = ToyEvent -class ToyEventData(TimeTagEventData, ToyData): - # Random data. Normal signal on top of uniform bkg - - def __init__(self, selector:EventSelectorInterface = None): +class ToyEventDataLoader(ToyData): + # This simulates reading event from file + # Check that they are not being read twice + def __init__(self): rng = np.random.default_rng() - self._x = np.append(rng.normal(size = nevents_signal), rng.uniform(toy_axis.lo_lim, toy_axis.hi_lim, size = nevents_bkg)) + self._x = np.append(rng.normal(size=nevents_signal), + rng.uniform(toy_axis.lo_lim, toy_axis.hi_lim, size=nevents_bkg)) self._tstart = Time("2000-01-01T00:00:00") self._tstop = Time("2000-01-02T00:00:00") - self._timestamps = self._tstart + np.random.uniform(size = nevents_tot)*u.day + dt = np.random.uniform(size=nevents_tot) + dt_sort = np.argsort(dt) + self._x = self._x[dt_sort] + dt = dt[dt_sort] - np.random.shuffle(self._x) + self._timestamps = self._tstart + dt * u.day - self._nevents = None # After selection - - # Filter the events once and for all - # It can also be done on the fly if needed - new_x = [] - new_jd1 = [] - new_jd2 = [] - - for event in selector(self): - new_x.append(event.x) - new_jd1.append(event.jd1) - new_jd2.append(event.jd2) + def __iter__(self) -> Iterator[ToyEvent]: + print("Loading events!") + for n,(x,t) in enumerate(zip(self._x, self._timestamps)): + yield ToyEvent(n,x,t) - self._x = np.asarray(new_x) - self._timestamps = Time(new_jd1, new_jd2, format = 'jd') +class ToyEventData(TimeTagEventDataInterface, ToyData): + # Random data. 
Normal signal on top of uniform bkg - @property - def tsart(self): - return self._tstart + def __init__(self, loader:ToyEventDataLoader, selector:EventSelectorInterface = None): - @property - def tstop(self): - return self._tstop + self._loader = selector(loader) + self._cached_iter = None + self._nevents = None # After selection def __iter__(self) -> Iterator[ToyEvent]: - nselected = 0 - for x,t in zip(self._x, self._timestamps): - nselected += 1 - yield ToyEvent(x,t) - self._nevents = nselected + if self._cached_iter is None: + # First call. Split. Keep one and return the other + self._loader, self._cached_iter = itertools.tee(self._loader) + return self._cached_iter + else: + # Following calls: tee the loader again + self._loader, new_iter = itertools.tee(self._loader) + return new_iter @property def nevents(self) -> int: @@ -144,15 +146,15 @@ def nevents(self) -> int: @property def x(self): - return self._x + return np.asarray([e.x for e in self]) @property def jd1(self) -> Iterable[float]: - return self._timestamps.jd1 + return np.asarray([e.jd1 for e in self]) @property def jd2(self) -> Iterable[float]: - return self._timestamps.jd2 + return np.asarray([e.jd2 for e in self]) class ToyBinnedData(BinnedDataInterface, ToyData): @@ -183,8 +185,9 @@ class ToyBkg(BinnedBackgroundInterface, BackgroundDensityInterface): # code readability, especially if you use an IDE. """ - def __init__(self, duration:Quantity): + def __init__(self, data: ToyEventData, duration:Quantity): + self._data = data self._unit_expectation = Histogram(toy_axis) self._unit_expectation[:] = 1 / self._unit_expectation.nbins self._norm = 1 @@ -198,15 +201,12 @@ def set_parameters(self, **parameters:u.Quantity) -> None: def ncounts(self) -> float: return self._norm * self._sel_fraction - def expectation_density(self, data: Union[ToyEvent, Iterable[ToyEvent]] = None) -> Iterable[float]: + def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: density = self._norm * self._unit_expectation_density - if isinstance(data, Event): - return density - else: - for _ in data: - yield density + for _ in itertools.islice(self._data, start, stop): + yield density @property def parameters(self) -> Dict[str, u.Quantity]: @@ -226,7 +226,8 @@ class ToyPointSourceResponse(BinnedThreeMLSourceResponseInterface, UnbinnedThree The normalization --the "flux"-- is the only free parameters """ - def __init__(self, duration:Quantity): + def __init__(self, data: ToyEventData, duration:Quantity): + self._data = data self._source = None self._sel_fraction = (duration/(1*u.day)).to_value('') self._unit_expectation = Histogram(toy_axis, @@ -242,19 +243,21 @@ def ncounts(self) -> float: ns_events = self._sel_fraction * self._source.spectrum.main.shape.k.value return ns_events - def expectation_density(self, data:Union[ToyEvent, Iterable[ToyEvent]] = None) -> Iterable[float]: - - if isinstance(data, Event): - return next(iter(self.expectation_density([data]))) + def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: # I expect in the real case it'll be more efficient to compute # (ncounts, ncounts*prob) than (ncounts, prob) - cache = self.ncounts()*norm.pdf([event.x for event in data]) + cache = self.ncounts()*norm.pdf([event.x for event in itertools.islice(self._data, start, stop)]) for n in cache: yield n + # Alternative version without cache (slower) + # for event in itertools.islice(self._data, start, stop): + # yield self.ncounts()*norm.pdf(event.x) + 
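# --- Editor's aside (self-contained demonstration, not part of this patch) -----
# The tee()-based __iter__ in ToyEventData above is what lets several consumers
# (signal density, background density, event counting) traverse the same data
# without re-running the expensive loader. A stripped-down sketch of that pattern
# (all names below are illustrative, not cosipy API):
import itertools

def expensive_loader():
    print("Loading events!")           # should be printed only once
    for x in range(5):
        yield x

class SharedIterable:

    def __init__(self, source):
        self._source = source

    def __iter__(self):
        # Split the underlying iterator: keep one branch for later calls,
        # hand the other branch to the current caller.
        self._source, branch = itertools.tee(self._source)
        return branch

data = SharedIterable(expensive_loader())
assert list(data) == [0, 1, 2, 3, 4]
assert list(data) == [0, 1, 2, 3, 4]   # second pass replays the buffer, no reload
# -------------------------------------------------------------------------------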
+ def set_source(self, source: Source): if not isinstance(source, PointSource): @@ -284,8 +287,9 @@ def copy(self) -> "ToyPointSourceResponse": class ToyModelFolding(BinnedThreeMLModelFoldingInterface, UnbinnedThreeMLModelFoldingInterface): - def __init__(self, psr: ToyPointSourceResponse): + def __init__(self, data:ToyEventData, psr: ToyPointSourceResponse): + self._data = data self._model = None self._psr = psr @@ -300,15 +304,11 @@ def ncounts(self) -> float: return ncounts - def expectation_density(self, data: Union[ToyEvent, Iterable[ToyEvent]] = None) -> Iterable[float]: + def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: self._cache_psr_copies() - if isinstance(data, Event): - return next(iter(self.expectation_density([data]))) - - # One by one in this example, but they can also be done in chunks (e.g. with itertools batched or islice) - for expectations in zip(*[p.expectation_density(d) for p,d in zip(self._psr_copies.values(), itertools.tee(data))]): + for expectations in zip(*[p.expectation_density(start, stop) for p in self._psr_copies.values()]): yield np.sum(expectations) def set_model(self, model: Model): @@ -357,37 +357,59 @@ def get_binned_data(event_data:ToyEventData, axis:Axis) -> ToyBinnedData: class ToyTimeSelector(EventSelectorInterface): def __init__(self, tstart:Time = None, tstop:Time = None): + """ + Assumes events are time-ordered + + Parameters + ---------- + tstart + tstop + """ + self._tstart = tstart self._tstop = tstop - def _select(self, event:TimeTagEvent) -> bool: + def _select(self, event:TimeTagEventInterface) -> bool: # Single event return next(iter(self.select([event]))) - def select(self, events:Union[TimeTagEvent, Iterable[TimeTagEvent]]) -> Union[bool, Iterable[bool]]: + def select(self, events:Union[TimeTagEventInterface, Iterable[TimeTagEventInterface]]) -> Union[bool, Iterable[bool]]: - if isinstance(events, Event): + if isinstance(events, EventInterface): # Single event return self._select(events) else: # Multiple - # Caching results optimizes the result sometimes - # The user can pass the iterable in chunks - jd1 = [] - jd2 = [] + # Example of working in chunks/batches. This can optimize the + # code sometimes. + + def chunks(): + chunk_size = 1000 + it = iter(events) + while chunk := tuple(itertools.islice(it, chunk_size)): + yield chunk + + for chunk in chunks(): - for event in events: - jd1.append(event.jd1) - jd2.append(event.jd2) + jd1 = [] + jd2 = [] - time = Time(jd1, jd2, format = 'jd') + for event in chunk: + jd1.append(event.jd1) + jd2.append(event.jd2) - selected = np.logical_and(np.logical_or(self._tstart is None, time > self._tstart), - np.logical_or(self._tstop is None, time <= self._tstop)) + time = Time(jd1, jd2, format = 'jd') - for sel in selected: - yield sel + selected = np.logical_and(np.logical_or(self._tstart is None, time > self._tstart), + np.logical_or(self._tstop is None, time <= self._tstop)) + + for sel in selected: + yield sel + + if self._tstop is not None and time[-1] > self._tstop: + # Stop further loading of event + return # ======= Actual code. This is how the "tutorial" will look like ================ @@ -395,7 +417,8 @@ def select(self, events:Union[TimeTagEvent, Iterable[TimeTagEvent]]) -> Union[bo def main(): # Binned or unbinned - unbinned = False + unbinned = True + plot = True # Set the inputs. 
These will eventually open file or set specific parameters, # but since we are generating the data and models on the fly, and most parameter @@ -405,14 +428,12 @@ def main(): duration = tstop - tstart selector = ToyTimeSelector(tstart = tstart, tstop = tstop) - event_data = ToyEventData(selector=selector) - - print(sum(1 for _ in event_data), nevents_tot) + data_loader = ToyEventDataLoader() + event_data = ToyEventData(data_loader, selector=selector) - binned_data = get_binned_data(event_data, toy_axis) - psr = ToyPointSourceResponse(duration = duration) - response = ToyModelFolding(psr) - bkg = ToyBkg(duration = duration) + psr = ToyPointSourceResponse(data = event_data, duration = duration) + response = ToyModelFolding(data = event_data, psr = psr) + bkg = ToyBkg(data = event_data, duration = duration) ## Source model ## We'll just use the K value in u.cm / u.cm / u.s / u.keV @@ -440,13 +461,21 @@ def main(): #bkg = None # Uncomment for no bkg #model = Model() # Uncomment for bkg-only hypothesis + binned_data = None + if plot or not unbinned: + binned_data = get_binned_data(event_data, toy_axis) + # Fit if unbinned: - like_fun = UnbinnedLikelihood(event_data, response, bkg) + like_fun = UnbinnedLikelihood(response, bkg) else: like_fun = PoissonLikelihood(binned_data, response, bkg) - cosi = ThreeMLPluginInterface('cosi', like_fun) + + cosi = ThreeMLPluginInterface('cosi', + like_fun, + response = response, + bkg = bkg) # Before the fit, you can set the parameters initial values, bounds, etc. # This is passed to the minimizer. @@ -461,7 +490,6 @@ def main(): print(like.minimizer) # Plot results - plot = True if plot: fig, ax = plt.subplots() From f85069f691d470dedc5dc8d11fe18557e1deca84 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Fri, 3 Oct 2025 17:00:23 -0400 Subject: [PATCH 081/133] Add time selection to crab unbinned example Signed-off-by: Israel Martinez --- cosipy/data_io/EmCDSUnbinnedData.py | 64 +++++++++++++----- cosipy/event_selection/__init__.py | 0 cosipy/event_selection/time_selection.py | 66 +++++++++++++++++++ cosipy/interfaces/event.py | 4 +- ..._fit_threeml_plugin_unbinned_interfaces.py | 10 ++- .../examples/toy/toy_interfaces_example.py | 61 +---------------- 6 files changed, 127 insertions(+), 78 deletions(-) create mode 100644 cosipy/event_selection/__init__.py create mode 100644 cosipy/event_selection/time_selection.py diff --git a/cosipy/data_io/EmCDSUnbinnedData.py b/cosipy/data_io/EmCDSUnbinnedData.py index 763eb774..4eb2f5bb 100644 --- a/cosipy/data_io/EmCDSUnbinnedData.py +++ b/cosipy/data_io/EmCDSUnbinnedData.py @@ -1,5 +1,5 @@ from pathlib import Path -from typing import Iterable, Iterator, Optional +from typing import Iterable, Iterator, Optional, Tuple import numpy as np from astropy.coordinates import BaseCoordinateFrame, Angle, SkyCoord, UnitSphericalRepresentation @@ -49,6 +49,14 @@ def id(self) -> int: def frame(self): return self._frame + @property + def jd1(self): + return self._jd1 + + @property + def jd2(self): + return self._jd2 + @property def energy_keV(self) -> float: return self._energy @@ -62,7 +70,7 @@ def scattered_lon_rad(self) -> float: return self._chi @property - def scattered_lat_radians(self) -> float: + def scattered_lat_rad(self) -> float: return self._psi class TimeTagEmCDSEventDataInSCFrameFromArrays(TimeTagEmCDSEventDataInSCFrameInterface): @@ -129,10 +137,10 @@ def __init__(self, new_id.append(event.id) new_jd1.append(event.jd1) new_jd2.append(event.jd2) - new_energy.append(event.energy) - new_phi.append(event.phi) - 
new_psi.append(event.psi) - new_chi.append(event.chi) + new_energy.append(event.energy_keV) + new_phi.append(event.scattering_angle_rad) + new_psi.append(event.scattered_lat_rad) + new_chi.append(event.scattered_lon_rad) nevents += 1 self._nevents = nevents @@ -142,7 +150,7 @@ def __init__(self, self._jd2 = np.asarray(new_jd2) self._energy = np.asarray(new_energy) self._phi = np.asarray(new_phi) - self._psi = np.asarray(new_psi) + self._psi = np.pi/2 - np.asarray(new_psi) #Psi is colatitude self._chi = np.asarray(new_chi) def __getitem__(self, i: int) -> TimeTagEmCDSEventInSCFrameInterface: @@ -170,7 +178,7 @@ def jd1(self) -> Iterable[float]: @property def jd2(self) -> Iterable[float]: - return self._jd1 + return self._jd2 @property def energy_rad(self) -> Iterable[float]: @@ -190,17 +198,41 @@ def scattered_lat_rad(self) -> Iterable[float]: class TimeTagEmCDSEventDataInSCFrameFromDC3Fits(TimeTagEmCDSEventDataInSCFrameFromArrays): - def __init__(self, data_path: Path): + def __init__(self, *data_path: Tuple[Path], + selection:EventSelectorInterface = None): - # get_dict_from_fits is really a static method, no config file needed - data_dict = UnBinnedData.get_dict_from_fits(None, data_path) - time = Time(data_dict['TimeTags'], format='unix') - energy = u.Quantity(data_dict['Energies'], u.keV) - phi = Angle(data_dict['Phi'], u.rad) - psichi = SkyCoord(data_dict['Chi local'], np.pi / 2 - data_dict['Psi local'], unit=u.rad, + time = np.empty(0) + energy = np.empty(0) + phi = np.empty(0) + psi = np.empty(0) + chi = np.empty(0) + + for file in data_path: + # get_dict_from_fits is really a static method, no config file needed + data_dict = UnBinnedData.get_dict_from_fits(None, file) + + time = np.append(time, data_dict['TimeTags']) + energy = np.append(energy, data_dict['Energies']) + phi = np.append(phi, data_dict['Phi']) + psi = np.append(psi, data_dict['Psi local']) + chi = np.append(psi, data_dict['Chi local']) + + # Time sort + tsort = np.argsort(time) + + time = time[tsort] + energy = energy[tsort] + phi = phi[tsort] + psi = psi[tsort] + chi = chi[tsort] + + time = Time(time, format='unix') + energy = u.Quantity(energy, u.keV) + phi = Angle(phi, u.rad) + psichi = SkyCoord(chi, np.pi / 2 - psi, unit=u.rad, frame=SpacecraftFrame()) - super().__init__(time, energy, phi, psichi) + super().__init__(time, energy, phi, psichi, selection = selection) diff --git a/cosipy/event_selection/__init__.py b/cosipy/event_selection/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/cosipy/event_selection/time_selection.py b/cosipy/event_selection/time_selection.py new file mode 100644 index 00000000..40fbf383 --- /dev/null +++ b/cosipy/event_selection/time_selection.py @@ -0,0 +1,66 @@ +import itertools +from typing import Union, Iterable + +import numpy as np +from astropy.time import Time + +from cosipy.interfaces import TimeTagEventInterface, EventInterface +from cosipy.interfaces.event_selection import EventSelectorInterface + + +class TimeSelector(EventSelectorInterface): + + def __init__(self, tstart:Time = None, tstop:Time = None): + """ + Assumes events are time-ordered + + Parameters + ---------- + tstart + tstop + """ + + self._tstart = tstart + self._tstop = tstop + + def _select(self, event:TimeTagEventInterface) -> bool: + # Single event + return next(iter(self.select([event]))) + + def select(self, events:Union[TimeTagEventInterface, Iterable[TimeTagEventInterface]]) -> Union[bool, Iterable[bool]]: + + if isinstance(events, EventInterface): + # Single event + return 
self._select(events) + else: + # Multiple + + # Working in chunks/batches. + # This can optimized based on the system + + def chunks(): + chunk_size = 10000 + it = iter(events) + while chunk := tuple(itertools.islice(it, chunk_size)): + yield chunk + + for chunk in chunks(): + + jd1 = [] + jd2 = [] + + for event in chunk: + jd1.append(event.jd1) + jd2.append(event.jd2) + + time = Time(jd1, jd2, format = 'jd') + + selected = np.logical_and(np.logical_or(self._tstart is None, time > self._tstart), + np.logical_or(self._tstop is None, time <= self._tstop)) + + for sel in selected: + yield sel + + if self._tstop is not None and time[-1] > self._tstop: + # Stop further loading of event + return diff --git a/cosipy/interfaces/event.py b/cosipy/interfaces/event.py index f93742c7..4b5b954f 100644 --- a/cosipy/interfaces/event.py +++ b/cosipy/interfaces/event.py @@ -102,7 +102,7 @@ def scattering_angle(self) -> Angle: def scattered_lon_rad(self) -> float: ... @property - def scattered_lat_radians(self) -> float: ... + def scattered_lat_rad(self) -> float: ... @property def scattered_direction(self) -> SkyCoord: @@ -110,7 +110,7 @@ def scattered_direction(self) -> SkyCoord: Add fancy energy quantity """ return SkyCoord(self.scattered_lon_rad, - np.pi/2 - self.scattered_lat_radians, + np.pi/2 - self.scattered_lat_rad, unit=u.rad, frame=self.frame) diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py index eb1e5efa..9e9d1ebf 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py @@ -7,6 +7,7 @@ from cosipy import test_data, BinnedData, UnBinnedData from cosipy.data_io.EmCDSUnbinnedData import TimeTagEmCDSEventDataInSCFrameFromArrays, \ TimeTagEmCDSEventDataInSCFrameFromDC3Fits +from cosipy.event_selection.time_selection import TimeSelector from cosipy.spacecraftfile import SpacecraftHistory from cosipy.response.FullDetectorResponse import FullDetectorResponse from cosipy.util import fetch_wasabi_file @@ -43,7 +44,14 @@ def main(): fetch_wasabi_file('COSI-SMEX/DC3/Data/Sources/crab_standard_3months_unbinned_data_filtered_with_SAAcut.fits.gz', output=str(crab_data_path), checksum='1d73e7b9e46e51215738075e91a52632') - data = TimeTagEmCDSEventDataInSCFrameFromDC3Fits(crab_data_path) + bkg_data_path = data_path / "AlbedoPhotons_3months_unbinned_data_filtered_with_SAAcut.fits.gz" + fetch_wasabi_file('COSI-SMEX/DC3/Data/Backgrounds/Ge/AlbedoPhotons_3months_unbinned_data_filtered_with_SAAcut.fits.gz', + output=str(bkg_data_path), checksum='191a451ee597fd2e4b1cf237fc72e6e2') + + selector = TimeSelector(tstart = Time("2028-03-01 01:35:00.117"), tstop = Time("2028-03-03 01:35:00.117")) #About 3 days + + data = TimeTagEmCDSEventDataInSCFrameFromDC3Fits(crab_data_path, bkg_data_path, + selection=selector) return diff --git a/docs/api/interfaces/examples/toy/toy_interfaces_example.py b/docs/api/interfaces/examples/toy/toy_interfaces_example.py index 8d21f5a2..bc8030fe 100644 --- a/docs/api/interfaces/examples/toy/toy_interfaces_example.py +++ b/docs/api/interfaces/examples/toy/toy_interfaces_example.py @@ -10,6 +10,7 @@ from numpy.ma.core import logical_or from cosipy import SpacecraftHistory +from cosipy.event_selection.time_selection import TimeSelector from cosipy.interfaces.background_interface import BackgroundDensityInterface from 
cosipy.interfaces.data_interface import EventDataInterface, DataInterface, TimeTagEventDataInterface from cosipy.interfaces.event import EventMetadata @@ -354,64 +355,6 @@ def get_binned_data(event_data:ToyEventData, axis:Axis) -> ToyBinnedData: return ToyBinnedData(binned_data) -class ToyTimeSelector(EventSelectorInterface): - - def __init__(self, tstart:Time = None, tstop:Time = None): - """ - Assumes events are time-ordered - - Parameters - ---------- - tstart - tstop - """ - - self._tstart = tstart - self._tstop = tstop - - def _select(self, event:TimeTagEventInterface) -> bool: - # Single event - return next(iter(self.select([event]))) - - def select(self, events:Union[TimeTagEventInterface, Iterable[TimeTagEventInterface]]) -> Union[bool, Iterable[bool]]: - - if isinstance(events, EventInterface): - # Single event - return self._select(events) - else: - # Multiple - - # Example of working in chunks/batches. This can optimize the - # code sometimes. - - def chunks(): - chunk_size = 1000 - it = iter(events) - while chunk := tuple(itertools.islice(it, chunk_size)): - yield chunk - - for chunk in chunks(): - - jd1 = [] - jd2 = [] - - for event in chunk: - jd1.append(event.jd1) - jd2.append(event.jd2) - - time = Time(jd1, jd2, format = 'jd') - - selected = np.logical_and(np.logical_or(self._tstart is None, time > self._tstart), - np.logical_or(self._tstop is None, time <= self._tstop)) - - for sel in selected: - yield sel - - if self._tstop is not None and time[-1] > self._tstop: - # Stop further loading of event - return - - # ======= Actual code. This is how the "tutorial" will look like ================ def main(): @@ -426,7 +369,7 @@ def main(): tstart = Time("2000-01-01T01:00:00") tstop = Time("2000-01-01T10:00:00") duration = tstop - tstart - selector = ToyTimeSelector(tstart = tstart, tstop = tstop) + selector = TimeSelector(tstart = tstart, tstop = tstop) data_loader = ToyEventDataLoader() event_data = ToyEventData(data_loader, selector=selector) From 1921e6e58ecc74f7519b69c2cb5360acf1f2cbb9 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sat, 4 Oct 2025 10:51:08 -0400 Subject: [PATCH 082/133] Changes to make interfaces work with latest develop branch Signed-off-by: Israel Martinez --- cosipy/__init__.py | 2 +- cosipy/response/FullDetectorResponse.py | 4 + cosipy/response/instrument_response.py | 79 +++++++++++++------ ...mple_crab_fit_threeml_plugin_interfaces.py | 9 ++- ...ample_grb_fit_threeml_plugin_interfaces.py | 8 +- 5 files changed, 74 insertions(+), 28 deletions(-) diff --git a/cosipy/__init__.py b/cosipy/__init__.py index 827a70c8..a754ef04 100644 --- a/cosipy/__init__.py +++ b/cosipy/__init__.py @@ -11,7 +11,7 @@ from .threeml import Band_Eflux -from .spacecraftfile import SpacecraftFile +from .spacecraftfile import SpacecraftHistory from .ts_map import FastTSMap, MOCTSMap diff --git a/cosipy/response/FullDetectorResponse.py b/cosipy/response/FullDetectorResponse.py index 516586b4..1df0b3f4 100644 --- a/cosipy/response/FullDetectorResponse.py +++ b/cosipy/response/FullDetectorResponse.py @@ -162,6 +162,10 @@ def axes(self): """ return self._axes + @property + def measurement_axes(self): + return self.axes['Em', 'Phi', 'PsiChi'] + @property def dtype(self): """ diff --git a/cosipy/response/instrument_response.py b/cosipy/response/instrument_response.py index 70136060..704fd95c 100644 --- a/cosipy/response/instrument_response.py +++ b/cosipy/response/instrument_response.py @@ -198,20 +198,16 @@ def _differential_effective_area_inertial(self, """ - # Get response in 
local coordinates - direction = direction.transform_to(SpacecraftFrame(attitude=attitude)) - - # TODO: Change to get_pixel(pix, weight) after PR 364 - dr_pix = self._dr[self._dr.ang2pix(direction)] - - dr_pix.axes['PsiChi'].coordsys = SpacecraftFrame(attitude=attitude) - # Generate axes that will allow us to use _sum_rot_hist, # and obtain the same results as in v3.x out_axes = [self._dr.axes['Ei']] + + if self.is_polarization_response: + raise RuntimeError("Fix me. No pol yet") + # Since we're doing a 0-th order interpolation, the only thing that matter are the bin centers, # so we're placing them at the input polarization angles @@ -226,28 +222,67 @@ def _differential_effective_area_inertial(self, out_axes += [PolarizationAxis(pol_edges, convention = polarization.convention)] out_axes += list(axes) + out_axes = Axes(out_axes) + + if weight is None: + # Weight takes the role of the exposure in _sum_rot_hist, which is not an optional argument + weight = 1 + + # Almost copy-paste from FullDetectorResponse.get_point_source_response(). Improve to avoid duplicated code + def rotate_coords(c, rot): + """ + Apply a rotation matrix to one or more 3D directions + represented as Cartesian 3-vectors. Return rotated directions + in polar form as a pair (co-latitude, longitude) in + radians. + + """ + c_local = rot @ c - # Either initialize a new + c_x, c_y, c_z = c_local + + theta = np.arctan2(c_y, c_x) + phi = np.arccos(c_z) + + return (phi, theta) + + rot = attitude.transform_to('icrs').rot.inv().as_matrix() + + src_cart = direction.transform_to('icrs').cartesian.xyz.value + loc_src_colat, loc_src_lon = rotate_coords(src_cart, rot) + loc_src_pixels = self._dr._axes['NuLambda'].find_bin(theta=loc_src_colat, + phi=loc_src_lon) + + sf_psichi_axis = axes['PsiChi'] + sf_psichi_dirs = sf_psichi_axis.pix2skycoord(np.arange(sf_psichi_axis.nbins)) + sf_psichi_dirs_cart = sf_psichi_dirs.transform_to('icrs').cartesian.xyz.value + loc_psichi_colat, loc_psichi_lon = rotate_coords(sf_psichi_dirs_cart, rot) + loc_psichi_pixels = self._dr._axes['PsiChi'].find_bin(theta=loc_psichi_colat, + phi=loc_psichi_lon) + + + # Either initialize a new or clear cache if out is None: - out = Histogram(out_axes, - unit = dr_pix.unit) + out = Quantity(np.zeros(out_axes.shape), dr_pix.unit) else: if not add_inplace: - out.fill(0.) 
+ out[:] = 0 - out = Histogram(out_axes, - contents = out, - copy_contents=False) + if isinstance(weight, u.Quantity): + weight_unit = weight.unit + weight = weight.value + else: + weight_unit = None + + self._dr._add_rot_psrs(out_axes, weight, + loc_psichi_pixels, + (loc_src_pixels,), (out.value,)) - if weight is None: - # Weight takes the role of the exposure in _sum_rot_hist, which is not an optional argument - weight = 1 - FullDetectorResponse._sum_rot_hist(dr_pix, out, weight, - axis = 'PsiChi', - pol_axis = 'Pol') + if weight_unit is not None: + out = u.Quantity(out.value, weight_unit*out.unit, copy = False) - return out.contents + return out diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py index 6088baf2..523db28b 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py @@ -126,10 +126,11 @@ def main(): # In[10]: # Before and after Jeremy's changes - dr_path_zip = data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip" # path to detector response - dr_path = dr_path_zip.with_suffix('') - fetch_wasabi_file('COSI-SMEX/DC2/Responses/SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip', - output = str(dr_path_zip), unzip = True, checksum = 'e8ff763c5d9e63d3797567a4a51d9eda') + dr_path = data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5" # path to detector response + fetch_wasabi_file( + 'COSI-SMEX/develop/Data/Responses/SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5', + output=str(dr_path), + checksum='eb72400a1279325e9404110f909c7785') # dr_path = str(data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5") # path to detector response # fetch_wasabi_file('COSI-SMEX/develop/Data/Responses/SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5', diff --git a/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py index 7da237c4..787a67a6 100755 --- a/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py @@ -1,5 +1,7 @@ import logging +from cosipy.util import fetch_wasabi_file + logging.basicConfig( level=logging.INFO, format='%(asctime)s - %(levelname)s - %(filename)s:%(lineno)d - %(message)s' @@ -43,6 +45,11 @@ def main(): # fetch_wasabi_file('COSI-SMEX/DC2/Data/Orientation/20280301_3_month_with_orbital_info.ori', output=str(data_path / '20280301_3_month_with_orbital_info.ori'), checksum = '416fcc296fc37a056a069378a2d30cb2') # fetch_wasabi_file('COSI-SMEX/DC2/Responses/SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip', output=str(data_path / 'SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5.zip'), unzip = True, checksum = 'e8ff763c5d9e63d3797567a4a51d9eda') + dr_path = data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5" # path to detector response + fetch_wasabi_file( + 
'COSI-SMEX/develop/Data/Responses/SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5', + output=str(dr_path), + checksum='eb72400a1279325e9404110f909c7785') # Set model to fit l = 93. @@ -90,7 +97,6 @@ def main(): ori = ori.select_interval(tmin, tmax) # Function changed name during refactoring # Prepare instrument response - dr_path = data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.nonsparse_nside8.area.good_chunks_unzip.h5" dr = FullDetectorResponse.open(dr_path) # Workaround to avoid inf values. Out bkg should be smooth, but currently it's not. From 5facc1358ca56cbfc45ce931fa9e69fa907f639e Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Tue, 7 Oct 2025 17:03:26 -0400 Subject: [PATCH 083/133] PSR interp trapz wip. Need to fix a few stuff still, but existing examples are working. Signed-off-by: Israel Martinez --- cosipy/data_io/EmCDSUnbinnedData.py | 118 +++++----- cosipy/event_selection/time_selection.py | 15 +- cosipy/interfaces/data_interface.py | 14 ++ cosipy/interfaces/event.py | 12 +- .../instrument_response_interface.py | 10 +- cosipy/interfaces/photon_list.py | 4 +- cosipy/interfaces/photon_parameters.py | 8 +- .../interfaces/source_response_interface.py | 4 +- .../response/instrument_response_function.py | 84 +++++++ cosipy/response/photon_types.py | 25 +++ .../response/threeml_point_source_response.py | 1 - cosipy/spacecraftfile/spacecraft_file.py | 3 + cosipy/statistics/likelihood_functions.py | 15 +- cosipy/threeml/psr_fixed_ei.py | 207 ++++++++++++++++++ cosipy/util/iterables.py | 16 ++ ..._fit_threeml_plugin_unbinned_interfaces.py | 96 +++++++- 16 files changed, 545 insertions(+), 87 deletions(-) create mode 100644 cosipy/response/instrument_response_function.py create mode 100644 cosipy/response/photon_types.py create mode 100644 cosipy/threeml/psr_fixed_ei.py create mode 100644 cosipy/util/iterables.py diff --git a/cosipy/data_io/EmCDSUnbinnedData.py b/cosipy/data_io/EmCDSUnbinnedData.py index 4eb2f5bb..9a8b1b55 100644 --- a/cosipy/data_io/EmCDSUnbinnedData.py +++ b/cosipy/data_io/EmCDSUnbinnedData.py @@ -11,35 +11,33 @@ from cosipy import UnBinnedData from cosipy.interfaces import EventWithEnergyInterface, EventDataInterface, EventDataWithEnergyInterface from cosipy.interfaces.data_interface import ComptonDataSpaceEventDataInterface, TimeTagEmCDSEventDataInSCFrameInterface -from cosipy.interfaces.event import ComptonDataSpaceEventInterface, TimeTagEmCDSEventInSCFrameInterface +from cosipy.interfaces.event import ComptonDataSpaceEventInterface, TimeTagEmCDSEventInSCFrameInterface, \ + EmCDSEventInSCFrameInterface import astropy.units as u from cosipy.interfaces.event_selection import EventSelectorInterface - -class TimeTagEmCDSEventInSCFrame(TimeTagEmCDSEventInSCFrameInterface): +class EmCDSEventInSCFrame(EmCDSEventInSCFrameInterface): _frame = SpacecraftFrame() - def __init__(self, id, jd1, jd2, energy, phi, psi, chi): + def __init__(self, energy, scatt_angle, scatt_lon, scatt_lat, event_id = None): """ Parameters ---------- jd1: julian days jd2: julian days energy: keV - phi: scattering angle radians - psi: scattering latitude radians - chi: scattering longitude radians + scatt_angle: scattering angle radians + scatt_lon: scattering longitude radians + scatt_lat: scattering latitude radians """ - self._id = id - self._jd1 = jd1 - self._jd2 = jd2 + self._id = event_id self._energy = energy - self._phi = phi - self._psi = psi - self._chi = chi + self._scatt_angle = scatt_angle + self._scatt_lat = 
scatt_lat + self._scatt_lon = scatt_lon @property def id(self) -> int: @@ -49,29 +47,49 @@ def id(self) -> int: def frame(self): return self._frame - @property - def jd1(self): - return self._jd1 - - @property - def jd2(self): - return self._jd2 - @property def energy_keV(self) -> float: return self._energy @property def scattering_angle_rad(self) -> float: - return self._phi + return self._scatt_angle @property def scattered_lon_rad(self) -> float: - return self._chi + return self._scatt_lon @property def scattered_lat_rad(self) -> float: - return self._psi + return self._scatt_lat + +class TimeTagEmCDSEventInSCFrame(EmCDSEventInSCFrame, TimeTagEmCDSEventInSCFrameInterface): + + def __init__(self, jd1, jd2, energy, scatt_angle, scatt_lon, scatt_lat, event_id=None): + """ + Parameters + ---------- + jd1: julian days + jd2: julian days + energy: keV + scatt_angle: scattering angle radians + scatt_lon: scattering longitude radians + scatt_lat: scattering latitude radians + """ + super().__init__(energy, scatt_angle, scatt_lon, scatt_lat, event_id) + + self._jd1 = jd1 + self._jd2 = jd2 + + @property + def jd1(self): + return self._jd1 + + @property + def jd2(self): + return self._jd2 + + class TimeTagEmCDSEventDataInSCFrameFromArrays(TimeTagEmCDSEventDataInSCFrameInterface): """ @@ -93,32 +111,32 @@ def __init__(self, Parameters ---------- time - energy: keV - scattering_angle: scattering angle radians - psi: scattering latitude radians - chi: scattering longitude radians - id: range(size) by default - selection: Optional selection for TimeTagEmCDSEventInSCFrame events + energy + scattering_angle + scattered_direction + event_id + selection """ + self._jd1 = time.jd1 self._jd2 = time.jd2 self._energy = energy.to_value(u.keV) - self._phi = scattering_angle.to_value(u.rad) + self._scatt_angle = scattering_angle.to_value(u.rad) if not isinstance(scattered_direction.frame, SpacecraftFrame): raise ValueError("Coordinates need to be in SC frame") scattered_direction = scattered_direction.represent_as(UnitSphericalRepresentation) - self._psi = scattered_direction.lat.rad - self._chi = scattered_direction.lon.rad + self._scatt_lat = scattered_direction.lat.rad + self._scatt_lon = scattered_direction.lon.rad if event_id is None: self._id = np.arange(self._jd1.size) else: self._id = np.asarray(event_id) # Check size - self._id, self._jd1, self._jd2, self._energy, self._phi, self._psi, self._chi = np.broadcast_arrays(self._id, self._jd1, self._jd2, self._energy, self._phi, self._psi, self._chi) + self._id, self._jd1, self._jd2, self._energy, self._scatt_angle, self._scatt_lat, self._scatt_lon = np.broadcast_arrays(self._id, self._jd1, self._jd2, self._energy, self._scatt_angle, self._scatt_lat, self._scatt_lon) self._nevents = self._id.size @@ -128,9 +146,9 @@ def __init__(self, new_jd1 = [] new_jd2 = [] new_energy = [] - new_phi = [] - new_psi = [] - new_chi = [] + new_scatt_angle = [] + new_scatt_lat = [] + new_scatt_lon = [] nevents = 0 for event in selection(self): @@ -138,9 +156,9 @@ def __init__(self, new_jd1.append(event.jd1) new_jd2.append(event.jd2) new_energy.append(event.energy_keV) - new_phi.append(event.scattering_angle_rad) - new_psi.append(event.scattered_lat_rad) - new_chi.append(event.scattered_lon_rad) + new_scatt_angle.append(event.scattering_angle_rad) + new_scatt_lat.append(event.scattered_lat_rad) + new_scatt_lon.append(event.scattered_lon_rad) nevents += 1 self._nevents = nevents @@ -149,20 +167,21 @@ def __init__(self, self._jd1 = np.asarray(new_jd1) self._jd2 = 
np.asarray(new_jd2) self._energy = np.asarray(new_energy) - self._phi = np.asarray(new_phi) - self._psi = np.pi/2 - np.asarray(new_psi) #Psi is colatitude - self._chi = np.asarray(new_chi) + self._scatt_angle = np.asarray(new_scatt_angle) + self._scatt_lat = np.asarray(new_scatt_lat) + self._scatt_lon = np.asarray(new_scatt_lon) def __getitem__(self, i: int) -> TimeTagEmCDSEventInSCFrameInterface: - return TimeTagEmCDSEventInSCFrame(self._id[i], self._jd1[i], self._jd2[i], self._energy[i], self._phi[i], self._psi[i], self._chi[i]) + return TimeTagEmCDSEventInSCFrame(self._jd1[i], self._jd2[i], self._energy[i], self._scatt_angle[i], + self._id[i]) @property def nevents(self) -> int: return self._nevents def __iter__(self) -> Iterator[TimeTagEmCDSEventInSCFrameInterface]: - for id, jd1, jd2, energy, phi, psi, chi in zip(self._id, self._jd1, self._jd2, self._energy, self._phi, self._psi, self._chi): - yield TimeTagEmCDSEventInSCFrame(id, jd1, jd2, energy, phi, psi, chi) + for id, jd1, jd2, energy, scatt_angle, scatt_lat, scatt_lon in zip(self._id, self._jd1, self._jd2, self._energy, self._scatt_angle, self._scatt_lat, self._scatt_lon): + yield TimeTagEmCDSEventInSCFrame(jd1, jd2, energy, scatt_angle, scatt_lon, scatt_lat, id) @property def frame(self) -> SpacecraftFrame: @@ -186,15 +205,15 @@ def energy_rad(self) -> Iterable[float]: @property def scattering_angle_rad(self) -> Iterable[float]: - return self._phi + return self._scatt_angle @property def scattered_lon_rad(self) -> Iterable[float]: - return self._chi + return self._scatt_lon @property def scattered_lat_rad(self) -> Iterable[float]: - return self._phi + return self._scatt_angle class TimeTagEmCDSEventDataInSCFrameFromDC3Fits(TimeTagEmCDSEventDataInSCFrameFromArrays): @@ -229,6 +248,7 @@ def __init__(self, *data_path: Tuple[Path], time = Time(time, format='unix') energy = u.Quantity(energy, u.keV) phi = Angle(phi, u.rad) + # Psi is colatitude (latitude complementary angle) psichi = SkyCoord(chi, np.pi / 2 - psi, unit=u.rad, frame=SpacecraftFrame()) diff --git a/cosipy/event_selection/time_selection.py b/cosipy/event_selection/time_selection.py index 40fbf383..4bdecd67 100644 --- a/cosipy/event_selection/time_selection.py +++ b/cosipy/event_selection/time_selection.py @@ -6,16 +6,19 @@ from cosipy.interfaces import TimeTagEventInterface, EventInterface from cosipy.interfaces.event_selection import EventSelectorInterface +from cosipy.util.iterables import itertools_batched class TimeSelector(EventSelectorInterface): - def __init__(self, tstart:Time = None, tstop:Time = None): + def __init__(self, tstart:Time = None, tstop:Time = None, batch_size:int = 10000): """ Assumes events are time-ordered Parameters ---------- + chunk_size : object + Number of events processed at a time tstart tstop """ @@ -23,6 +26,8 @@ def __init__(self, tstart:Time = None, tstop:Time = None): self._tstart = tstart self._tstop = tstop + self._batch_size = batch_size + def _select(self, event:TimeTagEventInterface) -> bool: # Single event return next(iter(self.select([event]))) @@ -38,13 +43,7 @@ def select(self, events:Union[TimeTagEventInterface, Iterable[TimeTagEventInterf # Working in chunks/batches. 
# This can optimized based on the system - def chunks(): - chunk_size = 10000 - it = iter(events) - while chunk := tuple(itertools.islice(it, chunk_size)): - yield chunk - - for chunk in chunks(): + for chunk in itertools_batched(events, self._batch_size): jd1 = [] jd2 = [] diff --git a/cosipy/interfaces/data_interface.py b/cosipy/interfaces/data_interface.py index 4e7b616d..d1ed8f89 100644 --- a/cosipy/interfaces/data_interface.py +++ b/cosipy/interfaces/data_interface.py @@ -40,6 +40,20 @@ class BinnedDataInterface(DataInterface, Protocol): def data(self) -> Histogram:... @property def axes(self) -> Axes:... + def fill(self, event_data:Iterable[EventInterface]): + """ + Bin the data. + + Parameters + ---------- + event_data + + Returns + ------- + + """ + + @runtime_checkable class EventDataInterface(DataInterface, Protocol): diff --git a/cosipy/interfaces/event.py b/cosipy/interfaces/event.py index 4b5b954f..23b1221e 100644 --- a/cosipy/interfaces/event.py +++ b/cosipy/interfaces/event.py @@ -127,9 +127,17 @@ class ComptonDataSpaceInSCFrameEventInterface(EventInSCFrameInterface, Protocol): pass +@runtime_checkable +class EmCDSEventInSCFrameInterface(EventInSCFrameInterface, + EventWithEnergyInterface, + ComptonDataSpaceEventInterface, + Protocol): + pass + +@runtime_checkable class TimeTagEmCDSEventInSCFrameInterface(TimeTagEventInterface, - EventWithEnergyInterface, - ComptonDataSpaceInSCFrameEventInterface): + EmCDSEventInSCFrameInterface, + Protocol): pass diff --git a/cosipy/interfaces/instrument_response_interface.py b/cosipy/interfaces/instrument_response_interface.py index 1c0bd4b3..b1e144a1 100644 --- a/cosipy/interfaces/instrument_response_interface.py +++ b/cosipy/interfaces/instrument_response_interface.py @@ -10,7 +10,7 @@ from cosipy.interfaces import BinnedDataInterface, ExpectationDensityInterface, BinnedExpectationInterface, EventInterface from cosipy.interfaces.photon_list import PhotonListWithDirectionInterface -from cosipy.interfaces.photon_parameters import PhotonInterface +from cosipy.interfaces.photon_parameters import PhotonInterface, PhotonWithDirectionInterface from cosipy.polarization import PolarizationAngle __all__ = ["BinnedInstrumentResponseInterface"] @@ -71,19 +71,19 @@ def random_events(self, photons:Iterable[PhotonInterface]) -> Iterable[EventInte """ @runtime_checkable -class NearFieldInstrumentResponseFunctionInterface(InstrumentResponseFunctionInterface, Protocol): +class FarFieldInstrumentResponseFunctionInterface(InstrumentResponseFunctionInterface, Protocol): - def effective_area_cm2(self, photons: PhotonListWithDirectionInterface) -> Iterable[float]: + def effective_area_cm2(self, photons: Iterable[PhotonWithDirectionInterface]) -> Iterable[float]: """ """ - def effective_area(self, photons: PhotonListWithDirectionInterface) -> Iterable[u.Quantity]: + def effective_area(self, photons: Iterable[PhotonWithDirectionInterface]) -> Iterable[u.Quantity]: """ Convenience function """ for area_cm2 in self.effective_area_cm2(photons): - yield u.Quantity(area_cm2, u.cm2) + yield u.Quantity(area_cm2, u.cm*u.cm) diff --git a/cosipy/interfaces/photon_list.py b/cosipy/interfaces/photon_list.py index a0e1ff5a..a6cb752d 100644 --- a/cosipy/interfaces/photon_list.py +++ b/cosipy/interfaces/photon_list.py @@ -78,7 +78,5 @@ class PhotonListInSCFrameInterface(PhotonListInterface, Protocol): @property def frame(self) -> SpacecraftFrame:... 
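The event and photon interfaces in this patch are structural typing.Protocol classes: an object satisfies them by exposing the expected attributes, without having to inherit from them, and the @runtime_checkable decorator lets isinstance() verify that at run time. A minimal, self-contained sketch of that behaviour; PhotonWithEnergyDemo and DemoPhoton are hypothetical names used only for illustration and are not part of cosipy:

    from typing import Protocol, runtime_checkable

    @runtime_checkable
    class PhotonWithEnergyDemo(Protocol):
        @property
        def energy_keV(self) -> float: ...

    class DemoPhoton:
        # Plain class: it satisfies the protocol structurally, with no inheritance.
        def __init__(self, energy_keV: float):
            self._energy_keV = energy_keV

        @property
        def energy_keV(self) -> float:
            return self._energy_keV

    # isinstance() on a runtime_checkable protocol only checks that the member
    # exists; it does not validate signatures or return types.
    assert isinstance(DemoPhoton(511.0), PhotonWithEnergyDemo)
    assert not isinstance(object(), PhotonWithEnergyDemo)
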
-class PhotonWithDirectionInSCFrameInterface(PhotonListWithDirectionInterface, - PhotonListInSCFrameInterface): - pass + diff --git a/cosipy/interfaces/photon_parameters.py b/cosipy/interfaces/photon_parameters.py index b8570ca9..0e4815e6 100644 --- a/cosipy/interfaces/photon_parameters.py +++ b/cosipy/interfaces/photon_parameters.py @@ -54,4 +54,10 @@ def frame(self) -> SpacecraftFrame:... class PhotonWithDirectionInSCFrameInterface(PhotonWithDirectionInterface, PhotonInSCFrameInterface): - pass \ No newline at end of file + pass + +class PhotonWithDirectionAndEnergyInSCFrameInterface(PhotonWithDirectionInSCFrameInterface, + PhotonWithEnergyInterface): + pass + + diff --git a/cosipy/interfaces/source_response_interface.py b/cosipy/interfaces/source_response_interface.py index 5474cc20..ba2e488b 100644 --- a/cosipy/interfaces/source_response_interface.py +++ b/cosipy/interfaces/source_response_interface.py @@ -47,8 +47,8 @@ def copy(self) -> "ThreeMLSourceResponseInterface": """ This method is used to re-use the same object for multiple sources. - It is expected to return a copy of itself, but deepcopy it's - and any other necessary information such that when + It is expected to return a safe copy of itself + such that when a new source is set, the expectation calculation are independent. diff --git a/cosipy/response/instrument_response_function.py b/cosipy/response/instrument_response_function.py new file mode 100644 index 00000000..31a928ff --- /dev/null +++ b/cosipy/response/instrument_response_function.py @@ -0,0 +1,84 @@ +import itertools +from typing import Iterable, Tuple + +import numpy as np +from astropy.coordinates import SkyCoord + +from astropy import units as u +from astropy.units import Quantity + +from histpy import Histogram +from scoords import SpacecraftFrame + +from cosipy.interfaces import EventInterface +from cosipy.interfaces.event import TimeTagEmCDSEventInSCFrameInterface, EmCDSEventInSCFrameInterface +from cosipy.interfaces.instrument_response_interface import FarFieldInstrumentResponseFunctionInterface +from cosipy.interfaces.photon_list import PhotonListWithDirectionInterface +from cosipy.interfaces.photon_parameters import PhotonInterface, PhotonWithDirectionAndEnergyInSCFrameInterface +from cosipy.response import FullDetectorResponse +from cosipy.util.iterables import itertools_batched + + +class UnpolarizedDC3InterpolatedFarFieldInstrumentResponseFunction(FarFieldInstrumentResponseFunctionInterface): + + def __init__(self, response: FullDetectorResponse, + batch_size = 100000): + + self._prob = response.to_dr().project('NuLambda', 'Ei', 'Em', 'Phi', 'PsiChi') + + self._area_eff = self._prob.project('NuLambda', 'Ei') + + # expand_dims removes units + self._prob /= Quantity(self._prob.axes.expand_dims(self._area_eff, ('NuLambda', 'Ei')), self._area_eff.unit, copy=False) + + self._prob.to('', copy=False) + self._area_eff = self._area_eff.to(u.cm*u.cm, copy=False) + + self._batch_size = batch_size + + def effective_area_cm2(self, photons: Iterable[PhotonWithDirectionAndEnergyInSCFrameInterface]) -> Iterable[float]: + """ + + """ + + for photon_chunk in itertools_batched(photons, self._batch_size): + + lon, lat, energy = np.asarray([[photon.direction_lon_radians, + photon.direction_lat_radians, + photon.energy_keV] for photon in photon_chunk], dtype=float).transpose() + + direction = SkyCoord(lon, lat, unit = u.rad, frame = SpacecraftFrame()) + energy = Quantity(energy, u.keV) + + for area_eff in self._area_eff.interp(direction, energy): + yield area_eff.value + + 
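effective_area_cm2 above consumes a lazy iterable of photon objects in fixed-size batches and vectorizes the histogram interpolation over each batch. The pattern itself is independent of cosipy; below is a small, self-contained sketch using only the standard library and numpy, where np.interp stands in for the Histogram.interp lookup and the toy grid, areas and batch size are arbitrary:

    import itertools

    import numpy as np

    def batched(iterable, n):
        # Simplified version of the batching helper added in cosipy/util/iterables.py
        # (itertools.batched itself only exists from Python 3.12).
        it = iter(iterable)
        while chunk := tuple(itertools.islice(it, n)):
            yield chunk

    # Toy lookup table standing in for the effective-area histogram.
    energy_grid_keV = np.geomspace(100.0, 10000.0, 11)
    area_grid_cm2 = np.linspace(5.0, 50.0, 11)

    def effective_area_cm2(energies_keV, batch_size=3):
        # Pull scalars from the (possibly lazy) input in batches, then
        # interpolate the whole batch at once.
        for chunk in batched(energies_keV, batch_size):
            for area in np.interp(np.asarray(chunk), energy_grid_keV, area_grid_cm2):
                yield float(area)

    print(list(effective_area_cm2(e for e in (150.0, 511.0, 1157.0, 4000.0))))
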
+ def event_probability(self, query: Iterable[Tuple[PhotonWithDirectionAndEnergyInSCFrameInterface, EmCDSEventInSCFrameInterface]]) -> Iterable[float]: + """ + Return the probability density of measuring a given event given a photon. + """ + + for query_chunk in itertools_batched(query, self._batch_size): + + # Psi is colatitude (complementary angle) + lon_ph, lat_ph, energy_i, energy_m, phi, psi_comp, chi = \ + np.asarray([[photon.direction_lon_radians, + photon.direction_lat_radians, + photon.energy_keV, + event.energy_keV, + event.scattering_angle_rad, + event.scattered_lat_rad, + event.scattered_lon_rad, + ] for photon,event in query_chunk], dtype=float).transpose() + + direction_ph = SkyCoord(lon_ph, lat_ph, unit = u.rad, frame = SpacecraftFrame()) + energy_i = Quantity(energy_i, u.keV) + energy_m = Quantity(energy_m, u.keV) + phi = Quantity(phi, u.rad) + psichi = SkyCoord(chi, psi_comp, unit=u.rad, frame=SpacecraftFrame()) + + # Prob not guaranteed to sum up to 1. We should take self._prob.slice instead. + # I think this is faster though, and a good approximation. + for prob in self._prob.interp(direction_ph, energy_i, energy_m, phi, psichi): + yield prob \ No newline at end of file diff --git a/cosipy/response/photon_types.py b/cosipy/response/photon_types.py new file mode 100644 index 00000000..f092d686 --- /dev/null +++ b/cosipy/response/photon_types.py @@ -0,0 +1,25 @@ +from scoords import SpacecraftFrame + +from cosipy.interfaces.photon_parameters import PhotonWithDirectionAndEnergyInSCFrameInterface + + +class PhotonWithDirectionAndEnergyInSCFrame(PhotonWithDirectionAndEnergyInSCFrameInterface): + + frame = SpacecraftFrame() + + def __init__(self, direction_lon_radians, direction_lat_radians, energy_keV): + self._energy = energy_keV + self._lon = direction_lon_radians + self._lat = direction_lat_radians + + @property + def energy_keV(self) -> float: + return self._energy + + @property + def direction_lon_radians(self) -> float: + return self._lon + + @property + def direction_lat_radians(self) -> float: + return self._lat diff --git a/cosipy/response/threeml_point_source_response.py b/cosipy/response/threeml_point_source_response.py index 0a625257..1fccabd4 100644 --- a/cosipy/response/threeml_point_source_response.py +++ b/cosipy/response/threeml_point_source_response.py @@ -101,7 +101,6 @@ def __init__(self, def clear_cache(self): - self._source = None self._last_convolved_source_dict = None self._expectation = None self._last_convolved_source_skycoord = None diff --git a/cosipy/spacecraftfile/spacecraft_file.py b/cosipy/spacecraftfile/spacecraft_file.py index 793cbac2..e842b4f4 100644 --- a/cosipy/spacecraftfile/spacecraft_file.py +++ b/cosipy/spacecraftfile/spacecraft_file.py @@ -258,6 +258,9 @@ def _interp_attitude(self, points, weights) -> Attitude: rot_matrix = self._attitude.as_matrix() + # In case of multiple points + weights = np.expand_dims(weights, (weights.ndim, weights.ndim+1)) + interp_attitude = Attitude.from_matrix(rot_matrix[points[0]]*weights[0] + rot_matrix[points[1]]*weights[1], frame = self._attitude.frame) return interp_attitude diff --git a/cosipy/statistics/likelihood_functions.py b/cosipy/statistics/likelihood_functions.py index 21e68769..56173077 100644 --- a/cosipy/statistics/likelihood_functions.py +++ b/cosipy/statistics/likelihood_functions.py @@ -4,6 +4,7 @@ from cosipy import UnBinnedData from cosipy.interfaces.expectation_interface import ExpectationInterface, ExpectationDensityInterface +from cosipy.util.iterables import itertools_batched 
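The unbinned likelihood below accumulates the sum of log expectation densities over the events in batches. For reference, the extended unbinned log-likelihood used in this kind of analysis is log L = sum_i log lambda(x_i) - N_exp, where lambda is the expected event density and N_exp is its integral over the data space. A toy numeric sketch of that formula; the exponential density, normalisation and simulated events are made up and do not come from the cosipy classes:

    import numpy as np

    rng = np.random.default_rng(seed=0)

    # Toy model: expected event density over measured energy,
    # lambda(E) = N_exp * f(E), with f a unit-normalised exponential.
    n_expected = 50.0
    scale_keV = 300.0

    def expectation_density(energy_keV):
        return n_expected * np.exp(-energy_keV / scale_keV) / scale_keV

    # Pretend these are the observed event energies.
    events_keV = rng.exponential(scale_keV, size=45)

    # Extended unbinned log-likelihood: sum of log densities minus expected counts.
    log_like = np.sum(np.log(expectation_density(events_keV))) - n_expected
    print(log_like)
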
logger = logging.getLogger(__name__) @@ -21,7 +22,9 @@ 'PoissonLikelihood'] class UnbinnedLikelihood(UnbinnedLikelihoodInterface): - def __init__(self, response:ExpectationDensityInterface, bkg:BackgroundDensityInterface = None): + def __init__(self, response:ExpectationDensityInterface, + bkg:BackgroundDensityInterface = None, + batch_size:int = 100000): """ Will get the number of events from the response and bkg expectation_density iterators @@ -35,6 +38,8 @@ def __init__(self, response:ExpectationDensityInterface, bkg:BackgroundDensityIn self._response = response self._nobservations = None + self._batch_size = batch_size + @property def has_bkg(self): return self._bkg is not None @@ -79,13 +84,7 @@ def get_log_like(self) -> float: nobservations = 0 density_log_sum = 0 - def chunks(): - chunk_size = 100000 - it = iter(self._get_density_iter()) - while chunk := tuple(itertools.islice(it, chunk_size)): - yield chunk - - for density_iter_chunk in chunks(): + for density_iter_chunk in itertools_batched(self._get_density_iter(), self._batch_size): density = np.fromiter(density_iter_chunk, dtype=float) density_log_sum += np.sum(np.log(density)) diff --git a/cosipy/threeml/psr_fixed_ei.py b/cosipy/threeml/psr_fixed_ei.py new file mode 100644 index 00000000..60676860 --- /dev/null +++ b/cosipy/threeml/psr_fixed_ei.py @@ -0,0 +1,207 @@ +import copy +from typing import Optional, Iterable + +import numpy as np +from astromodels import PointSource +from astropy.coordinates import UnitSphericalRepresentation, CartesianRepresentation +from astropy.units import Quantity +from executing import Source +from histpy import Axis + +from cosipy import SpacecraftHistory +from cosipy.data_io.EmCDSUnbinnedData import EmCDSEventInSCFrame +from cosipy.interfaces import UnbinnedThreeMLSourceResponseInterface +from cosipy.interfaces.data_interface import TimeTagEmCDSEventDataInSCFrameInterface +from cosipy.interfaces.event import EmCDSEventInSCFrameInterface +from cosipy.interfaces.instrument_response_interface import FarFieldInstrumentResponseFunctionInterface +from cosipy.response.photon_types import PhotonWithDirectionAndEnergyInSCFrame + +from astropy import units as u + +class UnbinnedThreeMLPointSourceResponseTrapz(UnbinnedThreeMLSourceResponseInterface): + + def __init__(self, + data: TimeTagEmCDSEventDataInSCFrameInterface, + irf:FarFieldInstrumentResponseFunctionInterface, + sc_history: SpacecraftHistory, + energies:Quantity): + """ + Will integrate the spectrum by evaluation the IRF at fixed Ei position and using a simple + trapezoidal rule + + All IRF queries are cached + + Parameters + ---------- + irf + energies: evaluation points + """ + + # Interface inputs + self._source = None + + # Other implementation inputs + self._data = data + self._irf = irf + self._energies_keV = energies.to_value(u.keV) + self._sc_ori = sc_history + + # This can be computed once and for all + # Trapezoidal rule weights to integrate in Ei + ewidths = np.diff(self._energies_keV) + self._trapz_weights = np.zeros_like(self._energies_keV) + self._trapz_weights[:-1] = ewidths + self._trapz_weights[1:] = ewidths + self._trapz_weights /= 2 + + self._attitude_at_event_times = self._sc_ori.interp_attitude(self._data.time) + + # Caches + + # See this issue for the caveats of comparing models + # https://github.com/threeML/threeML/issues/645 + self._last_convolved_source_dict = None + + # The IRF values change for each direction, but it's the same for all spectrum parameters + + # Source location cached separately since changing the 
response + # for a given direction is expensive + self._last_convolved_source_skycoord = None + + # For integral for nevents + # int Aeff(t, Ei) F(Ei) dt dEi + # Will need to multiply by F(Ei) and sum. + # It includes the trapezoidal rule weights + # and the time integral based on source position + # and SC history + # Once per Ei + self._nevents_weights = None # In cm2*s*keV + + # axis 0: events + # axis 1: energy_i samples + self._event_prob_weights = None + + # Integrated over Ei + self._nevents = None + self._event_prob = None + + def set_source(self, source: Source): + """ + The source is passed as a reference and it's parameters + can change. Remember to check if it changed since the + last time the user called expectation. + """ + if not isinstance(source, PointSource): + raise TypeError("I only know how to handle point sources!") + + self._source = source + + def clear_cache(self): + + self._last_convolved_source_dict = None + self._last_convolved_source_skycoord = None + self._nevents = None + self._nevents_weights = None + self._event_prob = None + self._event_prob_weights = None + + def copy(self) -> "ThreeMLSourceResponseInterface": + """ + This method is used to re-use the same object for multiple + sources. + It is expected to return a copy of itself, but deepcopying + any necessary information such that when + a new source is set, the expectation calculation + are independent. + + psr1 = ThreeMLSourceResponse() + psr2 = psr.copy() + psr1.set_source(source1) + psr2.set_source(source2) + """ + + new = copy.copy(self) + new.clear_cache() + return new + + def _update_cache(self): + """ + Performs all calculation as needed depending on the current source location + + Returns + ------- + """ + if self._source is None: + raise RuntimeError("Call set_source() first.") + + source_dict = self._source.to_dict() + coord = self._source.position.sky_coord + + if (self._nevents is not None) and (self._event_prob is not None) and self._last_convolved_source_dict == source_dict: + # Nothing has changed + return + + if (self._nevents_weights is None) or (self._event_prob_weights is None) or coord != self._last_convolved_source_skycoord: + # Updating the location is very cost intensive. Only do if necessary + + # Compute nevents integral by integrating though the SC history + # This only computes the weights based on the source location. + # Once we know the source source spectrum, we can integrate over Ei + coord_vec = coord.transform_to(self._sc_ori.attitude.frame).cartesian.xyz.value + sc_coord_vec = self._sc_ori.attitude.rot[:-1].apply(coord_vec) + sc_coord_sph = UnitSphericalRepresentation.from_cartesian(CartesianRepresentation(*sc_coord_vec.transpose())) + + # For each SC timestamp, get the effective area for each energy point, store it as temporary array, + # and multiply by livetime. 
+ # Sum up the exposure (one per energy point) without saving it to memory + exposure = sum([dt*np.fromiter(self._irf.effective_area_cm2([PhotonWithDirectionAndEnergyInSCFrame(c.lon.rad, c.lat.rad, e) + for e in self._energies_keV]), dtype = float) + for c,dt in zip(sc_coord_sph,self._sc_ori.livetime.to_value(u.s))]) + + self._nevents_weights = exposure * self._trapz_weights + + # Get the probability for each event for the source location and each Ei + sc_coord_vec = self._attitude_at_event_times.rot[:-1].apply(coord_vec) + sc_coord_sph = UnitSphericalRepresentation.from_cartesian(CartesianRepresentation(*sc_coord_vec.transpose())) + self._event_prob_weights = np.fromiter(self._irf.event_probability([(PhotonWithDirectionAndEnergyInSCFrame(coord.lon.rad, coord.lat.rad, energy), event) + for coord,event in zip(sc_coord_sph, self._data) \ + for energy in self._energies_keV]), + dtype = float) + + self._event_prob_weights = self._event_prob_weights.reshape((sc_coord_sph.size, self._energies_keV.size)) + + # 3ML default units as cm, s and keV + flux_values = self._source(self._energies_keV) + self._nevents = np.sum(self._nevents_weights * flux_values) + self._event_prob = np.sum((self._event_prob_weights * flux_values[None, :]), axis=1) + + self._last_convolved_source_dict = source_dict + self._last_convolved_source_skycoord = coord.copy() + + @property + def ncounts(self) -> float: + """ + Total expected counts + """ + + self._update_cache() + + return self._nevents + + + def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: + """ + Return the expected number of counts density from the start-th event + to the stop-th event. + + Parameters + ---------- + start : None | int + From beginning by default + stop: None|int + Until the end by default + """ + + self._update_cache() + + return self._event_prob[start:stop] diff --git a/cosipy/util/iterables.py b/cosipy/util/iterables.py new file mode 100644 index 00000000..ecd62561 --- /dev/null +++ b/cosipy/util/iterables.py @@ -0,0 +1,16 @@ +import itertools + +def itertools_batched(iterable, n, *, strict=False): + """ + itertools.batched was added in version 3.12. + Use the "roughly equivalent" from itertools documentation for now. 
+ """ + + # batched('ABCDEFG', 2) → AB CD EF G + if n < 1: + raise ValueError('n must be at least one') + iterator = iter(iterable) + while batch := tuple(itertools.islice(iterator, n)): + if strict and len(batch) != n: + raise ValueError('batched(): incomplete batch') + yield batch \ No newline at end of file diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py index 9e9d1ebf..e62c3b12 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py @@ -1,15 +1,23 @@ #!/usr/bin/env python # coding: utf-8 +import logging +logging.basicConfig(format='%(asctime)s - %(levelname)s - %(message)s', + level=logging.INFO) +logger = logging.getLogger(__name__) - +import cProfile from cosipy import test_data, BinnedData, UnBinnedData from cosipy.data_io.EmCDSUnbinnedData import TimeTagEmCDSEventDataInSCFrameFromArrays, \ - TimeTagEmCDSEventDataInSCFrameFromDC3Fits + TimeTagEmCDSEventDataInSCFrameFromDC3Fits, TimeTagEmCDSEventInSCFrame from cosipy.event_selection.time_selection import TimeSelector +from cosipy.interfaces.photon_parameters import PhotonWithDirectionAndEnergyInSCFrameInterface +from cosipy.response.instrument_response_function import UnpolarizedDC3InterpolatedFarFieldInstrumentResponseFunction +from cosipy.response.photon_types import PhotonWithDirectionAndEnergyInSCFrame from cosipy.spacecraftfile import SpacecraftHistory from cosipy.response.FullDetectorResponse import FullDetectorResponse +from cosipy.threeml.psr_fixed_ei import UnbinnedThreeMLPointSourceResponseTrapz from cosipy.util import fetch_wasabi_file from cosipy.statistics import PoissonLikelihood @@ -29,7 +37,7 @@ import matplotlib.pyplot as plt from threeML import Band, PointSource, Model, JointLikelihood, DataList -from astromodels import Parameter +from astromodels import Parameter, Powerlaw from pathlib import Path @@ -37,6 +45,8 @@ def main(): + profile = cProfile.Profile() + # Download all data data_path = Path("") # /path/to/files. 
Current dir by default @@ -48,18 +58,88 @@ def main(): fetch_wasabi_file('COSI-SMEX/DC3/Data/Backgrounds/Ge/AlbedoPhotons_3months_unbinned_data_filtered_with_SAAcut.fits.gz', output=str(bkg_data_path), checksum='191a451ee597fd2e4b1cf237fc72e6e2') - selector = TimeSelector(tstart = Time("2028-03-01 01:35:00.117"), tstop = Time("2028-03-03 01:35:00.117")) #About 3 days + dr_path = data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5" # path to detector response + fetch_wasabi_file( + 'COSI-SMEX/develop/Data/Responses/SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5', + output=str(dr_path), + checksum='eb72400a1279325e9404110f909c7785') + + sc_orientation_path = data_path / "20280301_3_month_with_orbital_info.ori" + fetch_wasabi_file('COSI-SMEX/DC2/Data/Orientation/20280301_3_month_with_orbital_info.ori', + output=str(sc_orientation_path), checksum='416fcc296fc37a056a069378a2d30cb2') + + + profile.enable() + # orientation history + tstart = Time("2028-03-01 01:35:00.117") + tstop = Time("2028-03-01 02:35:00.117") + sc_orientation = SpacecraftHistory.open(sc_orientation_path) + sc_orientation = sc_orientation.select_interval(tstart, tstop) + + # Prepare data + selector = TimeSelector(tstart = sc_orientation.tstart, tstop = sc_orientation.tstop) + logger.info("Loading data...") data = TimeTagEmCDSEventDataInSCFrameFromDC3Fits(crab_data_path, bkg_data_path, selection=selector) + logger.info("Loading data DONE") + + # Prepare instrument response function + logger.info("Loading response....") + dr = FullDetectorResponse.open(dr_path) + irf = UnpolarizedDC3InterpolatedFarFieldInstrumentResponseFunction(dr) + logger.info("Loading response DONE") + + psr = UnbinnedThreeMLPointSourceResponseTrapz(data, irf, sc_orientation, dr.axes['Ei'].centers) + + # Set model + l = 184.56 + b = -5.78 + + index = -1.99 + piv = 500. * u.keV + K = 0.048977e-3 / u.cm / u.cm / u.s / u.keV + + spectrum = Powerlaw() + + spectrum.index.min_value = -3 + spectrum.index.max_value = -1 + + spectrum.index.value = index + spectrum.K.value = K.value + spectrum.piv.value = piv.value + + spectrum.K.unit = K.unit + spectrum.piv.unit = piv.unit + + spectrum.index.delta = 0.01 + + source = PointSource("source", # Name of source (arbitrary, but needs to be unique) + l=l, # Longitude (deg) + b=b, # Latitude (deg) + spectral_shape=spectrum) # Spectral model + + # Optional: free the position parameters + # source.position.l.free = True + # source.position.b.free = True + + model = Model( + source) # Model with single source. If we had multiple sources, we would do Model(source1, source2, ...) 
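The UnbinnedThreeMLPointSourceResponseTrapz prepared above folds the spectrum on a fixed grid of Ei nodes with trapezoidal-rule weights, so the expected counts reduce to a weighted sum of the flux evaluated at those nodes. A self-contained numeric sketch of that quadrature; the energy grid, exposure and power-law parameters here are arbitrary and not the values used in this script:

    import numpy as np

    energies_keV = np.geomspace(100.0, 10000.0, 10)      # Ei evaluation nodes
    exposure_cm2_s = np.full(energies_keV.size, 1.0e7)   # effective area x livetime per node

    # Trapezoidal quadrature weights for a (possibly non-uniform) grid.
    widths = np.diff(energies_keV)
    weights = np.zeros_like(energies_keV)
    weights[:-1] += widths / 2.0
    weights[1:] += widths / 2.0

    def flux(energy_keV):
        # Toy power law in photons / cm2 / s / keV.
        return 1.0e-4 * (energy_keV / 100.0) ** -2.0

    # Expected counts: integral over Ei of exposure(Ei) * flux(Ei) dEi.
    integrand = exposure_cm2_s * flux(energies_keV)
    n_expected = np.sum(weights * integrand)

    # Cross-check against the textbook trapezoid sum over consecutive nodes.
    reference = np.sum(widths * (integrand[:-1] + integrand[1:]) / 2.0)
    assert np.isclose(n_expected, reference)
    print(n_expected)
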
+ + psr.set_source(source) + logger.info("Updating PSR cache...") + psr._update_cache() + logger.info("Updating PSR cache DONE") + + print(psr.ncounts) + print(np.fromiter(psr.expectation_density(), dtype = float)) + + profile.disable() + profile.dump_stats("prof_interfaces.prof") return if __name__ == "__main__": - import cProfile - cProfile.run('main()', filename = "prof_interfaces.prof") - exit() - main() \ No newline at end of file From 90c09e1ada1265523bf8a73b0bd06b4f6644d6be Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Wed, 8 Oct 2025 12:07:20 -0400 Subject: [PATCH 084/133] Handle measurement phase-space Signed-off-by: Israel Martinez --- .../instrument_response_interface.py | 57 ++++++++++++++++- .../response/instrument_response_function.py | 62 ++++++++++++------- cosipy/threeml/psr_fixed_ei.py | 3 +- ..._fit_threeml_plugin_unbinned_interfaces.py | 15 ++--- 4 files changed, 104 insertions(+), 33 deletions(-) diff --git a/cosipy/interfaces/instrument_response_interface.py b/cosipy/interfaces/instrument_response_interface.py index b1e144a1..4d1812b9 100644 --- a/cosipy/interfaces/instrument_response_interface.py +++ b/cosipy/interfaces/instrument_response_interface.py @@ -1,4 +1,6 @@ -from typing import Protocol, Union, Optional, Iterable, Tuple, runtime_checkable +import itertools +import operator +from typing import Protocol, Union, Optional, Iterable, Tuple, runtime_checkable, ClassVar from astropy.coordinates import SkyCoord from astropy.time import Time @@ -60,9 +62,16 @@ def differential_effective_area(self, @runtime_checkable class InstrumentResponseFunctionInterface(Protocol): + # The photon class and event class that the IRF implementation can handle + photon_type = ClassVar[PhotonInterface] + event_type = ClassVar[EventInterface] + def event_probability(self, query: Iterable[Tuple[PhotonInterface, EventInterface]]) -> Iterable[float]: """ Return the probability density of measuring a given event given a photon. + + The units of the output the inverse of the phase space of the class event_type data space. + e.g. if the event measured energy in keV, the units of output of this function are implicitly 1/keV """ def random_events(self, photons:Iterable[PhotonInterface]) -> Iterable[EventInterface]: @@ -78,6 +87,48 @@ def effective_area_cm2(self, photons: Iterable[PhotonWithDirectionInterface]) -> """ + def differential_effective_area_cm2(self, query: Iterable[Tuple[PhotonWithDirectionInterface, EventInterface]]) -> Iterable[float]: + """ + Event probability multiplied by effective area + + This is provided as a helper function assuming the child classes implemented event_probability + """ + + # Guard to avoid infinite recursion in incomplete child classes + cls = type(self) + if (cls.differential_effective_area_cm2 is FarFieldInstrumentResponseFunctionInterface.differential_effective_area_cm2 + and + cls.event_probability is FarFieldInstrumentResponseFunctionInterface.event_probability): + raise NotImplementedError("Implement differential_effective_area_cm2 and/or event_probability") + + query1, query2 = itertools.tee(query, 2) + photon_query = [photon for photon,_ in query1] + + return map(operator.mul, self.effective_area_cm2(photon_query), self.event_probability(query2)) + + def event_probability(self, query: Iterable[Tuple[PhotonWithDirectionInterface, EventInterface]]) -> Iterable[float]: + """ + Return the probability density of measuring a given event given a photon. 
+ + In the far field case it is the same as the differential_effective_area_cm2 divided by the effective area + + This is provided as a helper function assuming the child classes implemented differential_effective_area_cm2 + """ + + # Guard to avoid infinite recursion in incomplete child classes + cls = type(self) + if ( + cls.differential_effective_area_cm2 is FarFieldInstrumentResponseFunctionInterface.differential_effective_area_cm2 + and + cls.event_probability is FarFieldInstrumentResponseFunctionInterface.event_probability): + raise NotImplementedError("Implement differential_effective_area_cm2 and/or event_probability") + + query1, query2 = itertools.tee(query, 2) + photon_query = [photon for photon, _ in query1] + + return map(operator.truediv, self.differential_effective_area_cm2(query2), self.effective_area_cm2(photon_query)) + + def effective_area(self, photons: Iterable[PhotonWithDirectionInterface]) -> Iterable[u.Quantity]: """ Convenience function @@ -85,6 +136,10 @@ def effective_area(self, photons: Iterable[PhotonWithDirectionInterface]) -> Ite for area_cm2 in self.effective_area_cm2(photons): yield u.Quantity(area_cm2, u.cm*u.cm) + def differential_effective_area(self, query: Iterable[Tuple[PhotonWithDirectionInterface, EventInterface]]) -> Iterable[u.Quantity]: + for area_cm2 in self.differential_effective_area(query): + yield u.Quantity(area_cm2, u.cm*u.cm) + diff --git a/cosipy/response/instrument_response_function.py b/cosipy/response/instrument_response_function.py index 31a928ff..b4989b4c 100644 --- a/cosipy/response/instrument_response_function.py +++ b/cosipy/response/instrument_response_function.py @@ -21,18 +21,39 @@ class UnpolarizedDC3InterpolatedFarFieldInstrumentResponseFunction(FarFieldInstrumentResponseFunctionInterface): + photon_type = PhotonWithDirectionAndEnergyInSCFrameInterface + event_type = EmCDSEventInSCFrameInterface + def __init__(self, response: FullDetectorResponse, batch_size = 100000): - self._prob = response.to_dr().project('NuLambda', 'Ei', 'Em', 'Phi', 'PsiChi') - - self._area_eff = self._prob.project('NuLambda', 'Ei') - - # expand_dims removes units - self._prob /= Quantity(self._prob.axes.expand_dims(self._area_eff, ('NuLambda', 'Ei')), self._area_eff.unit, copy=False) - - self._prob.to('', copy=False) - self._area_eff = self._area_eff.to(u.cm*u.cm, copy=False) + # Get the differential effective area, which is still integrated on each bin at this point + # FarFieldInstrumentResponseFunctionInterface uses cm2 + # First convert and then drop the units + self._diff_area = response.to_dr().project('NuLambda', 'Ei', 'Em', 'Phi', 'PsiChi').to(u.cm * u.cm, copy=False).to(None, copy = False, update = False) + + # Now fix units for the axes + # PhotonWithDirectionAndEnergyInSCFrameInterface has energy in keV + # EmCDSEventInSCFrameInterface has energy in keV, phi in rad + # NuLambda and PsiChi don't have units since these are HealpixAxis. 
They take SkyCoords + # Copy the axes the first time since they are shared with the response:FullDetectorResponse input + self._diff_area.axes['Ei'] = self._diff_area.axes['Ei'].to(u.keV).to(None, copy = False, update = False) + self._diff_area.axes['Em'] = self._diff_area.axes['Em'].to(u.keV).to(None, copy = False, update = False) + self._diff_area.axes['Phi'] = self._diff_area.axes['Phi'].to(u.rad).to(None, copy = False, update = False) + + # Integrate to get the total effective area + self._area = self._diff_area.project('NuLambda', 'Ei') + + # Now make it differential by dividing by the phasespace + # EmCDSEventInSCFrameInterface energy and phi units have already been taken + # care off. Only PsiChi remains, which is a direction in the sphere, therefore per steradians + energy_phase_space = self._diff_area.axes['Ei'].widths + phi_phase_space = self._diff_area.axes['Phi'].widths + psichi_phase_space = self._diff_area.axes['PsiChi'].pixarea().to_value(u.sr) + + self._diff_area /= self._diff_area.axes.expand_dims(energy_phase_space, 'Em') + self._diff_area /= self._diff_area.axes.expand_dims(phi_phase_space, 'Phi') + self._diff_area /= psichi_phase_space self._batch_size = batch_size @@ -43,26 +64,24 @@ def effective_area_cm2(self, photons: Iterable[PhotonWithDirectionAndEnergyInSCF for photon_chunk in itertools_batched(photons, self._batch_size): - lon, lat, energy = np.asarray([[photon.direction_lon_radians, + lon, lat, energy_keV = np.asarray([[photon.direction_lon_radians, photon.direction_lat_radians, photon.energy_keV] for photon in photon_chunk], dtype=float).transpose() direction = SkyCoord(lon, lat, unit = u.rad, frame = SpacecraftFrame()) - energy = Quantity(energy, u.keV) - - for area_eff in self._area_eff.interp(direction, energy): - yield area_eff.value + for area_eff in self._area.interp(direction, energy_keV): + yield area_eff - def event_probability(self, query: Iterable[Tuple[PhotonWithDirectionAndEnergyInSCFrameInterface, EmCDSEventInSCFrameInterface]]) -> Iterable[float]: + def differential_effective_area_cm2(self, query: Iterable[Tuple[PhotonWithDirectionAndEnergyInSCFrameInterface, EmCDSEventInSCFrameInterface]]) -> Iterable[float]: """ - Return the probability density of measuring a given event given a photon. + Return the differential effective area (probability density of measuring a given event given a photon times the effective area) """ for query_chunk in itertools_batched(query, self._batch_size): # Psi is colatitude (complementary angle) - lon_ph, lat_ph, energy_i, energy_m, phi, psi_comp, chi = \ + lon_ph, lat_ph, energy_i_keV, energy_m_keV, phi_rad, psi_comp, chi = \ np.asarray([[photon.direction_lon_radians, photon.direction_lat_radians, photon.energy_keV, @@ -73,12 +92,7 @@ def event_probability(self, query: Iterable[Tuple[PhotonWithDirectionAndEnergyIn ] for photon,event in query_chunk], dtype=float).transpose() direction_ph = SkyCoord(lon_ph, lat_ph, unit = u.rad, frame = SpacecraftFrame()) - energy_i = Quantity(energy_i, u.keV) - energy_m = Quantity(energy_m, u.keV) - phi = Quantity(phi, u.rad) psichi = SkyCoord(chi, psi_comp, unit=u.rad, frame=SpacecraftFrame()) - # Prob not guaranteed to sum up to 1. We should take self._prob.slice instead. - # I think this is faster though, and a good approximation. 
- for prob in self._prob.interp(direction_ph, energy_i, energy_m, phi, psichi): - yield prob \ No newline at end of file + for diff_area in self._diff_area.interp(direction_ph, energy_i_keV, energy_m_keV, phi_rad, psichi): + yield diff_area \ No newline at end of file diff --git a/cosipy/threeml/psr_fixed_ei.py b/cosipy/threeml/psr_fixed_ei.py index 60676860..b3692f91 100644 --- a/cosipy/threeml/psr_fixed_ei.py +++ b/cosipy/threeml/psr_fixed_ei.py @@ -43,8 +43,9 @@ def __init__(self, # Other implementation inputs self._data = data self._irf = irf - self._energies_keV = energies.to_value(u.keV) self._sc_ori = sc_history + # Energies will later use with a PhotonWithEnergyInterface, with uses keV + self._energies_keV = energies.to_value(u.keV) # This can be computed once and for all # Trapezoidal rule weights to integrate in Ei diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py index e62c3b12..38132fec 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py @@ -76,6 +76,12 @@ def main(): sc_orientation = SpacecraftHistory.open(sc_orientation_path) sc_orientation = sc_orientation.select_interval(tstart, tstop) + # Prepare instrument response function + logger.info("Loading response....") + dr = FullDetectorResponse.open(dr_path) + irf = UnpolarizedDC3InterpolatedFarFieldInstrumentResponseFunction(dr) + logger.info("Loading response DONE") + # Prepare data selector = TimeSelector(tstart = sc_orientation.tstart, tstop = sc_orientation.tstop) @@ -84,15 +90,10 @@ def main(): selection=selector) logger.info("Loading data DONE") - # Prepare instrument response function - logger.info("Loading response....") - dr = FullDetectorResponse.open(dr_path) - irf = UnpolarizedDC3InterpolatedFarFieldInstrumentResponseFunction(dr) - logger.info("Loading response DONE") - + # Prepare point source response, which convolved the IRF with the SC orientation psr = UnbinnedThreeMLPointSourceResponseTrapz(data, irf, sc_orientation, dr.axes['Ei'].centers) - # Set model + # Prepare the model l = 184.56 b = -5.78 From 9ec0c418b8c252d4781d44662c42caadeb6043e0 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Wed, 8 Oct 2025 14:27:36 -0400 Subject: [PATCH 085/133] unbinned model folding. 
bkg wip Signed-off-by: Israel Martinez --- cosipy/response/threeml_response.py | 144 ++++++++++-------- cosipy/threeml/psr_fixed_ei.py | 1 - cosipy/threeml/unbinned_model_folding.py | 67 ++++++++ ..._fit_threeml_plugin_unbinned_interfaces.py | 39 +++-- 4 files changed, 176 insertions(+), 75 deletions(-) create mode 100644 cosipy/threeml/unbinned_model_folding.py diff --git a/cosipy/response/threeml_response.py b/cosipy/response/threeml_response.py index 20c45c00..7bf49d73 100644 --- a/cosipy/response/threeml_response.py +++ b/cosipy/response/threeml_response.py @@ -1,7 +1,10 @@ import copy +from typing import Dict + +from numba.typed.dictobject import DictModel from cosipy.interfaces import BinnedThreeMLModelFoldingInterface, BinnedThreeMLSourceResponseInterface, \ - BinnedDataInterface, DataInterface + BinnedDataInterface, DataInterface, ThreeMLSourceResponseInterface from astromodels import Model from astromodels.sources import PointSource, ExtendedSource @@ -10,7 +13,80 @@ __all__ = ["BinnedThreeMLModelFolding"] -class BinnedThreeMLModelFolding(BinnedThreeMLModelFoldingInterface): +class ThreeMLModelFoldingCacheSourceResponsesMixin: + """ + Avoid duplicating code that is the same for the binned and unbinned case + + Needs: + self._model, + """ + _model: Model + _source_responses: Dict[str, ThreeMLSourceResponseInterface] + _psr: ThreeMLSourceResponseInterface + _esr: ThreeMLSourceResponseInterface + _cached_model_dict: dict + + def _cache_source_responses(self): + """ + Create a copy of the PSR and ESR for each source + + Returns True if there was any update + + Updates _cached_model_dict and _source_responses + """ + + # See this issue for the caveats of comparing models + # https://github.com/threeML/threeML/issues/645 + current_model_dict = self._model.to_dict() + + # TODO: currently Model.__eq__ seems broken. It returns. True even + # if the internal parameters changed. Caching the expected value + # is not implemented. Remove the "False and" when fixed + if self._cached_model_dict is not None and self._cached_model_dict == current_model_dict: + # Nothing to do + return False + + # This accounts for the possibility of some sources being added or + # removed from the model. + new_source_responses = {} + + for name, source in self._model.sources.items(): + + if name in self._source_responses: + # Use cache + new_source_responses[name] = self._source_responses[name] + continue + + if isinstance(source, PointSource): + + if self._psr is None: + raise RuntimeError("The model includes a point source but no point source response was provided") + + psr_copy = self._psr.copy() + psr_copy.set_source(source) + new_source_responses[name] = psr_copy + elif isinstance(source, ExtendedSource): + + if self._esr is None: + raise RuntimeError("The model includes an extended source but no extended source response was provided") + + esr_copy = self._esr.copy() + esr_copy.set_source(source) + new_source_responses[name] = esr_copy + else: + raise RuntimeError(f"The model contains the source {name} " + f"of type {type(source)}. 
I don't know " + "how to handle it!") + + self._source_responses = new_source_responses + + # See this issue for the caveats of comparing models + # https://github.com/threeML/threeML/issues/645 + self._cached_model_dict = current_model_dict + + return True + +class BinnedThreeMLModelFolding(BinnedThreeMLModelFoldingInterface, ThreeMLModelFoldingCacheSourceResponsesMixin): def __init__(self, data: BinnedDataInterface, @@ -58,48 +134,6 @@ def set_model(self, model: Model): self._model = model - def _cache_source_responses(self): - """ - Create a copy of the PSR and ESR for each source - Returns - ------- - - """ - - # This accounts for the possibility of some sources being added or - # removed from the model. - new_source_responses = {} - - for name,source in self._model.sources.items(): - - if name in self._source_responses: - # Used cache - new_source_responses[name] = self._source_responses[name] - continue - - if isinstance(source, PointSource): - - if self._psr is None: - raise RuntimeError("The model includes a point source but no point source response was provided") - - psr_copy = self._psr.copy() - psr_copy.set_source(source) - new_source_responses[name] = psr_copy - elif isinstance(source, ExtendedSource): - - if self._esr is None: - raise RuntimeError("The model includes an extended source but no extended source response was provided") - - esr_copy = self._esr.copy() - esr_copy.set_source(source) - new_source_responses[name] = esr_copy - else: - raise RuntimeError(f"The model contains the source {name} " - f"of type {type(source)}. I don't know " - "how to handle it!") - - self._source_responses = new_source_responses - def expectation(self, axes:Axes, copy:bool = True)->Histogram: """ @@ -116,19 +150,10 @@ def expectation(self, axes:Axes, copy:bool = True)->Histogram: if self._model is None: raise RuntimeError("Call set_data() and set_model() first") - # See this issue for the caveats of comparing models - # https://github.com/threeML/threeML/issues/645 - current_model_dict = self._model.to_dict() - - # If nothing has changed in the model, we can use the cached expectation - # as is. - # If the model has changed but the axes haven't, we can at least reuse - # it and prevent new memory allocation, we just need to zero it out + # Create a copy of the PSR and ESR for each source + model_changed = self._cache_source_responses() - # TODO: currently Model.__eq__ seems broken. It returns. True even - # if the internal parameters changed. Caching the expected value - # is not implemented. 
Remove the "False and" when fixed - if self._cached_model_dict is not None and self._cached_model_dict == current_model_dict: + if not model_changed: if copy: return self._expectation.copy() else: @@ -136,17 +161,10 @@ def expectation(self, axes:Axes, copy:bool = True)->Histogram: else: self._expectation.clear() - # Create a copy of the PSR and ESR for each source - self._cache_source_responses() - # Convolve all sources with the response for source_name,psr in self._source_responses.items(): self._expectation += psr.expectation(axes) - # See this issue for the caveats of comparing models - # https://github.com/threeML/threeML/issues/645 - self._cached_model_dict = current_model_dict - if copy: return self._expectation.copy() else: diff --git a/cosipy/threeml/psr_fixed_ei.py b/cosipy/threeml/psr_fixed_ei.py index b3692f91..3524c5f7 100644 --- a/cosipy/threeml/psr_fixed_ei.py +++ b/cosipy/threeml/psr_fixed_ei.py @@ -179,7 +179,6 @@ def _update_cache(self): self._last_convolved_source_dict = source_dict self._last_convolved_source_skycoord = coord.copy() - @property def ncounts(self) -> float: """ Total expected counts diff --git a/cosipy/threeml/unbinned_model_folding.py b/cosipy/threeml/unbinned_model_folding.py new file mode 100644 index 00000000..afdc9a69 --- /dev/null +++ b/cosipy/threeml/unbinned_model_folding.py @@ -0,0 +1,67 @@ +import itertools +from typing import Optional, Iterable + +from astromodels import Model, PointSource, ExtendedSource + +from cosipy.interfaces import UnbinnedThreeMLModelFoldingInterface, UnbinnedThreeMLSourceResponseInterface +from cosipy.interfaces.data_interface import EventDataInSCFrameInterface +from cosipy.response.threeml_response import ThreeMLModelFoldingCacheSourceResponsesMixin + + +class UnbinnedThreeMLModelFolding(UnbinnedThreeMLModelFoldingInterface, ThreeMLModelFoldingCacheSourceResponsesMixin): + + def __init__(self, + data: EventDataInSCFrameInterface, + point_source_response = UnbinnedThreeMLSourceResponseInterface, + extended_source_response: UnbinnedThreeMLSourceResponseInterface = None): + + # Interface inputs + self._model = None + + # Implementation inputs + self._psr = point_source_response + self._esr = extended_source_response + + # Cache + # Each source has its own cache. + # We could cache the sum of all sources, but I thought + # it was not worth it for the typical use case. Usually + # at least one source changes in between call + self._cached_model_dict = None + self._source_responses = {} + + def set_model(self, model: Model): + """ + The model is passed as a reference and it's parameters + can change. Remember to check if it changed since the + last time the user called expectation. + """ + self._model = model + + def ncounts(self) -> float: + """ + Total expected counts + """ + + self._cache_source_responses() + + return sum(s.ncounts() for s in self._source_responses.values()) + + def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: + """ + Return the expected number of counts density from the start-th event + to the stop-th event. 
+ + Parameters + ---------- + start : None | int + From beginning by default + stop: None|int + Until the end by default + """ + + self._cache_source_responses() + + sources_expectation_iter = itertools.product(*(s.expectation_density(start, stop) for s in self._source_responses.values())) + + return [sum(expectations) for expectations in sources_expectation_iter] diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py index 38132fec..c435dcab 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py @@ -2,6 +2,9 @@ # coding: utf-8 import logging + +from cosipy.threeml.unbinned_model_folding import UnbinnedThreeMLModelFolding + logging.basicConfig(format='%(asctime)s - %(levelname)s - %(message)s', level=logging.INFO) logger = logging.getLogger(__name__) @@ -20,7 +23,7 @@ from cosipy.threeml.psr_fixed_ei import UnbinnedThreeMLPointSourceResponseTrapz from cosipy.util import fetch_wasabi_file -from cosipy.statistics import PoissonLikelihood +from cosipy.statistics import PoissonLikelihood, UnbinnedLikelihood from cosipy.background_estimation import FreeNormBinnedBackground from cosipy.interfaces import ThreeMLPluginInterface from cosipy.response import BinnedThreeMLModelFolding, BinnedInstrumentResponse, BinnedThreeMLPointSourceResponse @@ -68,6 +71,9 @@ def main(): fetch_wasabi_file('COSI-SMEX/DC2/Data/Orientation/20280301_3_month_with_orbital_info.ori', output=str(sc_orientation_path), checksum='416fcc296fc37a056a069378a2d30cb2') + bkg_data_path = data_path / "Total_BG_3months_binned_data_filtered_with_SAAcut_SAAreducedHEPD01_DC3binning.hdf5" + fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/bkg_binned_data.hdf5', + output=str(bkg_data_path), checksum = '54221d8556eb4ef520ef61da8083e7f4') profile.enable() # orientation history @@ -120,20 +126,31 @@ def main(): b=b, # Latitude (deg) spectral_shape=spectrum) # Spectral model - # Optional: free the position parameters - # source.position.l.free = True - # source.position.b.free = True - model = Model( source) # Model with single source. If we had multiple sources, we would do Model(source1, source2, ...) - psr.set_source(source) - logger.info("Updating PSR cache...") - psr._update_cache() - logger.info("Updating PSR cache DONE") - print(psr.ncounts) - print(np.fromiter(psr.expectation_density(), dtype = float)) + # Set model folding + response = UnbinnedThreeMLModelFolding(data, psr) + + # response.set_model(model) # optional. Will be called by likelihood + # print(response.ncounts()) + # print(np.fromiter(response.expectation_density(), dtype = float)) + + # Set background + bkg = BinnedData(data_path / "background.yaml") + bkg.load_binned_data_from_hdf5(binned_data=bkg_data_path) + + + like_fun = UnbinnedLikelihood(response, bkg) + + cosi = ThreeMLPluginInterface('cosi', like_fun) + + plugins = DataList(cosi) # If we had multiple instruments, we would do e.g. DataList(cosi, lat, hawc, ...) + + like = JointLikelihood(model, plugins, verbose = False) + + like.fit() profile.disable() profile.dump_stats("prof_interfaces.prof") From dabc8a708eecce310c78925ce95b4aa4da083b45 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Fri, 10 Oct 2025 07:39:14 -0400 Subject: [PATCH 086/133] unbinned background. 
both expectation dentisty and event probability. examples not working. Signed-off-by: Israel Martinez --- .../free_norm_threeml_binned_bkg.py | 205 +++++++++++++++--- cosipy/data_io/EmCDSUnbinnedData.py | 27 ++- cosipy/interfaces/data_interface.py | 48 ++-- cosipy/interfaces/event.py | 79 +++---- cosipy/interfaces/expectation_interface.py | 68 +++++- .../response/instrument_response_function.py | 4 +- cosipy/spacecraftfile/spacecraft_file.py | 10 +- cosipy/statistics/likelihood_functions.py | 5 + cosipy/threeml/psr_fixed_ei.py | 45 ++-- cosipy/threeml/unbinned_model_folding.py | 18 +- ...mple_crab_fit_threeml_plugin_interfaces.py | 4 +- ..._fit_threeml_plugin_unbinned_interfaces.py | 88 ++++++-- .../examples/toy/toy_interfaces_example.py | 40 ++-- 13 files changed, 461 insertions(+), 180 deletions(-) diff --git a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py index 84e32de1..b90e1219 100644 --- a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py +++ b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py @@ -1,17 +1,29 @@ -from typing import Dict, Tuple, Union, Any +import itertools +from typing import Dict, Tuple, Union, Any, Type, Optional, Iterable import numpy as np from astromodels import Parameter +from astropy.coordinates import SkyCoord, CartesianRepresentation, UnitSphericalRepresentation +from astropy.time import Time from histpy import Histogram from histpy import Axes from astropy import units as u +from scoords import SpacecraftFrame -from cosipy.interfaces import BinnedBackgroundInterface, BinnedDataInterface, DataInterface +from cosipy import SpacecraftHistory +from cosipy.data_io.EmCDSUnbinnedData import TimeTagEmCDSEventInSCFrame +from cosipy.interfaces import BinnedBackgroundInterface, BinnedDataInterface, DataInterface, BackgroundDensityInterface, \ + BackgroundInterface, EventInterface __all__ = ["FreeNormBinnedBackground"] -class FreeNormBinnedBackground(BinnedBackgroundInterface): +from cosipy.interfaces.data_interface import TimeTagEmCDSEventDataInSCFrameInterface + +from cosipy.interfaces.event import TimeTagEmCDSEventInSCFrameInterface +from cosipy.util.iterables import itertools_batched + +class FreeNormBackground(BackgroundInterface): """ This must translate to/from regular parameters with arbitrary type from/to 3ML parameters @@ -22,16 +34,41 @@ class FreeNormBinnedBackground(BinnedBackgroundInterface): _default_label = 'bkg' - def __init__(self, distribution:Union[Histogram, Dict[str, Histogram]]): + def __init__(self, + distribution:Union[Histogram, Dict[str, Histogram]], + sc_history:SpacecraftHistory, + copy = True): + """ + + Parameters + ---------- + distribution + sc_history + copy: copy hist distribution + """ if isinstance(distribution, Histogram): # Single component self._distributions = {self._default_label: distribution} - self._norms = 1. + self._norms = np.ones(1) # Hz. Each component + self._norm = 1 # Hz. Total else: # Multiple label components. self._distributions = distribution - self._norms = {f"{l}_norm":1. for l in self.labels} + self._norms = np.ones(self.ncomponents) # Hz Each component + self._norm = len(self.labels) # Hz. 
Total + + self._labels = tuple(self._distributions.keys()) + + # Normalize + # Unit: second + self._livetime = sc_history.cumulative_livetime().to_value(u.s) + for label,dist in self._distributions.items(): + dist_norm = np.sum(dist) + if copy: + self._distributions[label] = dist*(self._livetime/dist_norm) + else: + dist *= (self._livetime/dist_norm) # These will be densify anyway since _expectation is dense # And histpy doesn't yet handle this operation efficiently @@ -52,28 +89,24 @@ def __init__(self, distribution:Union[Histogram, Dict[str, Histogram]]): if self._axes != bkg.axes: raise ValueError("All background components mus have the same axes") - # Cache - self._expectation = None - self._last_norm_values = None - @property def _single_component(self): - return not isinstance(self._norms, dict) + return self.ncomponents == 1 @property def norm(self): + """ + Sum of all rates + """ - if not self._single_component: - raise RuntimeError("This property can only be used for single-component models") - - return self._norms + return u.Quantity(self._norm, u.Hz) @property def norms(self): if self._single_component: - return {f"{self._default_label}_norm": self._norms} + return {f"{self._default_label}_norm": u.Quantity(self._norms[0], u.Hz)} else: - return self._norms.items() + return {l:u.Quantity(n, u.Hz, copy = False) for l,n in zip(self.labels,self._norms)} @property def ncomponents(self): @@ -85,36 +118,47 @@ def meausured_axes(self): @property def labels(self): - return self._distributions.keys() + return self._labels - def set_norm(self, norm: Union[float, Dict[str, float]]): + def set_norm(self, norm: Union[u.Quantity, Dict[str, u.Quantity]]): if self._single_component: if isinstance(norm, dict): - self._norms = norm[f'{self._default_label}_norm'] + self._norms[0] = norm[f'{self._default_label}_norm'].to_value(u.Hz) else: - self._norms = norm + self._norms[0] = norm.to_value(u.Hz) else: # Multiple if not isinstance(norm, dict): raise TypeError("This a multi-component background. 
Provide labeled norm values in a dictionary") for label,norm_i in norm.items(): - if label not in self._norms.keys(): - raise ValueError(f"Norm {label} not in {self._norms.keys()}") + if label not in self.labels: + raise ValueError(f"Norm {label} not in {self.labels}") + + self._norms[self.labels.index(label)] = norm_i.to_value(u.Hz) - self._norms[label] = norm_i + self._norm = sum(n for n in self._norms) def set_parameters(self, **parameters:Dict[str, u.Quantity]) -> None: """ Same keys as background components """ - self.set_norm({l:p.value for l,p in parameters.items()}) + self.set_norm(parameters) @property def parameters(self) -> Dict[str, u.Quantity]: - return {l:u.Quantity(n) for l,n in self.norms.items()} + return self.norms + +class FreeNormBinnedBackground(FreeNormBackground, BinnedBackgroundInterface): + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + # Cache + self._expectation = None + self._last_norm_values = None def expectation(self, axes:Axes, copy:bool = True)->Histogram: """ @@ -163,3 +207,112 @@ def expectation(self, axes:Axes, copy:bool = True)->Histogram: else: return self._expectation + +class FreeNormBackgroundInterpolatedDensityTimeTagEmCDS(FreeNormBackground, BackgroundDensityInterface): + + def event_type(self) -> Type[EventInterface]: + return TimeTagEmCDSEventInSCFrameInterface + + def __init__(self, + data:TimeTagEmCDSEventDataInSCFrameInterface, + distribution:Union[Histogram, Dict[str, Histogram]], + sc_history:SpacecraftHistory, + copy=True, + batch_size = 100000, + *args, + **kwargs): + + super().__init__(distribution, sc_history, + copy=copy, *args, **kwargs) + + # We need the density per phase space for the specific measurement units TimeTagEmCDSEventInSCFrameInterface + # Energy: keV + # Phi: rad + # PsiChi: sr (for the phase space. 
The axis is a HealpixAxis) + # Time: seconds (already in super()) + + psichi_frame = None + + for label,dist in self._distributions.items(): + + dist = self._distributions[label] = dist.project('Em', 'Phi', 'PsiChi') + + dist.axes['Em'] = dist.axes['Em'].to(u.keV).to(None, copy=False, update=False) + dist.axes['Phi'] = dist.axes['Phi'].to(u.rad).to(None, copy=False, update=False) + + energy_phase_space = dist.axes['Em'].widths + phi_phase_space = dist.axes['Phi'].widths + psichi_phase_space = dist.axes['PsiChi'].pixarea().to_value(u.sr) + + if psichi_frame is None: + psichi_frame = dist.axes['PsiChi'].coordsys + else: + if psichi_frame != dist.axes['PsiChi'].coordsys: + raise ValueError("All PsiChi axes must be in the same frame") + + dist /= dist.axes.expand_dims(energy_phase_space, 'Em') + dist /= dist.axes.expand_dims(phi_phase_space, 'Phi') + dist /= psichi_phase_space + + # Compute the probabilities once and for all + # TODO: account for livetime + self._prob = [[] for _ in range(self.ncomponents)] + + for events_chunk in itertools_batched(data, batch_size): + + jd1, jd2, energy,phi, psichi_lon, psichi_lat = np.asarray([[ + event.jd1, + event.jd2, + event.energy_keV, + event.scattering_angle_rad, + event.scattered_lon_rad_sc, + event.scattered_lat_rad_sc] + for event in events_chunk], dtype=float).transpose() + + times = Time(jd1, jd2, format = 'jd') + + # Transform local to inertial + attitudes = sc_history.interp_attitude(times).transform_to(psichi_frame) + sc_psichi_coord = SkyCoord(psichi_lon, psichi_lat, unit=u.rad, frame=SpacecraftFrame()) + sc_psichi_vec = sc_psichi_coord.cartesian.xyz.value + inertial_psichi_vec = attitudes.rot.inv().apply(sc_psichi_vec.transpose()) + inertial_psichi_sph = UnitSphericalRepresentation.from_cartesian(CartesianRepresentation(*inertial_psichi_vec.transpose())) + inertial_psichi_coord = SkyCoord(inertial_psichi_sph, frame = psichi_frame) + + for label,dist in self._distributions.items(): + self._prob[self.labels.index(label)].extend(dist.interp(energy, phi, inertial_psichi_coord)) + + self._prob = np.asarray(self._prob) + + def ncounts(self) -> float: + """ + Total expected counts + """ + return self._livetime * self._norm + + def expectation_density(self, start: Optional[int] = None, stop: Optional[int] = None) -> Iterable[float]: + """ + Return the expected number of counts density from the start-th event + to the stop-th event. 
This equals the event probabiliy times the number of events + + This is provided as a helper function assuming the child classes implemented event_probability + + Parameters + ---------- + start + stop + + Returns + ------- + + """ + + # Multiply each probability by the norm, and then sum + return np.tensordot(self._prob, self._norms, axes = (0,0)) + + + + + + + diff --git a/cosipy/data_io/EmCDSUnbinnedData.py b/cosipy/data_io/EmCDSUnbinnedData.py index 9a8b1b55..e9b927c7 100644 --- a/cosipy/data_io/EmCDSUnbinnedData.py +++ b/cosipy/data_io/EmCDSUnbinnedData.py @@ -1,5 +1,5 @@ from pathlib import Path -from typing import Iterable, Iterator, Optional, Tuple +from typing import Iterable, Iterator, Optional, Tuple, Union, List import numpy as np from astropy.coordinates import BaseCoordinateFrame, Angle, SkyCoord, UnitSphericalRepresentation @@ -10,8 +10,8 @@ from cosipy import UnBinnedData from cosipy.interfaces import EventWithEnergyInterface, EventDataInterface, EventDataWithEnergyInterface -from cosipy.interfaces.data_interface import ComptonDataSpaceEventDataInterface, TimeTagEmCDSEventDataInSCFrameInterface -from cosipy.interfaces.event import ComptonDataSpaceEventInterface, TimeTagEmCDSEventInSCFrameInterface, \ +from cosipy.interfaces.data_interface import TimeTagEmCDSEventDataInSCFrameInterface +from cosipy.interfaces.event import TimeTagEmCDSEventInSCFrameInterface, \ EmCDSEventInSCFrameInterface import astropy.units as u @@ -56,11 +56,11 @@ def scattering_angle_rad(self) -> float: return self._scatt_angle @property - def scattered_lon_rad(self) -> float: + def scattered_lon_rad_sc(self) -> float: return self._scatt_lon @property - def scattered_lat_rad(self) -> float: + def scattered_lat_rad_sc(self) -> float: return self._scatt_lat class TimeTagEmCDSEventInSCFrame(EmCDSEventInSCFrame, TimeTagEmCDSEventInSCFrameInterface): @@ -157,8 +157,8 @@ def __init__(self, new_jd2.append(event.jd2) new_energy.append(event.energy_keV) new_scatt_angle.append(event.scattering_angle_rad) - new_scatt_lat.append(event.scattered_lat_rad) - new_scatt_lon.append(event.scattered_lon_rad) + new_scatt_lat.append(event.scattered_lat_rad_sc) + new_scatt_lon.append(event.scattered_lon_rad_sc) nevents += 1 self._nevents = nevents @@ -200,7 +200,7 @@ def jd2(self) -> Iterable[float]: return self._jd2 @property - def energy_rad(self) -> Iterable[float]: + def energy_keV(self) -> Iterable[float]: return self._energy @property @@ -208,16 +208,16 @@ def scattering_angle_rad(self) -> Iterable[float]: return self._scatt_angle @property - def scattered_lon_rad(self) -> Iterable[float]: + def scattered_lon_rad_sc(self) -> Iterable[float]: return self._scatt_lon @property - def scattered_lat_rad(self) -> Iterable[float]: + def scattered_lat_rad_sc(self) -> Iterable[float]: return self._scatt_angle class TimeTagEmCDSEventDataInSCFrameFromDC3Fits(TimeTagEmCDSEventDataInSCFrameFromArrays): - def __init__(self, *data_path: Tuple[Path], + def __init__(self, data_path: Union[Path, List[Path]], selection:EventSelectorInterface = None): time = np.empty(0) @@ -226,9 +226,12 @@ def __init__(self, *data_path: Tuple[Path], psi = np.empty(0) chi = np.empty(0) + if isinstance(data_path, (str, Path)): + data_path = [Path(data_path)] + for file in data_path: # get_dict_from_fits is really a static method, no config file needed - data_dict = UnBinnedData.get_dict_from_fits(None, file) + data_dict = UnBinnedData.get_dict_from_fits(None, str(file)) time = np.append(time, data_dict['TimeTags']) energy = np.append(energy, 
data_dict['Energies']) diff --git a/cosipy/interfaces/data_interface.py b/cosipy/interfaces/data_interface.py index d1ed8f89..d1c97444 100644 --- a/cosipy/interfaces/data_interface.py +++ b/cosipy/interfaces/data_interface.py @@ -8,8 +8,8 @@ from scoords import SpacecraftFrame from . import EventWithEnergyInterface -from .event import EventInterface, TimeTagEventInterface, ComptonDataSpaceEventInterface, \ - ComptonDataSpaceInSCFrameEventInterface, TimeTagEmCDSEventInSCFrameInterface +from .event import EventInterface, TimeTagEventInterface, \ + ComptonDataSpaceInSCFrameEventInterface, TimeTagEmCDSEventInSCFrameInterface, EventWithScatteringAngleInterface from histpy import Histogram, Axes from astropy.time import Time @@ -30,9 +30,7 @@ @runtime_checkable class DataInterface(Protocol): - - # Type returned by __iter__ in the event data case - event_type = ClassVar[Type] + pass @runtime_checkable class BinnedDataInterface(DataInterface, Protocol): @@ -53,11 +51,12 @@ def fill(self, event_data:Iterable[EventInterface]): """ - - @runtime_checkable class EventDataInterface(DataInterface, Protocol): + # Type returned by __iter__ + event_type = ClassVar[Type[EventInterface]] + def __iter__(self) -> Iterator[EventInterface]: """ Return one Event at a time @@ -81,7 +80,7 @@ def nevents(self) -> int: return sum(1 for _ in iter(self)) @property - def ids(self) -> Iterable[int]: + def id(self) -> Iterable[int]: return [e.id for e in self] @runtime_checkable @@ -117,16 +116,14 @@ def energy(self) -> Quantity: """ return Quantity(self.energy_rad, u.rad) -@runtime_checkable -class ComptonDataSpaceEventDataInterface(EventDataInterface, Protocol): - def __iter__(self) -> Iterator[ComptonDataSpaceEventInterface]:... +@runtime_checkable +class EventDataWithScatteringAngleInterface(EventDataInterface, Protocol): - @property - def frame(self) -> BaseCoordinateFrame: ... + def __iter__(self) -> Iterator[EventWithScatteringAngleInterface]:... @property - def scattering_angle_rad(self) -> Iterable[float]:... + def scattering_angle_rad(self) -> Iterable[float]: ... @property def scattering_angle(self) -> Angle: @@ -135,21 +132,26 @@ def scattering_angle(self) -> Angle: """ return Angle(self.scattering_angle_rad, u.rad) +@runtime_checkable +class ComptonDataSpaceInSCFrameEventDataInterface(EventDataWithScatteringAngleInterface, Protocol): + + def __iter__(self) -> Iterator[ComptonDataSpaceInSCFrameEventInterface]:... + @property - def scattered_lon_rad(self) -> Iterable[float]: ... + def scattered_lon_rad_sc(self) -> Iterable[float]: ... @property - def scattered_lat_rad(self) -> Iterable[float]: ... + def scattered_lat_rad_sc(self) -> Iterable[float]: ... @property - def scattered_direction(self) -> SkyCoord: + def scattered_direction_sc(self) -> SkyCoord: """ Add fancy energy quantity """ - return SkyCoord(self.scattered_lon_rad, - np.pi/2 - self.scattered_lat_rad, + return SkyCoord(self.scattered_lon_rad_sc, + np.pi / 2 - self.scattered_lat_rad_sc, unit = u.rad, - frame = self.frame) + frame = SpacecraftFrame()) @runtime_checkable class EventDataInSCFrameInterface(EventDataInterface, Protocol): @@ -157,12 +159,6 @@ class EventDataInSCFrameInterface(EventDataInterface, Protocol): @property def frame(self) -> SpacecraftFrame:... -@runtime_checkable -class ComptonDataSpaceInSCFrameEventDataInterface(EventDataInSCFrameInterface, - ComptonDataSpaceEventDataInterface, - Protocol): - def __iter__(self) -> Iterator[ComptonDataSpaceInSCFrameEventInterface]:... 
- class TimeTagEmCDSEventDataInSCFrameInterface(TimeTagEventDataInterface, EventDataWithEnergyInterface, ComptonDataSpaceInSCFrameEventDataInterface): diff --git a/cosipy/interfaces/event.py b/cosipy/interfaces/event.py index 23b1221e..92d081d6 100644 --- a/cosipy/interfaces/event.py +++ b/cosipy/interfaces/event.py @@ -1,5 +1,6 @@ from abc import ABC, abstractmethod -from typing import Sequence, Union, Protocol +from symtable import Class +from typing import Sequence, Union, Protocol, ClassVar import numpy as np from astropy.coordinates import Angle, SkyCoord, BaseCoordinateFrame @@ -16,32 +17,15 @@ "EventWithEnergyInterface", ] -class EventMetadata: - - def __init__(self): - self._metadata = {} - - def __getitem__(self, key): - return self._metadata[key] - - def __setitem__(self, key, value): - self._metadata[key] = value - setattr(self, key, value) - - def __delitem__(self, key): - if key in self._metadata: - del self._metadata[key] - delattr(self, key) - - def __repr__(self): - return f"{self.__class__.__name__}({self._metadata})" - @runtime_checkable class EventInterface(Protocol): """ Derived classes implement all accessors """ + # This makes sure that all PDFs have the same units + data_space_units = ClassVar[Union[u.Unit, None]] + @property def id(self) -> int: """ @@ -50,12 +34,11 @@ def id(self) -> int: No necessarily in sequential order """ - @property - def metadata(self) -> EventMetadata:... - @runtime_checkable class TimeTagEventInterface(EventInterface, Protocol): + data_space_units = u.s + @property def jd1(self) -> float:... @@ -72,6 +55,8 @@ def time(self) -> Time: @runtime_checkable class EventWithEnergyInterface(EventInterface, Protocol): + data_space_units = u.keV + @property def energy_keV(self) -> float:... @@ -83,14 +68,14 @@ def energy(self) -> Quantity: return Quantity(self.energy_keV, u.keV) @runtime_checkable -class ComptonDataSpaceEventInterface(EventInterface, Protocol): +class EventWithScatteringAngleInterface(EventInterface, Protocol): - @property - def frame(self) -> BaseCoordinateFrame:... + data_space_units = u.rad @property def scattering_angle_rad(self) -> float: ... + @property def scattering_angle(self) -> Angle: """ @@ -98,47 +83,39 @@ def scattering_angle(self) -> Angle: """ return Angle(self.scattering_angle_rad, u.rad) + +@runtime_checkable +class ComptonDataSpaceInSCFrameEventInterface(EventWithScatteringAngleInterface, Protocol): + + data_space_units = EventWithScatteringAngleInterface.data_space_units * u.sr + @property - def scattered_lon_rad(self) -> float: ... + def scattered_lon_rad_sc(self) -> float: ... @property - def scattered_lat_rad(self) -> float: ... + def scattered_lat_rad_sc(self) -> float: ... @property - def scattered_direction(self) -> SkyCoord: + def scattered_direction_sc(self) -> SkyCoord: """ Add fancy energy quantity """ - return SkyCoord(self.scattered_lon_rad, - np.pi/2 - self.scattered_lat_rad, + return SkyCoord(self.scattered_lon_rad_sc, + np.pi / 2 - self.scattered_lat_rad_sc, unit=u.rad, - frame=self.frame) - - -@runtime_checkable -class EventInSCFrameInterface(EventInterface, Protocol): - - @property - def frame(self) -> SpacecraftFrame:... 
- -@runtime_checkable -class ComptonDataSpaceInSCFrameEventInterface(EventInSCFrameInterface, - ComptonDataSpaceEventInterface, - Protocol): - pass + frame=SpacecraftFrame()) @runtime_checkable -class EmCDSEventInSCFrameInterface(EventInSCFrameInterface, - EventWithEnergyInterface, - ComptonDataSpaceEventInterface, +class EmCDSEventInSCFrameInterface(EventWithEnergyInterface, + ComptonDataSpaceInSCFrameEventInterface, Protocol): - pass + data_space_units = ComptonDataSpaceInSCFrameEventInterface.data_space_units * EventWithEnergyInterface.data_space_units @runtime_checkable class TimeTagEmCDSEventInSCFrameInterface(TimeTagEventInterface, EmCDSEventInSCFrameInterface, Protocol): - pass + data_space_units = EmCDSEventInSCFrameInterface.data_space_units * TimeTagEventInterface.data_space_units diff --git a/cosipy/interfaces/expectation_interface.py b/cosipy/interfaces/expectation_interface.py index b7bc11e7..3266cc69 100644 --- a/cosipy/interfaces/expectation_interface.py +++ b/cosipy/interfaces/expectation_interface.py @@ -1,4 +1,6 @@ -from typing import Protocol, runtime_checkable, Dict, Any, Generator, Iterable, Optional, Union, Iterator +import operator +from typing import Protocol, runtime_checkable, Dict, Any, Generator, Iterable, Optional, Union, Iterator, ClassVar, \ + Type import histpy import numpy as np @@ -37,16 +39,34 @@ def expectation(self, axes:Axes, copy: Optional[bool])->histpy.Histogram: @runtime_checkable class ExpectationDensityInterface(ExpectationInterface, Protocol): + """ + This interface doesn't take an EventDataInterface or Iterable[EventInterface] + because that would complicate caching. The stream of events is assumed + constant after selection. + """ + + # The event class that the instance handles + @property + def event_type(self) -> Type[EventInterface]: + """ + The event class that the implementation can handle + """ def ncounts(self) -> float: """ Total expected counts """ - def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: + def event_probability(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: """ - Return the expected number of counts density from the start-th event - to the stop-th event. + Return the probability of obtaining the observed set of measurement of each event, + given that the event was detected. It equals the expectation density times ncounts + + The units of the output the inverse of the phase space of the event_type data space. + e.g. if the event measured energy in keV, the units of output of this function are implicitly 1/keV + + This is provided as a helper function assuming the child classes implemented expectation_density + Parameters ---------- @@ -56,4 +76,44 @@ def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = N Until the end by default """ + # Guard to avoid infinite recursion in incomplete child classes + cls = type(self) + if ( + cls.event_probability is ExpectationDensityInterface.event_probability + and + cls.expectation_density is ExpectationDensityInterface.expectation_density): + raise NotImplementedError("Implement event_probability and/or expectation_density") + + ncounts = self.ncounts() + return [expectation/ncounts for expectation in self.expectation_density(start, stop)] + + def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: + """ + Return the expected number of counts density from the start-th event + to the stop-th event. 
This equals the event probabiliy times the number of events + + This is provided as a helper function assuming the child classes implemented event_probability + + Parameters + ---------- + start + stop + + Returns + ------- + + """ + # Guard to avoid infinite recursion in incomplete child classes + cls = type(self) + if ( + cls.event_probability is ExpectationDensityInterface.event_probability + and + cls.expectation_density is ExpectationDensityInterface.expectation_density): + raise NotImplementedError("Implement event_probability and/or expectation_density") + + ncounts = self.ncounts() + return [prob*ncounts for prob in self.event_probability(start, stop)] + + + diff --git a/cosipy/response/instrument_response_function.py b/cosipy/response/instrument_response_function.py index b4989b4c..8aee900e 100644 --- a/cosipy/response/instrument_response_function.py +++ b/cosipy/response/instrument_response_function.py @@ -87,8 +87,8 @@ def differential_effective_area_cm2(self, query: Iterable[Tuple[PhotonWithDirect photon.energy_keV, event.energy_keV, event.scattering_angle_rad, - event.scattered_lat_rad, - event.scattered_lon_rad, + event.scattered_lat_rad_sc, + event.scattered_lon_rad_sc, ] for photon,event in query_chunk], dtype=float).transpose() direction_ph = SkyCoord(lon_ph, lat_ph, unit = u.rad, frame = SpacecraftFrame()) diff --git a/cosipy/spacecraftfile/spacecraft_file.py b/cosipy/spacecraftfile/spacecraft_file.py index e842b4f4..7acea318 100644 --- a/cosipy/spacecraftfile/spacecraft_file.py +++ b/cosipy/spacecraftfile/spacecraft_file.py @@ -17,7 +17,7 @@ from .scatt_map import SpacecraftAttitudeMap -from typing import Union +from typing import Union, Optional import logging logger = logging.getLogger(__name__) @@ -324,13 +324,15 @@ def _cumulative_livetime(self, points, weights) -> u.Quantity: return cum_livetime - def cumulative_livetime(self, time: Time) -> u.Quantity: + def cumulative_livetime(self, time: Optional[Time] = None) -> u.Quantity: """ Get the cumulative live obstime up to this obstime. The live obstime in between the internal timestamp is assumed constant. + All by edfault + Parameters ---------- time: @@ -341,6 +343,10 @@ def cumulative_livetime(self, time: Time) -> u.Quantity: Cummulative live obstime, with units. 
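
        Example (illustrative only; "sc" stands for any spacecraft orientation/history
        object providing this method, and "times" for an astropy Time, scalar or array):

            sc.cumulative_livetime()        # total livetime over the full history (time=None)
            sc.cumulative_livetime(times)   # cumulative livetime up to each entry in times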
""" + if time is None: + # All + return np.sum(self.livetime) + points, weights = self.interp_weights(time) return self._cumulative_livetime(points, weights) diff --git a/cosipy/statistics/likelihood_functions.py b/cosipy/statistics/likelihood_functions.py index 56173077..60647843 100644 --- a/cosipy/statistics/likelihood_functions.py +++ b/cosipy/statistics/likelihood_functions.py @@ -92,6 +92,11 @@ def get_log_like(self) -> float: self._nobservations = nobservations + # Log L = -Ntot + sum_i (dN/dOmega)_i + # (dN/dOmega)_i is the expectation density, not a derivative + # (dN/dOmega)_i = Ntot*P_i, where P_i is the event probability + # Alternatively + # Log L = Ntot(Nevents - 1) + sum_i P_i log_like = density_log_sum - ntot return log_like diff --git a/cosipy/threeml/psr_fixed_ei.py b/cosipy/threeml/psr_fixed_ei.py index 3524c5f7..f965ccbb 100644 --- a/cosipy/threeml/psr_fixed_ei.py +++ b/cosipy/threeml/psr_fixed_ei.py @@ -1,5 +1,5 @@ import copy -from typing import Optional, Iterable +from typing import Optional, Iterable, Type import numpy as np from astromodels import PointSource @@ -10,9 +10,9 @@ from cosipy import SpacecraftHistory from cosipy.data_io.EmCDSUnbinnedData import EmCDSEventInSCFrame -from cosipy.interfaces import UnbinnedThreeMLSourceResponseInterface +from cosipy.interfaces import UnbinnedThreeMLSourceResponseInterface, EventInterface from cosipy.interfaces.data_interface import TimeTagEmCDSEventDataInSCFrameInterface -from cosipy.interfaces.event import EmCDSEventInSCFrameInterface +from cosipy.interfaces.event import EmCDSEventInSCFrameInterface, TimeTagEmCDSEventInSCFrameInterface from cosipy.interfaces.instrument_response_interface import FarFieldInstrumentResponseFunctionInterface from cosipy.response.photon_types import PhotonWithDirectionAndEnergyInSCFrame @@ -71,21 +71,22 @@ def __init__(self, # For integral for nevents # int Aeff(t, Ei) F(Ei) dt dEi - # Will need to multiply by F(Ei) and sum. - # It includes the trapezoidal rule weights - # and the time integral based on source position - # and SC history + # Will need to multiply by _trapz_weights*F(Ei) and sum. # Once per Ei - self._nevents_weights = None # In cm2*s*keV + self._exposure = None # In cm2*s # axis 0: events # axis 1: energy_i samples - self._event_prob_weights = None + self._event_prob_weights = None # in cm2/keV # Integrated over Ei self._nevents = None self._event_prob = None + @property + def event_type(self) -> Type[EventInterface]: + return TimeTagEmCDSEventInSCFrameInterface + def set_source(self, source: Source): """ The source is passed as a reference and it's parameters @@ -102,7 +103,7 @@ def clear_cache(self): self._last_convolved_source_dict = None self._last_convolved_source_skycoord = None self._nevents = None - self._nevents_weights = None + self._exposure = None self._event_prob = None self._event_prob_weights = None @@ -142,7 +143,7 @@ def _update_cache(self): # Nothing has changed return - if (self._nevents_weights is None) or (self._event_prob_weights is None) or coord != self._last_convolved_source_skycoord: + if (self._exposure is None) or (self._event_prob_weights is None) or coord != self._last_convolved_source_skycoord: # Updating the location is very cost intensive. Only do if necessary # Compute nevents integral by integrating though the SC history @@ -155,26 +156,30 @@ def _update_cache(self): # For each SC timestamp, get the effective area for each energy point, store it as temporary array, # and multiply by livetime. 
# Sum up the exposure (one per energy point) without saving it to memory + # TODO: account for Earth occultation exposure = sum([dt*np.fromiter(self._irf.effective_area_cm2([PhotonWithDirectionAndEnergyInSCFrame(c.lon.rad, c.lat.rad, e) for e in self._energies_keV]), dtype = float) for c,dt in zip(sc_coord_sph,self._sc_ori.livetime.to_value(u.s))]) - self._nevents_weights = exposure * self._trapz_weights + self._exposure = exposure # cm2 * s # Get the probability for each event for the source location and each Ei - sc_coord_vec = self._attitude_at_event_times.rot[:-1].apply(coord_vec) + # TODO: account for livetime and Earth occultation + sc_coord_vec = self._attitude_at_event_times.rot.apply(coord_vec) sc_coord_sph = UnitSphericalRepresentation.from_cartesian(CartesianRepresentation(*sc_coord_vec.transpose())) - self._event_prob_weights = np.fromiter(self._irf.event_probability([(PhotonWithDirectionAndEnergyInSCFrame(coord.lon.rad, coord.lat.rad, energy), event) + self._event_prob_weights = np.fromiter(self._irf.differential_effective_area_cm2([(PhotonWithDirectionAndEnergyInSCFrame(coord.lon.rad, coord.lat.rad, energy), event) for coord,event in zip(sc_coord_sph, self._data) \ for energy in self._energies_keV]), - dtype = float) + dtype = float) # cm2 / keV.rad.sr self._event_prob_weights = self._event_prob_weights.reshape((sc_coord_sph.size, self._energies_keV.size)) - # 3ML default units as cm, s and keV - flux_values = self._source(self._energies_keV) - self._nevents = np.sum(self._nevents_weights * flux_values) - self._event_prob = np.sum((self._event_prob_weights * flux_values[None, :]), axis=1) + flux_values = self._source(self._energies_keV) #1/cm2/s/keV (3Ml default) + weight_flux_values = flux_values * self._trapz_weights #1/cm2/s + self._nevents = np.sum(self._exposure * weight_flux_values) # unit-less + + self._event_prob = np.sum((self._event_prob_weights * weight_flux_values[None, :]), axis=1) # 1/keV.s.rad.sr + self._event_prob /= self._nevents self._last_convolved_source_dict = source_dict self._last_convolved_source_skycoord = coord.copy() @@ -189,7 +194,7 @@ def ncounts(self) -> float: return self._nevents - def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: + def event_probability(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: """ Return the expected number of counts density from the start-th event to the stop-th event. 
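
For reference, the expected-counts integral cached above is a plain trapezoidal quadrature over the
fixed incident-energy grid: N = int Aeff(t, Ei) F(Ei) dt dEi ~ sum_k exposure_k * w_k * F(E_k).
A minimal stand-alone sketch with toy numbers (here "exposure", "w" and "flux" are stand-ins for
self._exposure, self._trapz_weights and the 3ML flux evaluation; the class computes its own values):

import numpy as np

# Fixed Ei grid (keV) and trapezoid weights: interior points get (dE_left + dE_right)/2,
# the two endpoints get half a bin each.
energies_keV = np.geomspace(100.0, 10000.0, 50)
dE = np.diff(energies_keV)
w = np.zeros_like(energies_keV)
w[:-1] += dE / 2
w[1:] += dE / 2

exposure = np.full_like(energies_keV, 1.0e7)     # cm2 * s per grid point (toy value)
flux = 1.0e-4 * (energies_keV / 1000.0) ** -2    # 1 / cm2 / s / keV (toy power law)

# Expected counts: sum over the Ei grid, unit-less
nevents = np.sum(exposure * w * flux)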
diff --git a/cosipy/threeml/unbinned_model_folding.py b/cosipy/threeml/unbinned_model_folding.py index afdc9a69..62a2f031 100644 --- a/cosipy/threeml/unbinned_model_folding.py +++ b/cosipy/threeml/unbinned_model_folding.py @@ -1,17 +1,17 @@ import itertools from typing import Optional, Iterable +import numpy as np from astromodels import Model, PointSource, ExtendedSource from cosipy.interfaces import UnbinnedThreeMLModelFoldingInterface, UnbinnedThreeMLSourceResponseInterface -from cosipy.interfaces.data_interface import EventDataInSCFrameInterface +from cosipy.interfaces.data_interface import EventDataInSCFrameInterface, EventDataInterface from cosipy.response.threeml_response import ThreeMLModelFoldingCacheSourceResponsesMixin class UnbinnedThreeMLModelFolding(UnbinnedThreeMLModelFoldingInterface, ThreeMLModelFoldingCacheSourceResponsesMixin): def __init__(self, - data: EventDataInSCFrameInterface, point_source_response = UnbinnedThreeMLSourceResponseInterface, extended_source_response: UnbinnedThreeMLSourceResponseInterface = None): @@ -22,6 +22,11 @@ def __init__(self, self._psr = point_source_response self._esr = extended_source_response + if (self._psr is not None) and (self._esr is not None) and self._psr.event_type != self._esr.event_type: + raise RuntimeError("Point and Extended Source Response must handle the same event type") + + self._event_type = self._psr.event_type + # Cache # Each source has its own cache. # We could cache the sum of all sources, but I thought @@ -30,6 +35,10 @@ def __init__(self, self._cached_model_dict = None self._source_responses = {} + @property + def event_type(self): + return self._event_type + def set_model(self, model: Model): """ The model is passed as a reference and it's parameters @@ -47,7 +56,7 @@ def ncounts(self) -> float: return sum(s.ncounts() for s in self._source_responses.values()) - def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: + def event_probability(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: """ Return the expected number of counts density from the start-th event to the stop-th event. @@ -63,5 +72,6 @@ def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = N self._cache_source_responses() sources_expectation_iter = itertools.product(*(s.expectation_density(start, stop) for s in self._source_responses.values())) + ncounts = self.ncounts() - return [sum(expectations) for expectations in sources_expectation_iter] + return [sum(expectations)/ncounts for expectations in sources_expectation_iter] diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py index 523db28b..5fc2e7d1 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py @@ -190,7 +190,9 @@ def main(): data = crab_bkg.get_em_cds() - bkg = FreeNormBinnedBackground(bkg_dist) + bkg = FreeNormBinnedBackground(bkg_dist, + sc_history=sc_orientation, + copy = False) # Currently using the same NnuLambda, Ei and Pol axes as the underlying FullDetectorResponse, # matching the behavior of v0.3. This is all the current BinnedInstrumentResponse can do. 
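
For reference, the density/probability bookkeeping used by the unbinned interfaces above:
expectation_density gives the expected counts density at each observed event, per-source densities
are summed over sources, and dividing by the total expected counts gives event_probability.
A self-contained toy with made-up numbers:

import numpy as np

d_src1 = np.array([0.10, 0.40, 0.05, 0.20])   # expectation density of source 1 at each event
d_src2 = np.array([0.02, 0.01, 0.03, 0.02])   # expectation density of source 2 at each event
n_src1, n_src2 = 8.0, 1.5                     # expected counts from each source

ntot = n_src1 + n_src2                        # ncounts() of the combined expectation
density = d_src1 + d_src2                     # summed expectation density per event
probability = density / ntot                  # per-event probability
assert np.allclose(probability * ntot, density)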
diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py index c435dcab..f1bf34ce 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py @@ -3,6 +3,9 @@ import logging +from histpy import Histogram + +from cosipy.background_estimation.free_norm_threeml_binned_bkg import FreeNormBackgroundInterpolatedDensityTimeTagEmCDS from cosipy.threeml.unbinned_model_folding import UnbinnedThreeMLModelFolding logging.basicConfig(format='%(asctime)s - %(levelname)s - %(message)s', @@ -48,6 +51,8 @@ def main(): + use_bkg = False + profile = cProfile.Profile() # Download all data @@ -71,9 +76,9 @@ def main(): fetch_wasabi_file('COSI-SMEX/DC2/Data/Orientation/20280301_3_month_with_orbital_info.ori', output=str(sc_orientation_path), checksum='416fcc296fc37a056a069378a2d30cb2') - bkg_data_path = data_path / "Total_BG_3months_binned_data_filtered_with_SAAcut_SAAreducedHEPD01_DC3binning.hdf5" + binned_bkg_data_path = data_path / "bkg_binned_data.hdf5" fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/bkg_binned_data.hdf5', - output=str(bkg_data_path), checksum = '54221d8556eb4ef520ef61da8083e7f4') + output=str(binned_bkg_data_path), checksum = '54221d8556eb4ef520ef61da8083e7f4') profile.enable() # orientation history @@ -92,10 +97,31 @@ def main(): selector = TimeSelector(tstart = sc_orientation.tstart, tstop = sc_orientation.tstop) logger.info("Loading data...") - data = TimeTagEmCDSEventDataInSCFrameFromDC3Fits(crab_data_path, bkg_data_path, - selection=selector) + if use_bkg: + data = TimeTagEmCDSEventDataInSCFrameFromDC3Fits([crab_data_path, bkg_data_path], + selection=selector) + else: + data = TimeTagEmCDSEventDataInSCFrameFromDC3Fits(crab_data_path, + selection=selector) + logger.info("Loading data DONE") + # Set background + + if use_bkg: + bkg = BinnedData(data_path / "background.yaml") + bkg.load_binned_data_from_hdf5(binned_data=str(binned_bkg_data_path)) + bkg_dist = bkg.binned_data.project('Em', 'Phi', 'PsiChi') + + # Workaround to avoid inf values. Our bkg should be smooth, but currently it's not. + bkg_dist += sys.float_info.min + + logger.info("Setting bkg...") + bkg = FreeNormBackgroundInterpolatedDensityTimeTagEmCDS(data, bkg_dist, sc_orientation, copy = False) + logger.info("Setting bkg DONE") + else: + bkg = None + # Prepare point source response, which convolved the IRF with the SC orientation psr = UnbinnedThreeMLPointSourceResponseTrapz(data, irf, sc_orientation, dr.axes['Ei'].centers) @@ -104,7 +130,7 @@ def main(): b = -5.78 index = -1.99 - piv = 500. * u.keV + piv = 1 * u.MeV K = 0.048977e-3 / u.cm / u.cm / u.s / u.keV spectrum = Powerlaw() @@ -112,7 +138,10 @@ def main(): spectrum.index.min_value = -3 spectrum.index.max_value = -1 - spectrum.index.value = index + # Fix it for testing purposes + # spectrum.index.value = -2 + # spectrum.index.free = False + spectrum.K.value = K.value spectrum.piv.value = piv.value @@ -131,25 +160,58 @@ def main(): # Set model folding - response = UnbinnedThreeMLModelFolding(data, psr) + response = UnbinnedThreeMLModelFolding(psr) # response.set_model(model) # optional. 
Will be called by likelihood # print(response.ncounts()) # print(np.fromiter(response.expectation_density(), dtype = float)) - # Set background - bkg = BinnedData(data_path / "background.yaml") - bkg.load_binned_data_from_hdf5(binned_data=bkg_data_path) - - + # Setup likelihood like_fun = UnbinnedLikelihood(response, bkg) - cosi = ThreeMLPluginInterface('cosi', like_fun) + cosi = ThreeMLPluginInterface('cosi', like_fun, response, bkg) + + # Nuisance parameter guess, bounds, etc. + if use_bkg: + cosi.bkg_parameter['bkg_norm'] = Parameter("bkg_norm", # background parameter + 1, # initial value of parameter + unit = u.Hz, + min_value=0, # minimum value of parameter + max_value=100, # maximum value of parameter + delta=0.05, # initial step used by fitting engine + ) plugins = DataList(cosi) # If we had multiple instruments, we would do e.g. DataList(cosi, lat, hawc, ...) like = JointLikelihood(model, plugins, verbose = False) + + # Grid + if use_bkg: + loglike = Histogram([np.geomspace(2e-6, 2e-4, 30), + np.geomspace(.1, 10, 31)], labels=['K', 'B']) + + for i, k in enumerate(loglike.axes['K'].centers): + for j, b in enumerate(loglike.axes['B'].centers): + spectrum.K.value = k + cosi.bkg_parameter['bkg_norm'].value = b + + loglike[i, j] = cosi.get_log_like() + + loglike.plot() + else: + loglike = Histogram([np.geomspace(2e-6, 2e-4, 30)], labels=['K'], axis_scale='log') + + for i, k in enumerate(loglike.axes['K'].centers): + spectrum.K.value = k + + loglike[i] = cosi.get_log_like() + + loglike.plot() + + plt.show() + + # Run like.fit() profile.disable() diff --git a/docs/api/interfaces/examples/toy/toy_interfaces_example.py b/docs/api/interfaces/examples/toy/toy_interfaces_example.py index bc8030fe..5974528f 100644 --- a/docs/api/interfaces/examples/toy/toy_interfaces_example.py +++ b/docs/api/interfaces/examples/toy/toy_interfaces_example.py @@ -13,7 +13,6 @@ from cosipy.event_selection.time_selection import TimeSelector from cosipy.interfaces.background_interface import BackgroundDensityInterface from cosipy.interfaces.data_interface import EventDataInterface, DataInterface, TimeTagEventDataInterface -from cosipy.interfaces.event import EventMetadata from cosipy.interfaces.event_selection import EventSelectorInterface from cosipy.statistics import PoissonLikelihood, UnbinnedLikelihood @@ -66,16 +65,11 @@ def __init__(self, index:int, x:float, time:Time): self._x = x self._jd1 = time.jd1 self._jd2 = time.jd2 - self._metadata = EventMetadata() @property def id(self): return self._id - @property - def metadata(self) -> EventMetadata: - return self._metadata - @property def x(self): return self._x @@ -194,7 +188,10 @@ def __init__(self, data: ToyEventData, duration:Quantity): self._norm = 1 self._sel_fraction = (duration/(1*u.day)).to_value('') - self._unit_expectation_density = self._sel_fraction/(toy_axis.hi_lim - toy_axis.lo_lim) + self._probability = self._sel_fraction / (toy_axis.hi_lim - toy_axis.lo_lim) + + def event_type(self) -> Type[EventInterface]: + return ToyEvent def set_parameters(self, **parameters:u.Quantity) -> None: self._norm = parameters['norm'].value @@ -202,12 +199,12 @@ def set_parameters(self, **parameters:u.Quantity) -> None: def ncounts(self) -> float: return self._norm * self._sel_fraction - def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: + def event_probability(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: - density = self._norm * self._unit_expectation_density + prob = 
self._probability for _ in itertools.islice(self._data, start, stop): - yield density + yield prob @property def parameters(self) -> Dict[str, u.Quantity]: @@ -234,6 +231,10 @@ def __init__(self, data: ToyEventData, duration:Quantity): self._unit_expectation = Histogram(toy_axis, contents= self._sel_fraction * np.diff(norm.cdf(toy_axis.edges))) + @property + def event_type(self) -> Type[EventInterface]: + return ToyEvent + def ncounts(self) -> float: if self._source is None: @@ -244,19 +245,16 @@ def ncounts(self) -> float: ns_events = self._sel_fraction * self._source.spectrum.main.shape.k.value return ns_events - def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: - - # I expect in the real case it'll be more efficient to compute - # (ncounts, ncounts*prob) than (ncounts, prob) + def event_probability(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: - cache = self.ncounts()*norm.pdf([event.x for event in itertools.islice(self._data, start, stop)]) + cache = norm.pdf([event.x for event in itertools.islice(self._data, start, stop)]) for n in cache: yield n # Alternative version without cache (slower) # for event in itertools.islice(self._data, start, stop): - # yield self.ncounts()*norm.pdf(event.x) + # yield norm.pdf(event.x) def set_source(self, source: Source): @@ -296,6 +294,10 @@ def __init__(self, data:ToyEventData, psr: ToyPointSourceResponse): self._psr = psr self._psr_copies = {} + @property + def event_type(self): + return ToyEvent + def ncounts(self) -> float: ncounts = 0 @@ -305,12 +307,12 @@ def ncounts(self) -> float: return ncounts - def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: + def event_probability(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: self._cache_psr_copies() - for expectations in zip(*[p.expectation_density(start, stop) for p in self._psr_copies.values()]): - yield np.sum(expectations) + for prob in zip(*[p.event_probability(start, stop) for p in self._psr_copies.values()]): + yield np.sum(prob) def set_model(self, model: Model): From ba8511eea9c03e2aa5b8a15367320d26542eda2b Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Fri, 17 Oct 2025 07:44:17 -0400 Subject: [PATCH 087/133] Fix unbinned toy model and signal-only crab unbinned fit Signed-off-by: Israel Martinez --- cosipy/interfaces/expectation_interface.py | 33 ++++- cosipy/response/threeml_response.py | 3 + cosipy/statistics/likelihood_functions.py | 38 +---- cosipy/threeml/psr_fixed_ei.py | 4 +- ..._fit_threeml_plugin_unbinned_interfaces.py | 130 ++++++++++++++++-- .../examples/toy/toy_interfaces_example.py | 127 ++++++++++------- 6 files changed, 241 insertions(+), 94 deletions(-) diff --git a/cosipy/interfaces/expectation_interface.py b/cosipy/interfaces/expectation_interface.py index 3266cc69..4fc0c483 100644 --- a/cosipy/interfaces/expectation_interface.py +++ b/cosipy/interfaces/expectation_interface.py @@ -1,6 +1,6 @@ import operator from typing import Protocol, runtime_checkable, Dict, Any, Generator, Iterable, Optional, Union, Iterator, ClassVar, \ - Type + Type, Tuple import histpy import numpy as np @@ -114,6 +114,37 @@ def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = N ncounts = self.ncounts() return [prob*ncounts for prob in self.event_probability(start, stop)] +class SumExpectationDensity(ExpectationDensityInterface): + """ + Convenience class to sum multiple 
ExpectationDensityInterface implementation + """ + + def __init__(self, *expectations:Tuple[ExpectationDensityInterface]): + self._expectations = expectations + + self._event_type = expectations[0].event_type + + for ex in expectations: + if ex.event_type is not self._event_type: + raise TypeError("All expectations should have the same event type") + + @property + def event_type(self) -> Type[EventInterface]: + """ + The event class that the implementation can handle + """ + return self._event_type + + def ncounts(self) -> float: + """ + Total expected counts + """ + return sum(ex.ncounts() for ex in self._expectations) + + def expectation_density(self, start: Optional[int] = None, stop: Optional[int] = None) -> Iterable[float]: + + for exdensity in zip(*[ex.expectation_density(start, stop) for ex in self._expectations]): + yield sum(exdensity) diff --git a/cosipy/response/threeml_response.py b/cosipy/response/threeml_response.py index 7bf49d73..b00bb8a1 100644 --- a/cosipy/response/threeml_response.py +++ b/cosipy/response/threeml_response.py @@ -35,6 +35,9 @@ def _cache_source_responses(self): Updates _cached_model_dict and _source_responses """ + if self._model is None: + raise RuntimeError("Call set_model() first.") + # See this issue for the caveats of comparing models # https://github.com/threeML/threeML/issues/645 current_model_dict = self._model.to_dict() diff --git a/cosipy/statistics/likelihood_functions.py b/cosipy/statistics/likelihood_functions.py index 60647843..56ad2f66 100644 --- a/cosipy/statistics/likelihood_functions.py +++ b/cosipy/statistics/likelihood_functions.py @@ -22,8 +22,8 @@ 'PoissonLikelihood'] class UnbinnedLikelihood(UnbinnedLikelihoodInterface): - def __init__(self, response:ExpectationDensityInterface, - bkg:BackgroundDensityInterface = None, + def __init__(self, + expectation:ExpectationDensityInterface, batch_size:int = 100000): """ Will get the number of events from the response and bkg expectation_density iterators @@ -34,16 +34,11 @@ def __init__(self, response:ExpectationDensityInterface, bkg """ - self._bkg = bkg - self._response = response + self._expectation = expectation self._nobservations = None self._batch_size = batch_size - @property - def has_bkg(self): - return self._bkg is not None - @property def nobservations(self) -> int: """ @@ -52,31 +47,14 @@ def nobservations(self) -> int: """ if self._nobservations is None: - self._nobservations = sum(1 for _ in self._get_density_iter()) + self._nobservations = sum(1 for _ in self._expectation.expectation_density()) return self._nobservations - def _get_density_iter(self): - - if self.has_bkg: - - signal_density = self._response.expectation_density() - bkg_density = self._bkg.expectation_density() - - return map(operator.add, signal_density, bkg_density) - - else: - - return self._response.expectation_density() - def get_log_like(self) -> float: - # Compute expectation including background - - ntot = self._response.ncounts() - - if self.has_bkg: - ntot += self._bkg.ncounts() + # Total number of events + ntot = self._expectation.ncounts() # It's faster to compute all log values at once, but requires keeping them in memory # Doing it by chunk is a compromise. 
We might need to adjust the chunk_size @@ -84,7 +62,7 @@ def get_log_like(self) -> float: nobservations = 0 density_log_sum = 0 - for density_iter_chunk in itertools_batched(self._get_density_iter(), self._batch_size): + for density_iter_chunk in itertools_batched(self._expectation.expectation_density(), self._batch_size): density = np.fromiter(density_iter_chunk, dtype=float) density_log_sum += np.sum(np.log(density)) @@ -95,8 +73,6 @@ def get_log_like(self) -> float: # Log L = -Ntot + sum_i (dN/dOmega)_i # (dN/dOmega)_i is the expectation density, not a derivative # (dN/dOmega)_i = Ntot*P_i, where P_i is the event probability - # Alternatively - # Log L = Ntot(Nevents - 1) + sum_i P_i log_like = density_log_sum - ntot return log_like diff --git a/cosipy/threeml/psr_fixed_ei.py b/cosipy/threeml/psr_fixed_ei.py index f965ccbb..e8ff535f 100644 --- a/cosipy/threeml/psr_fixed_ei.py +++ b/cosipy/threeml/psr_fixed_ei.py @@ -150,7 +150,7 @@ def _update_cache(self): # This only computes the weights based on the source location. # Once we know the source source spectrum, we can integrate over Ei coord_vec = coord.transform_to(self._sc_ori.attitude.frame).cartesian.xyz.value - sc_coord_vec = self._sc_ori.attitude.rot[:-1].apply(coord_vec) + sc_coord_vec = self._sc_ori.attitude.rot[:-1].inv().apply(coord_vec) sc_coord_sph = UnitSphericalRepresentation.from_cartesian(CartesianRepresentation(*sc_coord_vec.transpose())) # For each SC timestamp, get the effective area for each energy point, store it as temporary array, @@ -165,7 +165,7 @@ def _update_cache(self): # Get the probability for each event for the source location and each Ei # TODO: account for livetime and Earth occultation - sc_coord_vec = self._attitude_at_event_times.rot.apply(coord_vec) + sc_coord_vec = self._attitude_at_event_times.rot.inv().apply(coord_vec) sc_coord_sph = UnitSphericalRepresentation.from_cartesian(CartesianRepresentation(*sc_coord_vec.transpose())) self._event_prob_weights = np.fromiter(self._irf.differential_effective_area_cm2([(PhotonWithDirectionAndEnergyInSCFrame(coord.lon.rad, coord.lat.rad, energy), event) for coord,event in zip(sc_coord_sph, self._data) \ diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py index f1bf34ce..73007622 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py @@ -3,9 +3,11 @@ import logging -from histpy import Histogram +from astropy.utils.metadata.utils import dtype +from histpy import Histogram, HealpixAxis from cosipy.background_estimation.free_norm_threeml_binned_bkg import FreeNormBackgroundInterpolatedDensityTimeTagEmCDS +from cosipy.interfaces.expectation_interface import SumExpectationDensity from cosipy.threeml.unbinned_model_folding import UnbinnedThreeMLModelFolding logging.basicConfig(format='%(asctime)s - %(levelname)s - %(message)s', @@ -98,11 +100,12 @@ def main(): logger.info("Loading data...") if use_bkg: - data = TimeTagEmCDSEventDataInSCFrameFromDC3Fits([crab_data_path, bkg_data_path], - selection=selector) + data_file = [crab_data_path, bkg_data_path] else: - data = TimeTagEmCDSEventDataInSCFrameFromDC3Fits(crab_data_path, - selection=selector) + data_file = crab_data_path + + data = TimeTagEmCDSEventDataInSCFrameFromDC3Fits(data_file, + selection=selector) logger.info("Loading data DONE") 
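
    # Side note on the UnbinnedLikelihood set up below: get_log_like() reduces to
    # log L = sum_i log(d_i) - N_tot, where d_i is the expectation density at event i
    # and N_tot the total expected counts (equivalently d_i = N_tot * P_i in terms of
    # the per-event probability P_i). A toy check with made-up numbers (np is numpy,
    # already imported at the top of this script; the underscored names are throwaway):
    _d = np.array([0.2, 0.5, 1.3])             # expectation densities, one per observed event
    _ntot = 3.1                                # total expected counts, signal plus background
    _log_like = np.sum(np.log(_d)) - _ntot     # ~= -5.14 for these toy values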
@@ -129,9 +132,9 @@ def main(): l = 184.56 b = -5.78 - index = -1.99 + index = -2.26 piv = 1 * u.MeV - K = 0.048977e-3 / u.cm / u.cm / u.s / u.keV + K = 3e-6 / u.cm / u.cm / u.s / u.keV spectrum = Powerlaw() @@ -139,8 +142,7 @@ def main(): spectrum.index.max_value = -1 # Fix it for testing purposes - # spectrum.index.value = -2 - # spectrum.index.free = False + spectrum.index.free = True spectrum.K.value = K.value spectrum.piv.value = piv.value @@ -167,7 +169,41 @@ def main(): # print(np.fromiter(response.expectation_density(), dtype = float)) # Setup likelihood - like_fun = UnbinnedLikelihood(response, bkg) + if use_bkg: + expectation_density = SumExpectationDensity(response, bkg) + else: + expectation_density = response + + # Test plots. REMOVE + response.set_model(model) + exdenlist = np.fromiter(expectation_density.expectation_density(), dtype=float) + + # plot expectation density energy + energy = np.fromiter([e.energy_keV for e in data], dtype = float) + fig,ax = plt.subplots() + ax.scatter(energy, exdenlist) + ax.set_xscale('log') + ax.set_yscale('log') + h = Histogram(np.geomspace(200,2000)) + h.fill(energy) + h /= h.axis.widths + h *= np.max(exdenlist) / np.max(h) + h.plot(ax) + plt.show() + + # plot expectation density phi + phi = np.fromiter([e.scattering_angle_rad for e in data], dtype = float) + phi *= 180/3.1416 + fig,ax = plt.subplots() + ax.scatter(phi, exdenlist) + h = Histogram(np.linspace(0,180)) + h.fill(phi) + h /= h.axis.widths + h *= np.max(exdenlist) / np.max(h) + h.plot(ax) + plt.show() + + like_fun = UnbinnedLikelihood(expectation_density) cosi = ThreeMLPluginInterface('cosi', like_fun, response, bkg) @@ -185,6 +221,78 @@ def main(): like = JointLikelihood(model, plugins, verbose = False) + # Run + print(data.nevents, expectation_density.ncounts()) + like.fit() + + results = like.results + + # Plot the fitted and injected spectra + + # In[14]: + + + fig, ax = plt.subplots() + + alpha_inj = -1.99 + beta_inj = -2.32 + E0_inj = 531. * (alpha_inj - beta_inj) * u.keV + xp_inj = E0_inj * (alpha_inj + 2) / (alpha_inj - beta_inj) + piv_inj = 100. 
* u.keV + K_inj = 7.56e-4 / u.cm / u.cm / u.s / u.keV + + spectrum_inj = Band() + + spectrum_inj.alpha.min_value = -2.14 + spectrum_inj.alpha.max_value = 3.0 + spectrum_inj.beta.min_value = -5.0 + spectrum_inj.beta.max_value = -2.15 + spectrum_inj.xp.min_value = 1.0 + + spectrum_inj.alpha.value = alpha_inj + spectrum_inj.beta.value = beta_inj + spectrum_inj.xp.value = xp_inj.value + spectrum_inj.K.value = K_inj.value + spectrum_inj.piv.value = piv_inj.value + + spectrum_inj.xp.unit = xp_inj.unit + spectrum_inj.K.unit = K_inj.unit + spectrum_inj.piv.unit = piv_inj.unit + + energy = np.geomspace(100 * u.keV, 10 * u.MeV).to_value(u.keV) + + flux_lo = np.zeros_like(energy) + flux_median = np.zeros_like(energy) + flux_hi = np.zeros_like(energy) + flux_inj = np.zeros_like(energy) + + parameters = {par.name: results.get_variates(par.path) + for par in results.optimized_model["source"].parameters.values() + if par.free} + + results_err = results.propagate(results.optimized_model["source"].spectrum.main.shape.evaluate_at, **parameters) + + for i, e in enumerate(energy): + flux = results_err(e) + flux_median[i] = flux.median + flux_lo[i], flux_hi[i] = flux.equal_tail_interval(cl=0.68) + flux_inj[i] = spectrum_inj.evaluate_at(e) + + ax.plot(energy, energy * energy * flux_median, label="Best fit") + ax.fill_between(energy, energy * energy * flux_lo, energy * energy * flux_hi, alpha=.5, label="Best fit (errors)") + ax.plot(energy, energy * energy * flux_inj, color='black', ls=":", label="Injected") + + ax.set_xscale("log") + ax.set_yscale("log") + + ax.set_xlabel("Energy (keV)") + ax.set_ylabel(r"$E^2 \frac{dN}{dE}$ (keV cm$^{-2}$ s$^{-1}$)") + + ax.legend() + + ax.set_ylim(.1,100) + + plt.show() # Grid if use_bkg: @@ -211,8 +319,6 @@ def main(): plt.show() - # Run - like.fit() profile.disable() profile.dump_stats("prof_interfaces.prof") diff --git a/docs/api/interfaces/examples/toy/toy_interfaces_example.py b/docs/api/interfaces/examples/toy/toy_interfaces_example.py index 5974528f..e2ff2a97 100644 --- a/docs/api/interfaces/examples/toy/toy_interfaces_example.py +++ b/docs/api/interfaces/examples/toy/toy_interfaces_example.py @@ -14,6 +14,7 @@ from cosipy.interfaces.background_interface import BackgroundDensityInterface from cosipy.interfaces.data_interface import EventDataInterface, DataInterface, TimeTagEventDataInterface from cosipy.interfaces.event_selection import EventSelectorInterface +from cosipy.interfaces.expectation_interface import SumExpectationDensity from cosipy.statistics import PoissonLikelihood, UnbinnedLikelihood @@ -54,12 +55,16 @@ nevents_signal = 1000 nevents_bkg = 1000 nevents_tot = nevents_signal + nevents_bkg +use_bkg = True +use_signal = True class ToyEvent(TimeTagEventInterface, EventInterface): """ Unit-less 1D data of a measurement called "x" (could be anything) """ + data_space_units = u.s + def __init__(self, index:int, x:float, time:Time): self._id = index self._x = x @@ -83,8 +88,7 @@ def jd2(self): return self._jd2 class ToyData(DataInterface): - - event_type = ToyEvent + pass class ToyEventDataLoader(ToyData): # This simulates reading event from file @@ -93,13 +97,20 @@ class ToyEventDataLoader(ToyData): def __init__(self): rng = np.random.default_rng() - self._x = np.append(rng.normal(size=nevents_signal), - rng.uniform(toy_axis.lo_lim, toy_axis.hi_lim, size=nevents_bkg)) + signal = rng.normal(size=nevents_signal) + bkg = rng.uniform(toy_axis.lo_lim, toy_axis.hi_lim, size=nevents_bkg) + + if use_signal and use_bkg: + self._x = np.append(signal,bkg) + elif use_bkg: + 
self._x = bkg + elif use_signal: + self._x = signal self._tstart = Time("2000-01-01T00:00:00") self._tstop = Time("2000-01-02T00:00:00") - dt = np.random.uniform(size=nevents_tot) + dt = np.random.uniform(size=self._x.size) dt_sort = np.argsort(dt) self._x = self._x[dt_sort] dt = dt[dt_sort] @@ -114,6 +125,8 @@ def __iter__(self) -> Iterator[ToyEvent]: class ToyEventData(TimeTagEventDataInterface, ToyData): # Random data. Normal signal on top of uniform bkg + event_type = ToyEvent + def __init__(self, loader:ToyEventDataLoader, selector:EventSelectorInterface = None): self._loader = selector(loader) @@ -183,32 +196,31 @@ class ToyBkg(BinnedBackgroundInterface, BackgroundDensityInterface): def __init__(self, data: ToyEventData, duration:Quantity): self._data = data + self._duration = duration.to_value(u.s) self._unit_expectation = Histogram(toy_axis) - self._unit_expectation[:] = 1 / self._unit_expectation.nbins - self._norm = 1 + self._unit_expectation[:] = self._duration / self._unit_expectation.nbins + self._norm = 1 # Hz - self._sel_fraction = (duration/(1*u.day)).to_value('') - self._probability = self._sel_fraction / (toy_axis.hi_lim - toy_axis.lo_lim) + self._unit_expectation_density = self._duration / (toy_axis.hi_lim - toy_axis.lo_lim) + @property def event_type(self) -> Type[EventInterface]: return ToyEvent def set_parameters(self, **parameters:u.Quantity) -> None: - self._norm = parameters['norm'].value + self._norm = parameters['norm'].to_value(u.Hz) def ncounts(self) -> float: - return self._norm * self._sel_fraction - - def event_probability(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: + return self._norm * self._duration - prob = self._probability + def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: for _ in itertools.islice(self._data, start, stop): - yield prob + yield self._norm * self._unit_expectation_density @property def parameters(self) -> Dict[str, u.Quantity]: - return {'norm': u.Quantity(self._norm)} + return {'norm': u.Quantity(self._norm, u.Hz)} def expectation(self, axes:Axes, copy = True) -> Histogram: @@ -216,7 +228,7 @@ def expectation(self, axes:Axes, copy = True) -> Histogram: raise ValueError("Wrong axes. I have fixed axes.") # Always a copy - return self._unit_expectation * self._norm * self._sel_fraction + return self._unit_expectation * self._norm class ToyPointSourceResponse(BinnedThreeMLSourceResponseInterface, UnbinnedThreeMLSourceResponseInterface): """ @@ -227,9 +239,9 @@ class ToyPointSourceResponse(BinnedThreeMLSourceResponseInterface, UnbinnedThree def __init__(self, data: ToyEventData, duration:Quantity): self._data = data self._source = None - self._sel_fraction = (duration/(1*u.day)).to_value('') + self._duration = duration.to_value(u.s) self._unit_expectation = Histogram(toy_axis, - contents= self._sel_fraction * np.diff(norm.cdf(toy_axis.edges))) + contents= self._duration * np.diff(norm.cdf(toy_axis.edges))) @property def event_type(self) -> Type[EventInterface]: @@ -242,21 +254,21 @@ def ncounts(self) -> float: # Get the latest values of the flux # Remember that _model can be modified externally between calls. - ns_events = self._sel_fraction * self._source.spectrum.main.shape.k.value + # This response doesn't have effective area or energy sensitivity. 
We're just using K as a rate + ns_events = self._duration * self._source.spectrum.main.shape.k.as_quantity.to_value(1/(u.s * u.keV * u.cm * u.cm)) return ns_events def event_probability(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: cache = norm.pdf([event.x for event in itertools.islice(self._data, start, stop)]) - for n in cache: - yield n + for prob in cache: + yield prob # Alternative version without cache (slower) # for event in itertools.islice(self._data, start, stop): # yield norm.pdf(event.x) - def set_source(self, source: Source): if not isinstance(source, PointSource): @@ -274,10 +286,8 @@ def expectation(self, axes:Axes, copy = True) -> Histogram: # Get the latest values of the flux # Remember that _model can be modified externally between calls. - ns_events = self._source.spectrum.main.shape.k.value - # Always copies - return self._unit_expectation * ns_events + return self._unit_expectation * self._source.spectrum.main.shape.k.as_quantity.to_value(1/(u.s * u.keV * u.cm * u.cm)) def copy(self) -> "ToyPointSourceResponse": # We are not caching any results, so it's safe to do shallow copy without @@ -307,12 +317,16 @@ def ncounts(self) -> float: return ncounts - def event_probability(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: + def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: self._cache_psr_copies() - for prob in zip(*[p.event_probability(start, stop) for p in self._psr_copies.values()]): - yield np.sum(prob) + if not self._psr_copies: + for _ in itertools.islice(self._data, start, stop): + yield 0 + else: + for expectation in zip(*[p.expectation_density(start, stop) for p in self._psr_copies.values()]): + yield np.sum(expectation) def set_model(self, model: Model): @@ -378,25 +392,27 @@ def main(): psr = ToyPointSourceResponse(data = event_data, duration = duration) response = ToyModelFolding(data = event_data, psr = psr) - bkg = ToyBkg(data = event_data, duration = duration) + + if use_bkg: + bkg = ToyBkg(data = event_data, duration = duration) + expectation_density = SumExpectationDensity(response, bkg) + else: + bkg = None + expectation_density = response ## Source model ## We'll just use the K value in u.cm / u.cm / u.s / u.keV spectrum = Constant() - spectrum.k.value = 1 - - polarized = False - if polarized: - polarization = LinearPolarization(10, 10) - polarization.degree.value = 0. - polarization.angle.value = 10 - - spectral_component = SpectralComponent('arbitrary_spectrum_name', spectrum, polarization) - source = PointSource('arbitrary_source_name', 0, 0, components=[spectral_component]) + if use_signal: + spectrum.k.value = .01 else: + spectrum.k.value = 0 + spectrum.k.free = False + + spectrum.k.units = 1/u.s/u.keV/u.cm/u.cm - source = PointSource("arbitrary_source_name", + source = PointSource("arbitrary_source_name", l=0, b=0, # Doesn't matter spectral_shape=spectrum) @@ -412,7 +428,7 @@ def main(): # Fit if unbinned: - like_fun = UnbinnedLikelihood(response, bkg) + like_fun = UnbinnedLikelihood(expectation_density) else: like_fun = PoissonLikelihood(binned_data, response, bkg) @@ -425,7 +441,14 @@ def main(): # Before the fit, you can set the parameters initial values, bounds, etc. # This is passed to the minimizer. # In addition to model. Nuisance. 
- cosi.bkg_parameter['norm'].value = 1 + if bkg is not None: + cosi.bkg_parameter['norm'] = Parameter("norm", # background parameter + 1, # initial value of parameter + unit = u.Hz, + min_value=0, # minimum value of parameter + max_value=1, # maximum value of parameter + delta=0.001, # initial step used by fitting engine + free = True) plugins = DataList(cosi) like = JointLikelihood(model, plugins) @@ -439,14 +462,22 @@ def main(): fig, ax = plt.subplots() binned_data.data.plot(ax) - expectation = response.expectation(binned_data.axes) - if bkg is not None: - expectation = expectation + bkg.expectation(binned_data.axes) - expectation.plot(ax) + + if unbinned: + x = [e.x for e in event_data] + widths = toy_axis.widths[toy_axis.find_bin(x)] + expectation_density_list = np.fromiter(expectation_density.expectation_density(), dtype=float) + ax.scatter(x, expectation_density_list * widths, s=1, color='green') + else: + expectation = response.expectation(binned_data.axes) + if bkg is not None: + expectation = expectation + bkg.expectation(binned_data.axes) + expectation.plot(ax) + plt.show() # Grid - loglike = Histogram([np.linspace(.9*nevents_signal, 1.1*nevents_signal, 30), np.linspace(.9*nevents_bkg, 1.1*nevents_bkg, 31)], labels = ['s', 'b']) + loglike = Histogram([np.linspace(.006, .016, 31), np.linspace(.006, .016, 31)], labels = ['s', 'b']) for i,s in enumerate(loglike.axes['s'].centers): for j,b in enumerate(loglike.axes['b'].centers): From 080c86ec66a5e1ba0c2265520cfdb0eaab5c6d74 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sat, 18 Oct 2025 11:50:42 -0400 Subject: [PATCH 088/133] Fix crab unbinned. 50% error, I think due to inputs Signed-off-by: Israel Martinez --- .../free_norm_threeml_binned_bkg.py | 16 +-- cosipy/data_io/EmCDSUnbinnedData.py | 6 +- cosipy/interfaces/data_interface.py | 2 +- ..._fit_threeml_plugin_unbinned_interfaces.py | 111 ++++++++++++++---- 4 files changed, 98 insertions(+), 37 deletions(-) diff --git a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py index b90e1219..cbb9ae9d 100644 --- a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py +++ b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py @@ -66,9 +66,9 @@ def __init__(self, for label,dist in self._distributions.items(): dist_norm = np.sum(dist) if copy: - self._distributions[label] = dist*(self._livetime/dist_norm) + self._distributions[label] = dist/dist_norm else: - dist *= (self._livetime/dist_norm) + dist /= dist_norm # These will be densify anyway since _expectation is dense # And histpy doesn't yet handle this operation efficiently @@ -197,7 +197,7 @@ def expectation(self, axes:Axes, copy:bool = True)->Histogram: # Compute expectation for norm,bkg in zip(self.norms.values(), self._distributions.values()): - self._expectation += bkg * norm + self._expectation += bkg * norm * self._livetime # Cache. Regular copy is enough since norm values are float en not mutable self._last_norm_values = self.norms.copy() @@ -210,6 +210,7 @@ def expectation(self, axes:Axes, copy:bool = True)->Histogram: class FreeNormBackgroundInterpolatedDensityTimeTagEmCDS(FreeNormBackground, BackgroundDensityInterface): + @property def event_type(self) -> Type[EventInterface]: return TimeTagEmCDSEventInSCFrameInterface @@ -229,7 +230,7 @@ def __init__(self, # Energy: keV # Phi: rad # PsiChi: sr (for the phase space. 
The axis is a HealpixAxis) - # Time: seconds (already in super()) + # Time: seconds (taken into account by the norm (a rate) unit) psichi_frame = None @@ -272,15 +273,16 @@ def __init__(self, times = Time(jd1, jd2, format = 'jd') # Transform local to inertial - attitudes = sc_history.interp_attitude(times).transform_to(psichi_frame) sc_psichi_coord = SkyCoord(psichi_lon, psichi_lat, unit=u.rad, frame=SpacecraftFrame()) sc_psichi_vec = sc_psichi_coord.cartesian.xyz.value - inertial_psichi_vec = attitudes.rot.inv().apply(sc_psichi_vec.transpose()) + attitudes = sc_history.interp_attitude(times).transform_to(psichi_frame) + inertial_psichi_vec = attitudes.rot.apply(sc_psichi_vec.transpose()) inertial_psichi_sph = UnitSphericalRepresentation.from_cartesian(CartesianRepresentation(*inertial_psichi_vec.transpose())) inertial_psichi_coord = SkyCoord(inertial_psichi_sph, frame = psichi_frame) for label,dist in self._distributions.items(): - self._prob[self.labels.index(label)].extend(dist.interp(energy, phi, inertial_psichi_coord)) + prob = dist.interp(energy, phi, inertial_psichi_coord) + self._prob[self.labels.index(label)].extend(prob) self._prob = np.asarray(self._prob) diff --git a/cosipy/data_io/EmCDSUnbinnedData.py b/cosipy/data_io/EmCDSUnbinnedData.py index e9b927c7..bc59a09f 100644 --- a/cosipy/data_io/EmCDSUnbinnedData.py +++ b/cosipy/data_io/EmCDSUnbinnedData.py @@ -172,7 +172,7 @@ def __init__(self, self._scatt_lon = np.asarray(new_scatt_lon) def __getitem__(self, i: int) -> TimeTagEmCDSEventInSCFrameInterface: - return TimeTagEmCDSEventInSCFrame(self._jd1[i], self._jd2[i], self._energy[i], self._scatt_angle[i], + return TimeTagEmCDSEventInSCFrame(self._jd1[i], self._jd2[i], self._energy[i], self._scatt_angle[i], self._scatt_lon[i], self._scatt_lat[i], self._id[i]) @property @@ -213,7 +213,7 @@ def scattered_lon_rad_sc(self) -> Iterable[float]: @property def scattered_lat_rad_sc(self) -> Iterable[float]: - return self._scatt_angle + return self._scatt_lat class TimeTagEmCDSEventDataInSCFrameFromDC3Fits(TimeTagEmCDSEventDataInSCFrameFromArrays): @@ -237,7 +237,7 @@ def __init__(self, data_path: Union[Path, List[Path]], energy = np.append(energy, data_dict['Energies']) phi = np.append(phi, data_dict['Phi']) psi = np.append(psi, data_dict['Psi local']) - chi = np.append(psi, data_dict['Chi local']) + chi = np.append(chi, data_dict['Chi local']) # Time sort tsort = np.argsort(time) diff --git a/cosipy/interfaces/data_interface.py b/cosipy/interfaces/data_interface.py index d1c97444..16a2be9d 100644 --- a/cosipy/interfaces/data_interface.py +++ b/cosipy/interfaces/data_interface.py @@ -149,7 +149,7 @@ def scattered_direction_sc(self) -> SkyCoord: Add fancy energy quantity """ return SkyCoord(self.scattered_lon_rad_sc, - np.pi / 2 - self.scattered_lat_rad_sc, + self.scattered_lat_rad_sc, unit = u.rad, frame = SpacecraftFrame()) diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py index 73007622..27f5bd8e 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py @@ -5,6 +5,7 @@ from astropy.utils.metadata.utils import dtype from histpy import Histogram, HealpixAxis +from mhealpy import HealpixMap from cosipy.background_estimation.free_norm_threeml_binned_bkg import FreeNormBackgroundInterpolatedDensityTimeTagEmCDS from 
cosipy.interfaces.expectation_interface import SumExpectationDensity @@ -39,7 +40,8 @@ from astropy.time import Time import astropy.units as u -from astropy.coordinates import SkyCoord, Galactic, Angle +from astropy.coordinates import SkyCoord, Galactic, Angle, UnitSphericalRepresentation, CartesianRepresentation, \ + angular_separation import numpy as np import matplotlib.pyplot as plt @@ -53,7 +55,7 @@ def main(): - use_bkg = False + use_bkg = True profile = cProfile.Profile() @@ -74,9 +76,9 @@ def main(): output=str(dr_path), checksum='eb72400a1279325e9404110f909c7785') - sc_orientation_path = data_path / "20280301_3_month_with_orbital_info.ori" - fetch_wasabi_file('COSI-SMEX/DC2/Data/Orientation/20280301_3_month_with_orbital_info.ori', - output=str(sc_orientation_path), checksum='416fcc296fc37a056a069378a2d30cb2') + sc_orientation_path = data_path / "DC3_final_530km_3_month_with_slew_1sbins_GalacticEarth_SAA.ori" + fetch_wasabi_file('COSI-SMEX/DC3/Data/Orientation/DC3_final_530km_3_month_with_slew_1sbins_GalacticEarth_SAA.ori', + output=str(sc_orientation_path), checksum='b87fd41b6c28a5c0c51448ce2964e57c') binned_bkg_data_path = data_path / "bkg_binned_data.hdf5" fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/bkg_binned_data.hdf5', @@ -121,6 +123,7 @@ def main(): logger.info("Setting bkg...") bkg = FreeNormBackgroundInterpolatedDensityTimeTagEmCDS(data, bkg_dist, sc_orientation, copy = False) + bkg.set_norm(5*u.Hz) logger.info("Setting bkg DONE") else: bkg = None @@ -179,30 +182,87 @@ def main(): exdenlist = np.fromiter(expectation_density.expectation_density(), dtype=float) # plot expectation density energy - energy = np.fromiter([e.energy_keV for e in data], dtype = float) - fig,ax = plt.subplots() - ax.scatter(energy, exdenlist) - ax.set_xscale('log') - ax.set_yscale('log') - h = Histogram(np.geomspace(200,2000)) - h.fill(energy) - h /= h.axis.widths - h *= np.max(exdenlist) / np.max(h) - h.plot(ax) - plt.show() + # energy = np.fromiter([e.energy_keV for e in data], dtype = float) + # fig,ax = plt.subplots() + # ax.scatter(energy, exdenlist) + # ax.set_xscale('log') + # ax.set_yscale('log') + # h = Histogram(np.geomspace(50,5000)) + # h.fill(energy) + # h /= h.axis.widths + # h *= np.max(exdenlist) / np.max(h) + # h.plot(ax) + # plt.show() # plot expectation density phi phi = np.fromiter([e.scattering_angle_rad for e in data], dtype = float) phi *= 180/3.1416 + # fig,ax = plt.subplots() + # ax.scatter(phi, exdenlist) + # h = Histogram(np.linspace(0,180)) + # h.fill(phi) + # h /= h.axis.widths + # h *= np.max(exdenlist) / np.max(h) + # h.plot(ax) + # plt.show() + + # Plot ARM + attitudes = sc_orientation.interp_attitude(data.time) + + # psichi_sc = data.scattered_direction_sc.represent_as(UnitSphericalRepresentation) + # coord_vec = source.position.sky_coord.transform_to(sc_orientation.attitude.frame).cartesian.xyz.value + # sc_coord_vec = attitudes.rot.inv().apply(coord_vec) + # sc_coord_sph = UnitSphericalRepresentation.from_cartesian(CartesianRepresentation(*sc_coord_vec.transpose())) + # arm = angular_separation(sc_coord_sph.lon, sc_coord_sph.lat, psichi_sc.lon, psichi_sc.lat).to_value(u.deg) - phi + # + + psichi_sc = data.scattered_direction_sc.represent_as(UnitSphericalRepresentation) + psichi_sc_vec = psichi_sc.to_cartesian().xyz.value + psichi_gal_vec = attitudes.rot.apply(psichi_sc_vec.transpose()) + psichi_coord = SkyCoord(CartesianRepresentation(*psichi_gal_vec.transpose()), frame = attitudes.frame) + arm = 
source.position.sky_coord.separation(psichi_coord).to_value(u.deg) - phi + + h = Histogram(np.linspace(-90,90,360)) + fig,ax = plt.subplots() - ax.scatter(phi, exdenlist) - h = Histogram(np.linspace(0,180)) - h.fill(phi) + ax.scatter(arm, exdenlist) + + h.fill(arm) + + h_ex = Histogram(h.axis) + h_ex.fill(arm, weight=exdenlist) + h_ex /= h # Mean + h /= h.axis.widths - h *= np.max(exdenlist) / np.max(h) - h.plot(ax) + h *= np.nanmax(h_ex) / np.max(h) # Normalize + + h.plot(ax, color = 'green') + h_ex.plot(ax, color='red') + plt.show() + # Plot CDS + # fig = plt.figure() + # ax = fig.add_subplot(1, 1, 1, projection='mollview') + # + # sc = ax.scatter(psichi_coord.l.deg, psichi_coord.b.deg, transform=ax.get_transform('world'), + # c = phi , + # cmap='inferno', + # s=2, vmin=0, vmax=180) + # + # ax.scatter(source.position.sky_coord.l.deg, source.position.sky_coord.b.deg, transform=ax.get_transform('world'), marker='x', s=100, c='red') + # + # fig.colorbar(sc, fraction=.02, label="$\phi$ [deg]") + # + # m = HealpixMap(nside=128, coordsys='galactic') + # m[:] = source.position.sky_coord.separation(m.pix2skycoord(np.arange(m.npix))).to_value(u.deg) + # img = m.get_wcs_img(ax, coord='C') #Use C for a "bug" in healpy (doesn't work the same as plot() + # ax.contour(img, levels=np.arange(0, 180, 10), cmap='inferno', + # vmin=0, vmax=180) + # plt.show() + + + like_fun = UnbinnedLikelihood(expectation_density) cosi = ThreeMLPluginInterface('cosi', like_fun, response, bkg) @@ -210,7 +270,7 @@ def main(): # Nuisance parameter guess, bounds, etc. if use_bkg: cosi.bkg_parameter['bkg_norm'] = Parameter("bkg_norm", # background parameter - 1, # initial value of parameter + 2.5, # initial value of parameter unit = u.Hz, min_value=0, # minimum value of parameter max_value=100, # maximum value of parameter @@ -296,8 +356,8 @@ def main(): # Grid if use_bkg: - loglike = Histogram([np.geomspace(2e-6, 2e-4, 30), - np.geomspace(.1, 10, 31)], labels=['K', 'B']) + loglike = Histogram([np.geomspace(5e-6, 15e-6, 30), + np.geomspace(4, 5, 31)], labels=['K', 'B'], axis_scale='log') for i, k in enumerate(loglike.axes['K'].centers): for j, b in enumerate(loglike.axes['B'].centers): @@ -306,7 +366,6 @@ def main(): loglike[i, j] = cosi.get_log_like() - loglike.plot() else: loglike = Histogram([np.geomspace(2e-6, 2e-4, 30)], labels=['K'], axis_scale='log') @@ -315,7 +374,7 @@ def main(): loglike[i] = cosi.get_log_like() - loglike.plot() + ax, plot = loglike.plot(vmin = np.max(loglike) - 25, vmax = np.max(loglike)) plt.show() From bacdb91b8da2a326b5b0ede2c3dd476404719e74 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sat, 18 Oct 2025 12:28:50 -0400 Subject: [PATCH 089/133] All interfaces examples working Signed-off-by: Israel Martinez --- .../free_norm_threeml_binned_bkg.py | 3 +- ...mple_crab_fit_threeml_plugin_interfaces.py | 1 + ..._fit_threeml_plugin_unbinned_interfaces.py | 66 +++++++++---------- ...ample_grb_fit_threeml_plugin_interfaces.py | 7 +- 4 files changed, 40 insertions(+), 37 deletions(-) diff --git a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py index cbb9ae9d..7a85cc27 100644 --- a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py +++ b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py @@ -196,7 +196,8 @@ def expectation(self, axes:Axes, copy:bool = True)->Histogram: self._expectation.clear() # Compute expectation - for norm,bkg in zip(self.norms.values(), self._distributions.values()): + for label,bkg in 
self._distributions.items(): + norm = self._norms[self.labels.index(label)] self._expectation += bkg * norm * self._livetime # Cache. Regular copy is enough since norm values are float en not mutable diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py index 5fc2e7d1..12b362f5 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py @@ -220,6 +220,7 @@ def main(): # Nuisance parameter guess, bounds, etc. cosi.bkg_parameter['bkg_norm'] = Parameter("bkg_norm", # background parameter 1, # initial value of parameter + unit = u.Hz, min_value=0, # minimum value of parameter max_value=5, # maximum value of parameter delta=0.05, # initial step used by fitting engine diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py index 27f5bd8e..3388b6bb 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py @@ -84,7 +84,6 @@ def main(): fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/bkg_binned_data.hdf5', output=str(binned_bkg_data_path), checksum = '54221d8556eb4ef520ef61da8083e7f4') - profile.enable() # orientation history tstart = Time("2028-03-01 01:35:00.117") tstop = Time("2028-03-01 02:35:00.117") @@ -178,8 +177,8 @@ def main(): expectation_density = response # Test plots. REMOVE - response.set_model(model) - exdenlist = np.fromiter(expectation_density.expectation_density(), dtype=float) + # response.set_model(model) + # exdenlist = np.fromiter(expectation_density.expectation_density(), dtype=float) # plot expectation density energy # energy = np.fromiter([e.energy_keV for e in data], dtype = float) @@ -195,8 +194,8 @@ def main(): # plt.show() # plot expectation density phi - phi = np.fromiter([e.scattering_angle_rad for e in data], dtype = float) - phi *= 180/3.1416 + # phi = np.fromiter([e.scattering_angle_rad for e in data], dtype = float) + # phi *= 180/3.1416 # fig,ax = plt.subplots() # ax.scatter(phi, exdenlist) # h = Histogram(np.linspace(0,180)) @@ -207,7 +206,7 @@ def main(): # plt.show() # Plot ARM - attitudes = sc_orientation.interp_attitude(data.time) + # attitudes = sc_orientation.interp_attitude(data.time) # psichi_sc = data.scattered_direction_sc.represent_as(UnitSphericalRepresentation) # coord_vec = source.position.sky_coord.transform_to(sc_orientation.attitude.frame).cartesian.xyz.value @@ -216,30 +215,30 @@ def main(): # arm = angular_separation(sc_coord_sph.lon, sc_coord_sph.lat, psichi_sc.lon, psichi_sc.lat).to_value(u.deg) - phi # - psichi_sc = data.scattered_direction_sc.represent_as(UnitSphericalRepresentation) - psichi_sc_vec = psichi_sc.to_cartesian().xyz.value - psichi_gal_vec = attitudes.rot.apply(psichi_sc_vec.transpose()) - psichi_coord = SkyCoord(CartesianRepresentation(*psichi_gal_vec.transpose()), frame = attitudes.frame) - arm = source.position.sky_coord.separation(psichi_coord).to_value(u.deg) - phi - - h = Histogram(np.linspace(-90,90,360)) - - fig,ax = plt.subplots() - ax.scatter(arm, exdenlist) - - h.fill(arm) - - h_ex = Histogram(h.axis) - h_ex.fill(arm, weight=exdenlist) - h_ex /= h # Mean - - h /= h.axis.widths - 
h *= np.nanmax(h_ex) / np.max(h) # Normalize - - h.plot(ax, color = 'green') - h_ex.plot(ax, color='red') - - plt.show() + # psichi_sc = data.scattered_direction_sc.represent_as(UnitSphericalRepresentation) + # psichi_sc_vec = psichi_sc.to_cartesian().xyz.value + # psichi_gal_vec = attitudes.rot.apply(psichi_sc_vec.transpose()) + # psichi_coord = SkyCoord(CartesianRepresentation(*psichi_gal_vec.transpose()), frame = attitudes.frame) + # arm = source.position.sky_coord.separation(psichi_coord).to_value(u.deg) - phi + # + # h = Histogram(np.linspace(-90,90,360)) + # + # fig,ax = plt.subplots() + # ax.scatter(arm, exdenlist) + # + # h.fill(arm) + # + # h_ex = Histogram(h.axis) + # h_ex.fill(arm, weight=exdenlist) + # h_ex /= h # Mean + # + # h /= h.axis.widths + # h *= np.nanmax(h_ex) / np.max(h) # Normalize + # + # h.plot(ax, color = 'green') + # h_ex.plot(ax, color='red') + # + # plt.show() # Plot CDS # fig = plt.figure() @@ -283,7 +282,10 @@ def main(): # Run print(data.nevents, expectation_density.ncounts()) + profile.enable() like.fit() + profile.disable() + profile.dump_stats("prof_interfaces.prof") results = like.results @@ -378,10 +380,6 @@ def main(): plt.show() - - profile.disable() - profile.dump_stats("prof_interfaces.prof") - return diff --git a/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py index 787a67a6..5ead0656 100755 --- a/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/examples/grb/example_grb_fit_threeml_plugin_interfaces.py @@ -107,7 +107,9 @@ def main(): # ============ Interfaces ============== data = binned_data.get_em_cds() - bkg = FreeNormBinnedBackground(bkg_dist) + bkg = FreeNormBinnedBackground(bkg_dist, + sc_history=ori, + copy = False) instrument_response = BinnedInstrumentResponse(dr) @@ -134,7 +136,8 @@ def main(): # Nuisance parameter guess, bounds, etc. cosi.bkg_parameter['bkg_norm'] = Parameter("bkg_norm", # background parameter - 0.1, # initial value of parameter + 1, + unit = u.Hz,# initial value of parameter min_value=0, # minimum value of parameter max_value=5, # maximum value of parameter delta=1e-3, # initial step used by fitting engine From 306bcf4b55cc6dbe5cf51b3467fb12f102d3ec7c Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sat, 18 Oct 2025 14:06:27 -0400 Subject: [PATCH 090/133] clean up toy example a little Signed-off-by: Israel Martinez --- cosipy/interfaces/expectation_interface.py | 5 +- .../examples/toy/toy_implementations.py | 351 +++++++++++++ .../examples/toy/toy_interfaces_example.py | 486 +++--------------- 3 files changed, 432 insertions(+), 410 deletions(-) create mode 100644 docs/api/interfaces/examples/toy/toy_implementations.py diff --git a/cosipy/interfaces/expectation_interface.py b/cosipy/interfaces/expectation_interface.py index 4fc0c483..4b72940f 100644 --- a/cosipy/interfaces/expectation_interface.py +++ b/cosipy/interfaces/expectation_interface.py @@ -119,8 +119,9 @@ class SumExpectationDensity(ExpectationDensityInterface): Convenience class to sum multiple ExpectationDensityInterface implementation """ - def __init__(self, *expectations:Tuple[ExpectationDensityInterface]): - self._expectations = expectations + def __init__(self, *expectations:Tuple[ExpectationDensityInterface, None]): + # Remove None. 
Accept None for convenience + self._expectations = tuple(ex for ex in expectations if ex is not None) self._event_type = expectations[0].event_type diff --git a/docs/api/interfaces/examples/toy/toy_implementations.py b/docs/api/interfaces/examples/toy/toy_implementations.py new file mode 100644 index 00000000..c688bcbf --- /dev/null +++ b/docs/api/interfaces/examples/toy/toy_implementations.py @@ -0,0 +1,351 @@ +import itertools +from typing import Dict, Iterator, Iterable, Optional, Type + +from astromodels.sources import Source +import astropy.units as u +from astropy.time import Time +from astropy.units import Quantity + +from cosipy.interfaces.background_interface import BackgroundDensityInterface +from cosipy.interfaces.data_interface import DataInterface, TimeTagEventDataInterface +from cosipy.interfaces.event_selection import EventSelectorInterface + +from cosipy.interfaces import (BinnedDataInterface, + BinnedBackgroundInterface, + BinnedThreeMLModelFoldingInterface, + BinnedThreeMLSourceResponseInterface, + UnbinnedThreeMLSourceResponseInterface, + UnbinnedThreeMLModelFoldingInterface, + EventInterface, + ThreeMLSourceResponseInterface, + TimeTagEventInterface) + +from histpy import Axis, Axes, Histogram +import numpy as np +from scipy.stats import norm, uniform + +from threeML import Constant, PointSource, Model, JointLikelihood, DataList + +from matplotlib import pyplot as plt + +import copy + +""" +This is an example on how to use the new interfaces. + +To keep things simple, example itself is a toy model. +It a 1D model, with a Gaussian signal on top of a flat +uniform background. You can execute it until the end +to see a plot on how it looks like. + +It looks nothing like COSI data, but +shows how generic the interfaces can be. +""" + +# ======== Create toy interfaces for this model =========== +class ToyEvent(TimeTagEventInterface, EventInterface): + """ + Unit-less 1D data of a measurement called "x" (could be anything) + """ + + data_space_units = u.s + + def __init__(self, index:int, x:float, time:Time): + self._id = index + self._x = x + self._jd1 = time.jd1 + self._jd2 = time.jd2 + + @property + def id(self): + return self._id + + @property + def x(self): + return self._x + + @property + def jd1(self): + return self._jd1 + + @property + def jd2(self): + return self._jd2 + +class ToyData(DataInterface): + pass + +class ToyEventDataStream(ToyData): + # This simulates reading event from file + # Check that they are not being read twice + + def __init__(self, nevents_signal, nevents_bkg, min_value, max_value, tstart, tstop): + + rng = np.random.default_rng() + + signal = rng.normal(size=nevents_signal) + bkg = rng.uniform(min_value, max_value, size=nevents_bkg) + + self._x = np.append(signal, bkg) + + self._tstart = tstart + self._tstop = tstop + + dt = np.random.uniform(size=self._x.size) + dt_sort = np.argsort(dt) + self._x = self._x[dt_sort] + dt = dt[dt_sort] + + self._timestamps = self._tstart + dt * u.day + + def __iter__(self) -> Iterator[ToyEvent]: + print("Loading events!") + for n,(x,t) in enumerate(zip(self._x, self._timestamps)): + yield ToyEvent(n,x,t) + +class ToyEventData(TimeTagEventDataInterface, ToyData): + # Random data. 
Normal signal on top of uniform bkg + + event_type = ToyEvent + + def __init__(self, loader:ToyEventDataStream, selector:EventSelectorInterface = None): + + self._loader = selector(loader) + self._cached_iter = None + self._nevents = None # After selection + + def __iter__(self) -> Iterator[ToyEvent]: + + if self._cached_iter is None: + # First call. Split. Keep one and return the other + self._loader, self._cached_iter = itertools.tee(self._loader) + return self._cached_iter + else: + # Following calls: tee the loader again + self._loader, new_iter = itertools.tee(self._loader) + return new_iter + + @property + def nevents(self) -> int: + if self._nevents is None: + # Not cached yet + self._nevents = sum(1 for _ in self) + + return self._nevents + + @property + def x(self): + return np.asarray([e.x for e in self]) + + @property + def jd1(self) -> Iterable[float]: + return np.asarray([e.jd1 for e in self]) + + @property + def jd2(self) -> Iterable[float]: + return np.asarray([e.jd2 for e in self]) + +class ToyBinnedData(BinnedDataInterface, ToyData): + + def __init__(self, data:Histogram): + + if data.ndim != 1: + raise ValueError("ToyBinnedData only take a 1D histogram") + + if data.axis.label != 'x': + raise ValueError("ToyBinnedData requires an axis labeled 'x'") + + self._data = data + + @property + def data(self) -> Histogram: + return self._data + + @property + def axes(self) -> Axes: + return self._data.axes + + def fill(self, event_data:Iterable[ToyEvent]): + + x = np.fromiter([e.x for e in event_data], dtype = float) + + self._data.fill(x) + +class ToyBkg(BinnedBackgroundInterface, BackgroundDensityInterface): + """ + Models a uniform background + + # Since the interfaces are Protocols, they don't *have* + # to derive from the base class, but doing some helps + # code readability, especially if you use an IDE. + """ + + def __init__(self, data: ToyEventData, duration:Quantity, axis:Axis): + + self._data = data + self._duration = duration.to_value(u.s) + self._unit_expectation = Histogram(axis) + self._unit_expectation[:] = self._duration / self._unit_expectation.nbins + self._norm = 1 # Hz + + self._unit_expectation_density = self._duration / (axis.hi_lim - axis.lo_lim) + + @property + def event_type(self) -> Type[EventInterface]: + return ToyEvent + + def set_parameters(self, **parameters:u.Quantity) -> None: + self._norm = parameters['norm'].to_value(u.Hz) + + def ncounts(self) -> float: + return self._norm * self._duration + + def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: + + for _ in itertools.islice(self._data, start, stop): + yield self._norm * self._unit_expectation_density + + @property + def parameters(self) -> Dict[str, u.Quantity]: + return {'norm': u.Quantity(self._norm, u.Hz)} + + def expectation(self, axes:Axes, copy = True) -> Histogram: + + if axes != self._unit_expectation.axes: + raise ValueError("Wrong axes. I have fixed axes.") + + # Always a copy + return self._unit_expectation * self._norm + +class ToyPointSourceResponse(BinnedThreeMLSourceResponseInterface, UnbinnedThreeMLSourceResponseInterface): + """ + This models a Gaussian signal in 1D, centered at 0 and with std = 1. 
+ The normalization --the "flux"-- is the only free parameters + """ + + def __init__(self, data: ToyEventData, duration:Quantity, axis:Axis): + self._data = data + self._source = None + self._duration = duration.to_value(u.s) + self._unit_expectation = Histogram(axis, + contents= self._duration * np.diff(norm.cdf(axis.edges))) + + @property + def event_type(self) -> Type[EventInterface]: + return ToyEvent + + def ncounts(self) -> float: + + if self._source is None: + raise RuntimeError("Set a source first") + + # Get the latest values of the flux + # Remember that _model can be modified externally between calls. + # This response doesn't have effective area or energy sensitivity. We're just using K as a rate + ns_events = self._duration * self._source.spectrum.main.shape.k.value + return ns_events + + def event_probability(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: + + cache = norm.pdf([event.x for event in itertools.islice(self._data, start, stop)]) + + for prob in cache: + yield prob + + # Alternative version without cache (slower) + # for event in itertools.islice(self._data, start, stop): + # yield norm.pdf(event.x) + + def set_source(self, source: Source): + + if not isinstance(source, PointSource): + raise TypeError("I only know how to handle point sources!") + + self._source = source + + def expectation(self, axes:Axes, copy = True) -> Histogram: + + if axes != self._unit_expectation.axes: + raise ValueError("Wrong axes. I have fixed axes.") + + if self._source is None: + raise RuntimeError("Set a source first") + + # Get the latest values of the flux + # Remember that _model can be modified externally between calls. + # Always copies + return self._unit_expectation * self._source.spectrum.main.shape.k.value + + def copy(self) -> "ToyPointSourceResponse": + # We are not caching any results, so it's safe to do shallow copy without + # re-initializing any member. + return copy.copy(self) + +class ToyModelFolding(BinnedThreeMLModelFoldingInterface, UnbinnedThreeMLModelFoldingInterface): + + def __init__(self, data:ToyEventData, psr: ToyPointSourceResponse): + + self._data = data + self._model = None + + self._psr = psr + self._psr_copies = {} + + @property + def event_type(self): + return ToyEvent + + def ncounts(self) -> float: + + ncounts = 0 + + for source_name,psr in self._psr_copies.items(): + ncounts += psr.ncounts() + + return ncounts + + def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: + + self._cache_psr_copies() + + if not self._psr_copies: + for _ in itertools.islice(self._data, start, stop): + yield 0 + else: + for expectation in zip(*[p.expectation_density(start, stop) for p in self._psr_copies.values()]): + yield np.sum(expectation) + + def set_model(self, model: Model): + + self._model = model + + def _cache_psr_copies(self): + + new_psr_copies = {} + + for name,source in self._model.sources.items(): + + if name in self._psr_copies: + # Use cache + new_psr_copies[name] = self._psr_copies[name] + + psr_copy = self._psr.copy() + psr_copy.set_source(source) + + new_psr_copies[name] = psr_copy + + self._psr_copies = new_psr_copies + + def expectation(self, axes:Axes, copy = True) -> Histogram: + + self._cache_psr_copies() + + expectation = Histogram(axes) + + for source_name,psr in self._psr_copies.items(): + expectation += psr.expectation(axes, copy = False) + + # Always a copy + return expectation + +# ======= Actual code. 
This is how the "tutorial" will look like ================ \ No newline at end of file diff --git a/docs/api/interfaces/examples/toy/toy_interfaces_example.py b/docs/api/interfaces/examples/toy/toy_interfaces_example.py index e2ff2a97..06daeeb5 100644 --- a/docs/api/interfaces/examples/toy/toy_interfaces_example.py +++ b/docs/api/interfaces/examples/toy/toy_interfaces_example.py @@ -1,407 +1,80 @@ -import itertools -from typing import Dict, Any, Generator, Iterator, Iterable, Optional, Union, Type +from toy_implementations import * -from astromodels.sources import Source -from astromodels import LinearPolarization, SpectralComponent, Parameter -from astromodels.core.polarization import Polarization +from astromodels import Parameter import astropy.units as u from astropy.time import Time -from astropy.units import Quantity -from numpy.ma.core import logical_or - -from cosipy import SpacecraftHistory from cosipy.event_selection.time_selection import TimeSelector -from cosipy.interfaces.background_interface import BackgroundDensityInterface -from cosipy.interfaces.data_interface import EventDataInterface, DataInterface, TimeTagEventDataInterface -from cosipy.interfaces.event_selection import EventSelectorInterface from cosipy.interfaces.expectation_interface import SumExpectationDensity from cosipy.statistics import PoissonLikelihood, UnbinnedLikelihood - -from cosipy.interfaces import (BinnedDataInterface, - BinnedBackgroundInterface, - BinnedThreeMLModelFoldingInterface, - BinnedThreeMLSourceResponseInterface, - ThreeMLPluginInterface, - UnbinnedThreeMLSourceResponseInterface, UnbinnedThreeMLModelFoldingInterface, EventInterface, - ThreeMLSourceResponseInterface, TimeTagEventInterface) -from histpy import Axis, Axes, Histogram +from cosipy.interfaces import ThreeMLPluginInterface +from histpy import Axis, Histogram import numpy as np -from scipy.stats import norm, uniform from threeML import Constant, PointSource, Model, JointLikelihood, DataList from matplotlib import pyplot as plt -import copy - -""" -This is an example on how to use the new interfaces. - -To keep things simple, example itself is a toy model. -It a 1D model, with a Gaussian signal on top of a flat -uniform background. You can execute it until the end -to see a plot on how it looks like. - -It looks nothing like COSI data, but -shows how generic the interfaces can be. -""" - -# ======== Create toy interfaces for this model =========== - -# Simple 1D axes. Hardcoded. 
-toy_axis = Axis(np.linspace(-5, 5), label = 'x') -nevents_signal = 1000 -nevents_bkg = 1000 -nevents_tot = nevents_signal + nevents_bkg -use_bkg = True -use_signal = True - -class ToyEvent(TimeTagEventInterface, EventInterface): - """ - Unit-less 1D data of a measurement called "x" (could be anything) - """ - - data_space_units = u.s - - def __init__(self, index:int, x:float, time:Time): - self._id = index - self._x = x - self._jd1 = time.jd1 - self._jd2 = time.jd2 - - @property - def id(self): - return self._id - - @property - def x(self): - return self._x - - @property - def jd1(self): - return self._jd1 - - @property - def jd2(self): - return self._jd2 - -class ToyData(DataInterface): - pass - -class ToyEventDataLoader(ToyData): - # This simulates reading event from file - # Check that they are not being read twice - - def __init__(self): - rng = np.random.default_rng() - - signal = rng.normal(size=nevents_signal) - bkg = rng.uniform(toy_axis.lo_lim, toy_axis.hi_lim, size=nevents_bkg) - - if use_signal and use_bkg: - self._x = np.append(signal,bkg) - elif use_bkg: - self._x = bkg - elif use_signal: - self._x = signal - - self._tstart = Time("2000-01-01T00:00:00") - self._tstop = Time("2000-01-02T00:00:00") - - dt = np.random.uniform(size=self._x.size) - dt_sort = np.argsort(dt) - self._x = self._x[dt_sort] - dt = dt[dt_sort] - - self._timestamps = self._tstart + dt * u.day - - def __iter__(self) -> Iterator[ToyEvent]: - print("Loading events!") - for n,(x,t) in enumerate(zip(self._x, self._timestamps)): - yield ToyEvent(n,x,t) - -class ToyEventData(TimeTagEventDataInterface, ToyData): - # Random data. Normal signal on top of uniform bkg - - event_type = ToyEvent - - def __init__(self, loader:ToyEventDataLoader, selector:EventSelectorInterface = None): - - self._loader = selector(loader) - self._cached_iter = None - self._nevents = None # After selection - - def __iter__(self) -> Iterator[ToyEvent]: - - if self._cached_iter is None: - # First call. Split. Keep one and return the other - self._loader, self._cached_iter = itertools.tee(self._loader) - return self._cached_iter - else: - # Following calls: tee the loader again - self._loader, new_iter = itertools.tee(self._loader) - return new_iter - - @property - def nevents(self) -> int: - if self._nevents is None: - # Not cached yet - self._nevents = sum(1 for _ in self) - - return self._nevents - - @property - def x(self): - return np.asarray([e.x for e in self]) - - @property - def jd1(self) -> Iterable[float]: - return np.asarray([e.jd1 for e in self]) - - @property - def jd2(self) -> Iterable[float]: - return np.asarray([e.jd2 for e in self]) - -class ToyBinnedData(BinnedDataInterface, ToyData): - - def __init__(self, data:Histogram): - - if data.ndim != 1: - raise ValueError("ToyBinnedData only take a 1D histogram") - - if data.axis.label != 'x': - raise ValueError("ToyBinnedData requires an axis labeled 'x'") - - self._data = data - - @property - def data(self) -> Histogram: - return self._data - - @property - def axes(self) -> Axes: - return self._data.axes - -class ToyBkg(BinnedBackgroundInterface, BackgroundDensityInterface): - """ - Models a uniform background - - # Since the interfaces are Protocols, they don't *have* - # to derive from the base class, but doing some helps - # code readability, especially if you use an IDE. 
- """ - - def __init__(self, data: ToyEventData, duration:Quantity): - - self._data = data - self._duration = duration.to_value(u.s) - self._unit_expectation = Histogram(toy_axis) - self._unit_expectation[:] = self._duration / self._unit_expectation.nbins - self._norm = 1 # Hz - - self._unit_expectation_density = self._duration / (toy_axis.hi_lim - toy_axis.lo_lim) - - @property - def event_type(self) -> Type[EventInterface]: - return ToyEvent - - def set_parameters(self, **parameters:u.Quantity) -> None: - self._norm = parameters['norm'].to_value(u.Hz) - - def ncounts(self) -> float: - return self._norm * self._duration - - def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: - - for _ in itertools.islice(self._data, start, stop): - yield self._norm * self._unit_expectation_density - - @property - def parameters(self) -> Dict[str, u.Quantity]: - return {'norm': u.Quantity(self._norm, u.Hz)} - - def expectation(self, axes:Axes, copy = True) -> Histogram: - - if axes != self._unit_expectation.axes: - raise ValueError("Wrong axes. I have fixed axes.") - - # Always a copy - return self._unit_expectation * self._norm - -class ToyPointSourceResponse(BinnedThreeMLSourceResponseInterface, UnbinnedThreeMLSourceResponseInterface): - """ - This models a Gaussian signal in 1D, centered at 0 and with std = 1. - The normalization --the "flux"-- is the only free parameters - """ - - def __init__(self, data: ToyEventData, duration:Quantity): - self._data = data - self._source = None - self._duration = duration.to_value(u.s) - self._unit_expectation = Histogram(toy_axis, - contents= self._duration * np.diff(norm.cdf(toy_axis.edges))) - - @property - def event_type(self) -> Type[EventInterface]: - return ToyEvent - - def ncounts(self) -> float: - - if self._source is None: - raise RuntimeError("Set a source first") - - # Get the latest values of the flux - # Remember that _model can be modified externally between calls. - # This response doesn't have effective area or energy sensitivity. We're just using K as a rate - ns_events = self._duration * self._source.spectrum.main.shape.k.as_quantity.to_value(1/(u.s * u.keV * u.cm * u.cm)) - return ns_events - - def event_probability(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: - - cache = norm.pdf([event.x for event in itertools.islice(self._data, start, stop)]) - - for prob in cache: - yield prob - - # Alternative version without cache (slower) - # for event in itertools.islice(self._data, start, stop): - # yield norm.pdf(event.x) - - def set_source(self, source: Source): - - if not isinstance(source, PointSource): - raise TypeError("I only know how to handle point sources!") - - self._source = source - - def expectation(self, axes:Axes, copy = True) -> Histogram: - - if axes != self._unit_expectation.axes: - raise ValueError("Wrong axes. I have fixed axes.") - - if self._source is None: - raise RuntimeError("Set a source first") - - # Get the latest values of the flux - # Remember that _model can be modified externally between calls. - # Always copies - return self._unit_expectation * self._source.spectrum.main.shape.k.as_quantity.to_value(1/(u.s * u.keV * u.cm * u.cm)) - - def copy(self) -> "ToyPointSourceResponse": - # We are not caching any results, so it's safe to do shallow copy without - # re-initializing any member. 
-        return copy.copy(self)
-
-class ToyModelFolding(BinnedThreeMLModelFoldingInterface, UnbinnedThreeMLModelFoldingInterface):
-
-    def __init__(self, data:ToyEventData, psr: ToyPointSourceResponse):
-
-        self._data = data
-        self._model = None
-
-        self._psr = psr
-        self._psr_copies = {}
-
-    @property
-    def event_type(self):
-        return ToyEvent
-
-    def ncounts(self) -> float:
-
-        ncounts = 0
-
-        for source_name,psr in self._psr_copies.items():
-            ncounts += psr.ncounts()
-
-        return ncounts
-
-    def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]:
-
-        self._cache_psr_copies()
-
-        if not self._psr_copies:
-            for _ in itertools.islice(self._data, start, stop):
-                yield 0
-        else:
-            for expectation in zip(*[p.expectation_density(start, stop) for p in self._psr_copies.values()]):
-                yield np.sum(expectation)
-
-    def set_model(self, model: Model):
-
-        self._model = model
-
-    def _cache_psr_copies(self):
-
-        new_psr_copies = {}
-
-        for name,source in self._model.sources.items():
-
-            if name in self._psr_copies:
-                # Use cache
-                new_psr_copies[name] = self._psr_copies[name]
-
-            psr_copy = self._psr.copy()
-            psr_copy.set_source(source)
-
-            new_psr_copies[name] = psr_copy
-
-        self._psr_copies = new_psr_copies
-
-    def expectation(self, axes:Axes, copy = True) -> Histogram:
-
-        self._cache_psr_copies()
-
-        expectation = Histogram(axes)
-
-        for source_name,psr in self._psr_copies.items():
-            expectation += psr.expectation(Axes(toy_axis), copy = False)
-
-        # Always a copy
-        return expectation
-
-def get_binned_data(event_data:ToyEventData, axis:Axis) -> ToyBinnedData:
-    """
-    Only bins x axis
-    """
-
-    binned_data = Histogram(axis)
-    binned_data.fill(event_data.x)
-
-    return ToyBinnedData(binned_data)
-
-# ======= Actual code. This is how the "tutorial" will look like ================
 
 def main():
 
-    # Binned or unbinned
-    unbinned = True
-    plot = True
-
-    # Set the inputs. These will eventually open file or set specific parameters,
-    # but since we are generating the data and models on the fly, and most parameter
-    # are hardcoded above withing the classes, then it's not necessary here.
+    # This axis is used for binning the data in the binned analysis case
+    # The unbinned analysis also uses the lower and upper limits, as well as for plotting
+    toy_axis = Axis(np.linspace(-5, 5), label='x')
+
+    # Some options
+    unbinned = True # Binned=False or unbinned=True
+    plot = True # Plots the fit
+    use_signal = True # False = bkg-only
+    use_bkg = True # False = signal-only
+
+    # This simulates a stream of events. It can come from a file or some other source
+    # ToyEventDataStream and ToyEventData could have been simplified into a single
+    # class, but I wanted to exercise the case of a consumable stream, which is
+    # cached by ToyEventData and used in the rest of the analysis.
+    # The events have an 'x' value and a time.
+    # For the signal, the 'x' values are randomly drawn from a standard normal distribution
+    # For the background, the 'x' values are randomly drawn from a uniform distribution
+    # The timestamps are randomly drawn from a uniform distribution in both cases.
+    # All the events are time-sorted.
+    data_loader = ToyEventDataStream(nevents_signal= 1000 if use_signal else 0,
+                                     nevents_bkg= 1000 if use_bkg else 0,
+                                     min_value=toy_axis.lo_lim,
+                                     max_value=toy_axis.hi_lim,
+                                     tstart=Time("2000-01-01T00:00:00"),
+                                     tstop=Time("2000-01-02T00:00:00"))
+
+    # Make a selection.
+    # A simple time selection in this case
+    # TimeSelector assumes the events are time-sorted and will stop the stream
+    # of events once tstop is reached
     tstart = Time("2000-01-01T01:00:00")
     tstop = Time("2000-01-01T10:00:00")
     duration = tstop - tstart
 
     selector = TimeSelector(tstart = tstart, tstop = tstop)
 
-    data_loader = ToyEventDataLoader()
     event_data = ToyEventData(data_loader, selector=selector)
 
-    psr = ToyPointSourceResponse(data = event_data, duration = duration)
-    response = ToyModelFolding(data = event_data, psr = psr)
+    # This is the expectation from a single source, which is just the standard normal
+    # distribution
+    # This class handles both the binned and the unbinned case.
+    psr = ToyPointSourceResponse(data = event_data, duration = duration, axis = toy_axis)
+
+    # This combines the expectation from multiple sources
+    model_folding = ToyModelFolding(data = event_data, psr = psr)
 
     if use_bkg:
-        bkg = ToyBkg(data = event_data, duration = duration)
-        expectation_density = SumExpectationDensity(response, bkg)
+        # The expectation from background, which is flat
+        # This class handles both the binned and the unbinned case
+        bkg = ToyBkg(data = event_data, duration = duration, axis = toy_axis)
     else:
         bkg = None
-        expectation_density = response
 
-    ## Source model
-    ## We'll just use the K value in u.cm / u.cm / u.s / u.keV
+    # Source model
+    # Since this is a toy model with no position or energy dependence,
+    # we'll just use the normalization K value and ignore the units
+    # The default units are 1 / (keV s cm2), which make sense for an astrophysical
+    # source, but not for this toy model.
     spectrum = Constant()
 
     if use_signal:
@@ -410,7 +83,7 @@ def main():
         spectrum.k.value = 0
         spectrum.k.free = False
 
-    spectrum.k.units = 1/u.s/u.keV/u.cm/u.cm
+    spectrum.k.min_value = 0
 
     source = PointSource("arbitrary_source_name",
                          l=0, b=0, # Doesn't matter
@@ -418,29 +91,32 @@ def main():
 
     model = Model(source)
 
-    # Optional: Perform a background-only or a null-background fit
-    #bkg = None # Uncomment for no bkg
-    #model = Model() # Uncomment for bkg-only hypothesis
-
+    # Data binning through the interface fill() method
     binned_data = None
     if plot or not unbinned:
-        binned_data = get_binned_data(event_data, toy_axis)
+        binned_data = ToyBinnedData(Histogram(toy_axis))
+        binned_data.fill(event_data)
 
-    # Fit
+    # Set the likelihood function we'll use
     if unbinned:
+        expectation_density = SumExpectationDensity(model_folding, bkg)
         like_fun = UnbinnedLikelihood(expectation_density)
     else:
-        like_fun = PoissonLikelihood(binned_data, response, bkg)
+        like_fun = PoissonLikelihood(binned_data, model_folding, bkg)
 
+    # Instantiate the 3ML plugin
+    # This plugin will internally call
+    # response.set_model() and bkg.set_parameter()
+    # which will cause the like_fun result to change on each call
     cosi = ThreeMLPluginInterface('cosi',
                                   like_fun,
-                                  response = response,
+                                  response = model_folding,
                                   bkg = bkg)
 
-    # Before the fit, you can set the parameters initial values, bounds, etc.
+    # Before the fit, you can set the background parameters initial values, bounds, etc.
     # This is passed to the minimizer.
-    # In addition to model. Nuisance.
+    # The source model parameters were already set above
     if bkg is not None:
         cosi.bkg_parameter['norm'] = Parameter("norm", # background parameter
                                                1, # initial value of parameter
@@ -450,10 +126,10 @@ def main():
                                                delta=0.001, # initial step used by fitting engine
                                                free = True)
 
-    plugins = DataList(cosi)
-    like = JointLikelihood(model, plugins)
+    # Fit
+    plugins = DataList(cosi) # Each instrument or data set
+    like = JointLikelihood(model, plugins) # Everything connects here
 
-    # Run minimizer
     like.fit()
 
     print(like.minimizer)
@@ -461,33 +137,27 @@ def main():
 
     if plot:
         fig, ax = plt.subplots()
-        binned_data.data.plot(ax)
+
         if unbinned:
-            x = [e.x for e in event_data]
-            widths = toy_axis.widths[toy_axis.find_bin(x)]
+            # Divide by bin width to plot the density
+            (binned_data.data/toy_axis.widths).plot(ax)
+
+            # Get the expectation density from the fitted result for each event
             expectation_density_list = np.fromiter(expectation_density.expectation_density(), dtype=float)
-            ax.scatter(x, expectation_density_list * widths, s=1, color='green')
+            ax.scatter(event_data.x, expectation_density_list, s=1, color='green')
+
+            ax.set_ylabel("Counts density")
         else:
-            expectation = response.expectation(binned_data.axes)
+            binned_data.data.plot(ax)
+            expectation = model_folding.expectation(binned_data.axes)
+
            if bkg is not None:
                expectation = expectation + bkg.expectation(binned_data.axes)
-            expectation.plot(ax)
-
-        plt.show()
-
-    # Grid
-    loglike = Histogram([np.linspace(.006, .016, 31), np.linspace(.006, .016, 31)], labels = ['s', 'b'])
-
-    for i,s in enumerate(loglike.axes['s'].centers):
-        for j,b in enumerate(loglike.axes['b'].centers):
-
-            spectrum.k.value = s
-            cosi.bkg_parameter['norm'].value = b
-
-            loglike[i,j] = cosi.get_log_like()
+            expectation.plot(ax)
 
-    loglike.plot()
+        ax.set_ylabel("Counts")
 
         plt.show()

From 5912a90b64ae71c79b5028c7054c9ef06c30d7b6 Mon Sep 17 00:00:00 2001
From: Israel Martinez
Date: Sat, 18 Oct 2025 20:19:03 -0400
Subject: [PATCH 091/133] change ncounts to expected counts and remove start/stop from expectation density

Signed-off-by: Israel Martinez
---
 .../free_norm_threeml_binned_bkg.py        | 15 ++------
 cosipy/interfaces/expectation_interface.py | 32 +++++++------------
 cosipy/statistics/likelihood_functions.py  |  2 +-
 cosipy/threeml/psr_fixed_ei.py             |  6 ++--
 cosipy/threeml/unbinned_model_folding.py   | 10 +++---
 5 files changed, 40 insertions(+), 60 deletions(-)

diff --git a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py
index 7a85cc27..9a1e98f6 100644
--- a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py
+++ b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py
@@ -287,27 +287,16 @@ def __init__(self,
 
         self._prob = np.asarray(self._prob)
 
-    def ncounts(self) -> float:
+    def expected_counts(self) -> float:
         """
         Total expected counts
        """
 
         return self._livetime * self._norm
 
-    def expectation_density(self, start: Optional[int] = None, stop: Optional[int] = None) -> Iterable[float]:
+    def expectation_density(self) -> Iterable[float]:
         """
         Return the expected number of counts density from the start-th event to
         the stop-th event.
This equals the event probabiliy times the number of events - - This is provided as a helper function assuming the child classes implemented event_probability - - Parameters - ---------- - start - stop - - Returns - ------- - """ # Multiply each probability by the norm, and then sum diff --git a/cosipy/interfaces/expectation_interface.py b/cosipy/interfaces/expectation_interface.py index 4b72940f..8893b200 100644 --- a/cosipy/interfaces/expectation_interface.py +++ b/cosipy/interfaces/expectation_interface.py @@ -52,12 +52,12 @@ def event_type(self) -> Type[EventInterface]: The event class that the implementation can handle """ - def ncounts(self) -> float: + def expected_counts(self) -> float: """ Total expected counts """ - def event_probability(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: + def event_probability(self) -> Iterable[float]: """ Return the probability of obtaining the observed set of measurement of each event, given that the event was detected. It equals the expectation density times ncounts @@ -66,14 +66,6 @@ def event_probability(self, start:Optional[int] = None, stop:Optional[int] = Non e.g. if the event measured energy in keV, the units of output of this function are implicitly 1/keV This is provided as a helper function assuming the child classes implemented expectation_density - - - Parameters - ---------- - start : None | int - From beginning by default - stop: None|int - Until the end by default """ # Guard to avoid infinite recursion in incomplete child classes @@ -84,10 +76,10 @@ def event_probability(self, start:Optional[int] = None, stop:Optional[int] = Non cls.expectation_density is ExpectationDensityInterface.expectation_density): raise NotImplementedError("Implement event_probability and/or expectation_density") - ncounts = self.ncounts() - return [expectation/ncounts for expectation in self.expectation_density(start, stop)] + ncounts = self.expected_counts() + return [expectation/ncounts for expectation in self.expectation_density()] - def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: + def expectation_density(self) -> Iterable[float]: """ Return the expected number of counts density from the start-th event to the stop-th event. This equals the event probabiliy times the number of events @@ -111,8 +103,8 @@ def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = N cls.expectation_density is ExpectationDensityInterface.expectation_density): raise NotImplementedError("Implement event_probability and/or expectation_density") - ncounts = self.ncounts() - return [prob*ncounts for prob in self.event_probability(start, stop)] + ncounts = self.expected_counts() + return [prob*ncounts for prob in self.event_probability()] class SumExpectationDensity(ExpectationDensityInterface): """ @@ -120,7 +112,7 @@ class SumExpectationDensity(ExpectationDensityInterface): """ def __init__(self, *expectations:Tuple[ExpectationDensityInterface, None]): - # Remove None. 
Accept None for convenience + # Allow None for convenience, we should remove it self._expectations = tuple(ex for ex in expectations if ex is not None) self._event_type = expectations[0].event_type @@ -136,15 +128,15 @@ def event_type(self) -> Type[EventInterface]: """ return self._event_type - def ncounts(self) -> float: + def expected_counts(self) -> float: """ Total expected counts """ - return sum(ex.ncounts() for ex in self._expectations) + return sum(ex.expected_counts() for ex in self._expectations) - def expectation_density(self, start: Optional[int] = None, stop: Optional[int] = None) -> Iterable[float]: + def expectation_density(self) -> Iterable[float]: - for exdensity in zip(*[ex.expectation_density(start, stop) for ex in self._expectations]): + for exdensity in zip(*[ex.expectation_density() for ex in self._expectations]): yield sum(exdensity) diff --git a/cosipy/statistics/likelihood_functions.py b/cosipy/statistics/likelihood_functions.py index 56ad2f66..8ab5cd29 100644 --- a/cosipy/statistics/likelihood_functions.py +++ b/cosipy/statistics/likelihood_functions.py @@ -54,7 +54,7 @@ def nobservations(self) -> int: def get_log_like(self) -> float: # Total number of events - ntot = self._expectation.ncounts() + ntot = self._expectation.expected_counts() # It's faster to compute all log values at once, but requires keeping them in memory # Doing it by chunk is a compromise. We might need to adjust the chunk_size diff --git a/cosipy/threeml/psr_fixed_ei.py b/cosipy/threeml/psr_fixed_ei.py index e8ff535f..e06af95c 100644 --- a/cosipy/threeml/psr_fixed_ei.py +++ b/cosipy/threeml/psr_fixed_ei.py @@ -184,7 +184,7 @@ def _update_cache(self): self._last_convolved_source_dict = source_dict self._last_convolved_source_skycoord = coord.copy() - def ncounts(self) -> float: + def expected_counts(self) -> float: """ Total expected counts """ @@ -194,7 +194,7 @@ def ncounts(self) -> float: return self._nevents - def event_probability(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: + def event_probability(self) -> Iterable[float]: """ Return the expected number of counts density from the start-th event to the stop-th event. @@ -209,4 +209,4 @@ def event_probability(self, start:Optional[int] = None, stop:Optional[int] = Non self._update_cache() - return self._event_prob[start:stop] + return self._event_prob diff --git a/cosipy/threeml/unbinned_model_folding.py b/cosipy/threeml/unbinned_model_folding.py index 62a2f031..297b9441 100644 --- a/cosipy/threeml/unbinned_model_folding.py +++ b/cosipy/threeml/unbinned_model_folding.py @@ -47,16 +47,16 @@ def set_model(self, model: Model): """ self._model = model - def ncounts(self) -> float: + def expected_counts(self) -> float: """ Total expected counts """ self._cache_source_responses() - return sum(s.ncounts() for s in self._source_responses.values()) + return sum(s.expected_counts() for s in self._source_responses.values()) - def event_probability(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: + def event_probability(self) -> Iterable[float]: """ Return the expected number of counts density from the start-th event to the stop-th event. 
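# For orientation, the expected_counts()/expectation_density() pair used above is
# what an extended unbinned likelihood consumes. A minimal sketch of such a
# likelihood (assuming an object implementing those two interface methods; this
# is an illustration, not the actual UnbinnedLikelihood implementation):
import numpy as np

def unbinned_log_like_sketch(expectation):
    # -expected counts + sum of log expectation densities, one term per event
    ntot = expectation.expected_counts()
    densities = np.fromiter(expectation.expectation_density(), dtype=float)
    return -ntot + np.sum(np.log(densities))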
@@ -71,7 +71,7 @@ def event_probability(self, start:Optional[int] = None, stop:Optional[int] = Non self._cache_source_responses() - sources_expectation_iter = itertools.product(*(s.expectation_density(start, stop) for s in self._source_responses.values())) - ncounts = self.ncounts() + sources_expectation_iter = itertools.product(*(s.expectation_density() for s in self._source_responses.values())) + ncounts = self.expected_counts() return [sum(expectations)/ncounts for expectations in sources_expectation_iter] diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py index 12b362f5..d8ba6955 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py @@ -68,7 +68,7 @@ import matplotlib.pyplot as plt from threeML import Band, PointSource, Model, JointLikelihood, DataList -from astromodels import Parameter +from astromodels import Parameter, Powerlaw from pathlib import Path @@ -222,7 +222,7 @@ def main(): 1, # initial value of parameter unit = u.Hz, min_value=0, # minimum value of parameter - max_value=5, # maximum value of parameter + max_value=100, # maximum value of parameter delta=0.05, # initial step used by fitting engine ) @@ -266,16 +266,13 @@ def main(): spectrum.alpha.delta = 0.01 spectrum.beta.delta = 0.01 - source = PointSource("source", # Name of source (arbitrary, but needs to be unique) - l = l, # Longitude (deg) - b = b, # Latitude (deg) - spectral_shape = spectrum) # Spectral model + source = PointSource("source", # Name of source (arbitrary, but needs to be unique) + l=l, # Longitude (deg) + b=b, # Latitude (deg) + spectral_shape=spectrum) # Spectral model - # Optional: free the position parameters - #source.position.l.free = True - #source.position.b.free = True - - model = Model(source) # Model with single source. If we had multiple sources, we would do Model(source1, source2, ...) + model = Model( + source) # Model with single source. If we had multiple sources, we would do Model(source1, source2, ...) 
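    # As a concrete illustration of the multi-source case mentioned in the comment
    # above (the second source and its coordinates are purely illustrative; Powerlaw
    # appears to be imported at the top of this example):
    second_spectrum = Powerlaw()
    second_source = PointSource("second_source",
                                l=0.0, b=0.0,  # illustrative coordinates
                                spectral_shape=second_spectrum)
    multi_source_model = Model(source, second_source)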
# Optional: if you want to call get_log_like manually, then you also need to set the model manually # 3ML does this internally during the fit though @@ -406,6 +403,8 @@ def main(): ax.legend() + ax.set_ylim(.1,100) + plt.show() # Plot the fitted spectrum convolved with the response, as well as the simulated source counts diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py index 3388b6bb..bf34be5b 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py @@ -281,7 +281,7 @@ def main(): like = JointLikelihood(model, plugins, verbose = False) # Run - print(data.nevents, expectation_density.ncounts()) + print(data.nevents, expectation_density.expected_counts()) profile.enable() like.fit() profile.disable() diff --git a/docs/api/interfaces/examples/toy/toy_implementations.py b/docs/api/interfaces/examples/toy/toy_implementations.py index c688bcbf..b36ca8d4 100644 --- a/docs/api/interfaces/examples/toy/toy_implementations.py +++ b/docs/api/interfaces/examples/toy/toy_implementations.py @@ -197,7 +197,7 @@ def event_type(self) -> Type[EventInterface]: def set_parameters(self, **parameters:u.Quantity) -> None: self._norm = parameters['norm'].to_value(u.Hz) - def ncounts(self) -> float: + def expected_counts(self) -> float: return self._norm * self._duration def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = None) -> Iterable[float]: @@ -234,7 +234,7 @@ def __init__(self, data: ToyEventData, duration:Quantity, axis:Axis): def event_type(self) -> Type[EventInterface]: return ToyEvent - def ncounts(self) -> float: + def expected_counts(self) -> float: if self._source is None: raise RuntimeError("Set a source first") @@ -295,12 +295,12 @@ def __init__(self, data:ToyEventData, psr: ToyPointSourceResponse): def event_type(self): return ToyEvent - def ncounts(self) -> float: + def expected_counts(self) -> float: ncounts = 0 for source_name,psr in self._psr_copies.items(): - ncounts += psr.ncounts() + ncounts += psr.expected_counts() return ncounts @@ -312,7 +312,7 @@ def expectation_density(self, start:Optional[int] = None, stop:Optional[int] = N for _ in itertools.islice(self._data, start, stop): yield 0 else: - for expectation in zip(*[p.expectation_density(start, stop) for p in self._psr_copies.values()]): + for expectation in zip(*[p.expectation_density() for p in self._psr_copies.values()]): yield np.sum(expectation) def set_model(self, model: Model): diff --git a/docs/api/interfaces/examples/toy/toy_interfaces_example.py b/docs/api/interfaces/examples/toy/toy_interfaces_example.py index 06daeeb5..e1119c8e 100644 --- a/docs/api/interfaces/examples/toy/toy_interfaces_example.py +++ b/docs/api/interfaces/examples/toy/toy_interfaces_example.py @@ -24,7 +24,7 @@ def main(): toy_axis = Axis(np.linspace(-5, 5), label='x') # Some options - unbinned = True # Binned=False or unbinned=True + unbinned = False # Binned=False or unbinned=True plot = True # Plots the fit use_signal = True # False = bkg-only use_bkg = True # False = signal-only From 2eeba6408cad33b7f6f0259c94eb93e64c595f8f Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sun, 19 Oct 2025 21:56:13 -0400 Subject: [PATCH 092/133] fit one full orbit Signed-off-by: Israel Martinez --- ...xample_crab_fit_threeml_plugin_unbinned_interfaces.py | 9 
++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py index bf34be5b..77e66f3e 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_unbinned_interfaces.py @@ -85,8 +85,9 @@ def main(): output=str(binned_bkg_data_path), checksum = '54221d8556eb4ef520ef61da8083e7f4') # orientation history - tstart = Time("2028-03-01 01:35:00.117") - tstop = Time("2028-03-01 02:35:00.117") + # About 1 full orbit ~1.7 hr + tstart = Time("2028-03-01 02:00:00.117") + tstop = Time("2028-03-01 03:42:00.117") sc_orientation = SpacecraftHistory.open(sc_orientation_path) sc_orientation = sc_orientation.select_interval(tstart, tstop) @@ -128,7 +129,9 @@ def main(): bkg = None # Prepare point source response, which convolved the IRF with the SC orientation - psr = UnbinnedThreeMLPointSourceResponseTrapz(data, irf, sc_orientation, dr.axes['Ei'].centers) + ei_samples = np.geomspace(100, 5000, 100)*u.keV + psr = UnbinnedThreeMLPointSourceResponseTrapz(data, irf, sc_orientation, + ei_samples) # Prepare the model l = 184.56 From 62079b3ca0027e4af1522414c9c87950eacfd1be Mon Sep 17 00:00:00 2001 From: Hiroki Yoneda Date: Mon, 20 Oct 2025 16:51:24 -0500 Subject: [PATCH 093/133] Modifed GTI class --- cosipy/event_selection/good_time_interval.py | 32 ++++++++------------ 1 file changed, 13 insertions(+), 19 deletions(-) diff --git a/cosipy/event_selection/good_time_interval.py b/cosipy/event_selection/good_time_interval.py index d307a6c7..23024a63 100644 --- a/cosipy/event_selection/good_time_interval.py +++ b/cosipy/event_selection/good_time_interval.py @@ -14,12 +14,6 @@ def __init__(self, starts, stops): Start times of GTI intervals stops : astropy.time.Time (array) Stop times of GTI intervals - - Notes - ----- - Currently, unix + utc is assumed. - When the default time format/system is fixed, - this class should be modified. """ # Check that starts and stops have the same scale if not np.all(starts.scale == stops.scale): @@ -29,6 +23,10 @@ def __init__(self, starts, stops): if starts.format != stops.format: raise ValueError(f"Time format mismatch between starts ({starts.format}) and stops ({stops.format})") + # Check that starts and stops have the same length + if len(starts) != len(stops): + raise ValueError(f"Length mismatch between starts ({len(starts)}) and stops ({len(stops)})") + self.starts = starts self.stops = stops @@ -102,7 +100,7 @@ def is_in_gti(self, time): # Check time scale if time.scale != self.starts.scale: raise ValueError(f"Time scale mismatch. Expected {self.starts.scale.upper()}, " - f"got {time.scale.upper()}") + f"got {time.scale.upper()}") # Get values using the format attribute time_format = self.starts.format @@ -130,7 +128,7 @@ def is_in_gti(self, time): return result, indices - def save_as_fits(self, filename, overwrite=False, output_format='unix', output_unit='s'): + def save_as_fits(self, filename, overwrite=False, output_format='unix'): """ Save GTI data to a FITS file. @@ -142,8 +140,6 @@ def save_as_fits(self, filename, overwrite=False, output_format='unix', output_u If True, overwrite existing file (default: False) output_format : str, optional Time format for output (e.g., 'unix', 'mjd'). Default: 'unix' - output_unit : str, optional - Time unit for output. 
Default: 's' """ # Get values in the specified output format using getattr if not hasattr(self.starts, output_format): @@ -158,6 +154,9 @@ def save_as_fits(self, filename, overwrite=False, output_format='unix', output_u # Create primary HDU primary_hdu = fits.PrimaryHDU() primary_hdu.header['TIMESYS'] = output_scale.upper() + output_unit = 's' + if output_format in ['jd', 'mjd']: + output_unit = 'd' primary_hdu.header['TIMEUNIT'] = output_unit # Define table columns @@ -169,6 +168,7 @@ def save_as_fits(self, filename, overwrite=False, output_format='unix', output_u table_hdu.header['EXTNAME'] = 'GTI' table_hdu.header['TIMESYS'] = output_scale.upper() table_hdu.header['TIMEUNIT'] = output_unit + table_hdu.header['TIMEFORMAT'] = output_format # Create HDUList and write to FITS file hdul = fits.HDUList([primary_hdu, table_hdu]) @@ -180,8 +180,6 @@ def from_fits(cls, filename): Load GTI from a FITS file. Reads time format and scale from FITS header. - Currently supports UNIX time format. - TODO: Add support for MJD, MET (MJDREFI/MJDREFF) formats. Parameters ---------- @@ -206,13 +204,9 @@ def from_fits(cls, filename): infile.close() raise ValueError("GTI table not found in FITS file") - # Read time system from header - time_scale = gti_hdu.header.get('TIMESYS', 'UTC').lower() - time_unit = gti_hdu.header.get('TIMEUNIT', 's') - - # TODO: Auto-detect time format from header or data - # For now, assume UNIX time - time_format = 'unix' + # Read time system/format from header + time_scale = gti_hdu.header.get('TIMESYS', 'utc').lower() + time_format = gti_hdu.header.get('TIMEFORMAT', 'unix').lower() # Read start and stop times as arrays starts = Time(gti_hdu.data['TSTART'], format=time_format, scale=time_scale) From 5750ba7dbfcca6483b10f629f6ffa5fa27d795e7 Mon Sep 17 00:00:00 2001 From: Hiroki Yoneda Date: Mon, 20 Oct 2025 16:52:19 -0500 Subject: [PATCH 094/133] Added EventSelectorGTI --- cosipy/event_selection/gti_event_selection.py | 57 +++++++++++++++++++ 1 file changed, 57 insertions(+) create mode 100644 cosipy/event_selection/gti_event_selection.py diff --git a/cosipy/event_selection/gti_event_selection.py b/cosipy/event_selection/gti_event_selection.py new file mode 100644 index 00000000..ec3fb6cc --- /dev/null +++ b/cosipy/event_selection/gti_event_selection.py @@ -0,0 +1,57 @@ +import itertools +from typing import Union, Iterable + +import numpy as np +from astropy.time import Time + +from cosipy.interfaces import TimeTagEventInterface, EventInterface +from cosipy.interfaces.event_selection import EventSelectorInterface +from cosipy.util.iterables import itertools_batched + +from .good_time_interval import GoodTimeInterval + +class EventSelectorGTI(EventSelectorInterface): + + def __init__(self, gti:GoodTimeInterval, batch_size:int = 10000): + """ + Assumes events are time-ordered + + Parameters + ---------- + gti: + batch_size: + """ + self._gti = gti + + self._batch_size = batch_size + + def _select(self, event:TimeTagEventInterface) -> bool: + # Single event + return next(iter(self.select([event]))) + + def select(self, events:Union[TimeTagEventInterface, Iterable[TimeTagEventInterface]]) -> Union[bool, Iterable[bool]]: + + if isinstance(events, EventInterface): + # Single event + return self._select(events) + else: + # Multiple + + # Working in chunks/batches. 
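# For reference, itertools_batched is assumed to behave like itertools.batched
# from Python 3.12 (the actual cosipy.util.iterables helper may differ); a
# standalone sketch of that behaviour:
import itertools

def batched_sketch(iterable, n):
    # yield successive tuples with at most n items each
    it = iter(iterable)
    while batch := tuple(itertools.islice(it, n)):
        yield batch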
+ # This can optimized based on the system + + for chunk in itertools_batched(events, self._batch_size): + + jd1 = [] + jd2 = [] + + for event in chunk: + jd1.append(event.jd1) + jd2.append(event.jd2) + + time = Time(jd1, jd2, format = 'jd') + + selected, gti_index = self._gti.is_in_gti(time) + + for sel in selected: + yield sel From d3251eb7b6a872d2ee3b0c8217ad024f7c41c800 Mon Sep 17 00:00:00 2001 From: Hiroki Yoneda Date: Tue, 21 Oct 2025 10:43:41 -0500 Subject: [PATCH 095/133] Add MutliTimeSelector, removed GTISelector --- ...t_selection.py => multi_time_selection.py} | 40 +++++++++++++++---- 1 file changed, 33 insertions(+), 7 deletions(-) rename cosipy/event_selection/{gti_event_selection.py => multi_time_selection.py} (52%) diff --git a/cosipy/event_selection/gti_event_selection.py b/cosipy/event_selection/multi_time_selection.py similarity index 52% rename from cosipy/event_selection/gti_event_selection.py rename to cosipy/event_selection/multi_time_selection.py index ec3fb6cc..971caa3e 100644 --- a/cosipy/event_selection/gti_event_selection.py +++ b/cosipy/event_selection/multi_time_selection.py @@ -8,22 +8,45 @@ from cosipy.interfaces.event_selection import EventSelectorInterface from cosipy.util.iterables import itertools_batched -from .good_time_interval import GoodTimeInterval -class EventSelectorGTI(EventSelectorInterface): +class MultiTimeSelector(EventSelectorInterface): - def __init__(self, gti:GoodTimeInterval, batch_size:int = 10000): + def __init__(self, tstart_list:Time = None, tstop_list:Time = None, batch_size:int = 10000): """ Assumes events are time-ordered Parameters ---------- - gti: + tstart_list: + tstop_list: batch_size: """ - self._gti = gti + if tstart_list.isscalar == True: + tstart_list = Time([tstart_list]) + if tstop_list.isscalar == True: + tstop_list = Time([tstop_list]) + + self._tstart_list = tstart_list + self._tstop_list = tstop_list self._batch_size = batch_size + + @classmethod + def load_GTI(cls, gti, batch_size:int = 10000): + """ + Instantiate a multi time selector from a good time intervals. 
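        A usage sketch (the times are illustrative, and the GoodTimeInterval is
        assumed to expose tstart_list/tstop_list as astropy Time arrays, as in
        cosipy.event_selection.good_time_interval):

            from astropy.time import Time
            from cosipy.event_selection.good_time_interval import GoodTimeInterval

            gti = GoodTimeInterval(Time(["2028-03-01T02:00:00"], scale="utc"),
                                   Time(["2028-03-01T03:42:00"], scale="utc"))
            selector = MultiTimeSelector.load_GTI(gti)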
+ + Parameters + ---------- + gti: + batch_size: + """ + tstart_list = gti.tstart_list + tstop_list = gti.tstop_list + + selector = cls(tstart_list, tstop_list, batch_size) + + return selector def _select(self, event:TimeTagEventInterface) -> bool: # Single event @@ -51,7 +74,10 @@ def select(self, events:Union[TimeTagEventInterface, Iterable[TimeTagEventInterf time = Time(jd1, jd2, format = 'jd') - selected, gti_index = self._gti.is_in_gti(time) + indices = np.searchsorted(self._tstart_list, time, side='right') - 1 + valid = (indices >= 0) & (indices < len(self._tstop_list)) + result = np.zeros(len(time), dtype=bool) + result[valid] = time[valid] <= self._tstop_list[indices[valid]] - for sel in selected: + for sel in result: yield sel From f64cd6acbfbd3a2333b0c728f00091bc7a0517c2 Mon Sep 17 00:00:00 2001 From: Hiroki Yoneda Date: Tue, 21 Oct 2025 10:44:01 -0500 Subject: [PATCH 096/133] Modified GoodTimeInterval - removed a method is_is_gti - renamed parameters of tstart and tstop - added property method for tstart and tstop --- cosipy/event_selection/good_time_interval.py | 116 +++++++------------ 1 file changed, 39 insertions(+), 77 deletions(-) diff --git a/cosipy/event_selection/good_time_interval.py b/cosipy/event_selection/good_time_interval.py index 23024a63..ae8e8d14 100644 --- a/cosipy/event_selection/good_time_interval.py +++ b/cosipy/event_selection/good_time_interval.py @@ -4,38 +4,52 @@ class GoodTimeInterval(): - def __init__(self, starts, stops): + def __init__(self, tstart_list, tstop_list): """ Initialize GTI object. Parameters ---------- - starts : astropy.time.Time (array) + tstart_list : astropy.time.Time (array) Start times of GTI intervals - stops : astropy.time.Time (array) + tstop_list : astropy.time.Time (array) Stop times of GTI intervals """ + # Check that starts and stops are scalar + if tstart_list.isscalar == True: + tstart_list = Time([tstart_list]) + if tstop_list.isscalar == True: + tstop_list = Time([tstop_list]) + # Check that starts and stops have the same scale - if not np.all(starts.scale == stops.scale): - raise ValueError(f"Time scale mismatch between starts ({starts.scale}) and stops ({stops.scale})") + if not np.all(tstart_list.scale == tstop_list.scale): + raise ValueError(f"Time scale mismatch between starts ({tstart_list.scale}) and stops ({tstop_list.scale})") # Check that starts and stops have the same format - if starts.format != stops.format: - raise ValueError(f"Time format mismatch between starts ({starts.format}) and stops ({stops.format})") + if tstart_list.format != tstop_list.format: + raise ValueError(f"Time format mismatch between starts ({tstart_list.format}) and stops ({tstop_list.format})") # Check that starts and stops have the same length - if len(starts) != len(stops): - raise ValueError(f"Length mismatch between starts ({len(starts)}) and stops ({len(stops)})") + if len(tstart_list) != len(tstop_list): + raise ValueError(f"Length mismatch between starts ({len(tstart_list)}) and stops ({len(tstop_list)})") - self.starts = starts - self.stops = stops + self._tstart_list = tstart_list + self._tstop_list = tstop_list # Sort by start time self.sort() + + @property + def tstart_list(self): + return self._tstart_list + + @property + def tstop_list(self): + return self._tstop_list def __len__(self): """Return the number of GTI intervals.""" - return len(self.starts) + return len(self._tstart_list) def __getitem__(self, index): """ @@ -49,9 +63,9 @@ def __getitem__(self, index): Returns ------- tuple of (Time, Time) - (starts, stops) 
for the indexed interval(s) + (tstart_list, tstop_list) for the indexed interval(s) """ - return self.starts[index], self.stops[index] + return self._tstart_list[index], self._tstop_list[index] def __iter__(self): """ @@ -62,7 +76,7 @@ def __iter__(self): tuple of (Time, Time) Each (start, stop) pair """ - for start, stop in zip(self.starts, self.stops): + for start, stop in zip(self._tstart_list, self._tstop_list): yield start, stop def sort(self): @@ -72,61 +86,9 @@ def sort(self): Modifies the GTI in place. Stops are sorted according to the start time order. """ - sort_idx = np.argsort(self.starts) - self.starts = self.starts[sort_idx] - self.stops = self.stops[sort_idx] - - def is_in_gti(self, time): - """ - Check if a time (or list of times) is within any GTI interval. - - Uses binary search for efficiency, assuming GTI is sorted. - - Parameters - ---------- - time : astropy.time.Time - Time or times to check (scalar or array) - Must be in the same time scale as the GTI. - - Returns - ------- - bool or numpy.ndarray of bool - True if time is within GTI, False otherwise. - If input is array, returns array of booleans. - int or numpy.ndarray of int - Index of the GTI interval containing the time(s). - -1 if not in any GTI interval. - """ - # Check time scale - if time.scale != self.starts.scale: - raise ValueError(f"Time scale mismatch. Expected {self.starts.scale.upper()}, " - f"got {time.scale.upper()}") - - # Get values using the format attribute - time_format = self.starts.format - starts_value = getattr(self.starts, time_format) - stops_value = getattr(self.stops, time_format) - times_value = getattr(time, time_format) - - # Check if time is scalar or array - if time.isscalar: - # Single time - idx = np.searchsorted(starts_value, times_value, side='right') - 1 - if idx >= 0 and idx < len(stops_value): - if times_value <= stops_value[idx]: - return True, idx - return False, -1 - else: - # Array of times - vectorized with np.searchsorted - indices = np.searchsorted(starts_value, times_value, side='right') - 1 - - # Check validity and whether times fall within GTI intervals - valid = (indices >= 0) & (indices < len(stops_value)) - result = np.zeros(len(time), dtype=bool) - result[valid] = times_value[valid] <= stops_value[indices[valid]] - indices[~result] = -1 - - return result, indices + sort_idx = np.argsort(self._tstart_list) + self._tstart_list = self._tstart_list[sort_idx] + self._tstop_list = self._tstop_list[sort_idx] def save_as_fits(self, filename, overwrite=False, output_format='unix'): """ @@ -142,14 +104,14 @@ def save_as_fits(self, filename, overwrite=False, output_format='unix'): Time format for output (e.g., 'unix', 'mjd'). 
Default: 'unix' """ # Get values in the specified output format using getattr - if not hasattr(self.starts, output_format): + if not hasattr(self._tstart_list, output_format): raise ValueError(f"Unsupported output format: {output_format}") - start_times = getattr(self.starts, output_format) - stop_times = getattr(self.stops, output_format) + start_times = getattr(self._tstart_list, output_format) + stop_times = getattr(self._tstop_list, output_format) # Use the scale from the stored Time objects - output_scale = self.starts.scale + output_scale = self._tstart_list.scale # Create primary HDU primary_hdu = fits.PrimaryHDU() @@ -209,8 +171,8 @@ def from_fits(cls, filename): time_format = gti_hdu.header.get('TIMEFORMAT', 'unix').lower() # Read start and stop times as arrays - starts = Time(gti_hdu.data['TSTART'], format=time_format, scale=time_scale) - stops = Time(gti_hdu.data['TSTOP'], format=time_format, scale=time_scale) + tstart_list = Time(gti_hdu.data['TSTART'], format=time_format, scale=time_scale) + tstop_list = Time(gti_hdu.data['TSTOP'], format=time_format, scale=time_scale) infile.close() - return cls(starts, stops) + return cls(tstart_list, tstop_list) From 8716aed2e8f204feede5d2edb0af2394762331b4 Mon Sep 17 00:00:00 2001 From: Hiroki Yoneda Date: Tue, 21 Oct 2025 10:54:55 -0500 Subject: [PATCH 097/133] Removed unnecessary functions for GoodTimeIntervals for this PR --- cosipy/event_selection/good_time_interval.py | 131 ------------------- 1 file changed, 131 deletions(-) diff --git a/cosipy/event_selection/good_time_interval.py b/cosipy/event_selection/good_time_interval.py index ae8e8d14..2cb34474 100644 --- a/cosipy/event_selection/good_time_interval.py +++ b/cosipy/event_selection/good_time_interval.py @@ -20,18 +20,6 @@ def __init__(self, tstart_list, tstop_list): tstart_list = Time([tstart_list]) if tstop_list.isscalar == True: tstop_list = Time([tstop_list]) - - # Check that starts and stops have the same scale - if not np.all(tstart_list.scale == tstop_list.scale): - raise ValueError(f"Time scale mismatch between starts ({tstart_list.scale}) and stops ({tstop_list.scale})") - - # Check that starts and stops have the same format - if tstart_list.format != tstop_list.format: - raise ValueError(f"Time format mismatch between starts ({tstart_list.format}) and stops ({tstop_list.format})") - - # Check that starts and stops have the same length - if len(tstart_list) != len(tstop_list): - raise ValueError(f"Length mismatch between starts ({len(tstart_list)}) and stops ({len(tstop_list)})") self._tstart_list = tstart_list self._tstop_list = tstop_list @@ -47,38 +35,6 @@ def tstart_list(self): def tstop_list(self): return self._tstop_list - def __len__(self): - """Return the number of GTI intervals.""" - return len(self._tstart_list) - - def __getitem__(self, index): - """ - Get GTI interval(s) by index. - - Parameters - ---------- - index : int, slice, or array-like - Index, slice, or boolean/integer array to retrieve - - Returns - ------- - tuple of (Time, Time) - (tstart_list, tstop_list) for the indexed interval(s) - """ - return self._tstart_list[index], self._tstop_list[index] - - def __iter__(self): - """ - Iterate over GTI intervals. - - Yields - ------ - tuple of (Time, Time) - Each (start, stop) pair - """ - for start, stop in zip(self._tstart_list, self._tstop_list): - yield start, stop - def sort(self): """ Sort GTI by start time in ascending order. 
@@ -89,90 +45,3 @@ def sort(self): sort_idx = np.argsort(self._tstart_list) self._tstart_list = self._tstart_list[sort_idx] self._tstop_list = self._tstop_list[sort_idx] - - def save_as_fits(self, filename, overwrite=False, output_format='unix'): - """ - Save GTI data to a FITS file. - - Parameters - ---------- - filename : str - Output FITS filename - overwrite : bool, optional - If True, overwrite existing file (default: False) - output_format : str, optional - Time format for output (e.g., 'unix', 'mjd'). Default: 'unix' - """ - # Get values in the specified output format using getattr - if not hasattr(self._tstart_list, output_format): - raise ValueError(f"Unsupported output format: {output_format}") - - start_times = getattr(self._tstart_list, output_format) - stop_times = getattr(self._tstop_list, output_format) - - # Use the scale from the stored Time objects - output_scale = self._tstart_list.scale - - # Create primary HDU - primary_hdu = fits.PrimaryHDU() - primary_hdu.header['TIMESYS'] = output_scale.upper() - output_unit = 's' - if output_format in ['jd', 'mjd']: - output_unit = 'd' - primary_hdu.header['TIMEUNIT'] = output_unit - - # Define table columns - col1 = fits.Column(name='TSTART', format='D', unit=output_unit, array=start_times) - col2 = fits.Column(name='TSTOP', format='D', unit=output_unit, array=stop_times) - - # Create table HDU - table_hdu = fits.BinTableHDU.from_columns([col1, col2]) - table_hdu.header['EXTNAME'] = 'GTI' - table_hdu.header['TIMESYS'] = output_scale.upper() - table_hdu.header['TIMEUNIT'] = output_unit - table_hdu.header['TIMEFORMAT'] = output_format - - # Create HDUList and write to FITS file - hdul = fits.HDUList([primary_hdu, table_hdu]) - hdul.writeto(filename, overwrite=overwrite) - - @classmethod - def from_fits(cls, filename): - """ - Load GTI from a FITS file. - - Reads time format and scale from FITS header. - - Parameters - ---------- - filename : str - Input FITS filename - - Returns - ------- - GoodTimeIntervals - GTI object - """ - infile = fits.open(filename) - - # Search for GTI extension - gti_hdu = None - for hdu in infile: - if isinstance(hdu, fits.BinTableHDU) and hdu.name in ['GTI']: - gti_hdu = hdu - break - - if gti_hdu is None: - infile.close() - raise ValueError("GTI table not found in FITS file") - - # Read time system/format from header - time_scale = gti_hdu.header.get('TIMESYS', 'utc').lower() - time_format = gti_hdu.header.get('TIMEFORMAT', 'unix').lower() - - # Read start and stop times as arrays - tstart_list = Time(gti_hdu.data['TSTART'], format=time_format, scale=time_scale) - tstop_list = Time(gti_hdu.data['TSTOP'], format=time_format, scale=time_scale) - - infile.close() - return cls(tstart_list, tstop_list) From a1b0e8a6fb079bd0e3f2e6c5785163da891f23ad Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Wed, 22 Oct 2025 15:50:03 -0400 Subject: [PATCH 098/133] Fix issue with sum of expectations densities. Caught by Pascal. 
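For context, the corrected combination below sums the per-event densities
element-wise across sources, so each event gets a single total density value. A
minimal standalone sketch of that behaviour (the list values are illustrative):

    source_a = [1.0, 4.0, 2.0]   # density at events 1..3 for one source
    source_b = [3.0, 1.0, 1.0]   # density at the same events for another source
    total = [sum(values) for values in zip(source_a, source_b)]
    # total -> [4.0, 5.0, 3.0], one value per event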
Signed-off-by: Israel Martinez --- cosipy/threeml/unbinned_model_folding.py | 17 +++-------------- 1 file changed, 3 insertions(+), 14 deletions(-) diff --git a/cosipy/threeml/unbinned_model_folding.py b/cosipy/threeml/unbinned_model_folding.py index 297b9441..8e448bb0 100644 --- a/cosipy/threeml/unbinned_model_folding.py +++ b/cosipy/threeml/unbinned_model_folding.py @@ -56,22 +56,11 @@ def expected_counts(self) -> float: return sum(s.expected_counts() for s in self._source_responses.values()) - def event_probability(self) -> Iterable[float]: + def expectation_density(self) -> Iterable[float]: """ - Return the expected number of counts density from the start-th event - to the stop-th event. - - Parameters - ---------- - start : None | int - From beginning by default - stop: None|int - Until the end by default + Sum of expectation density """ self._cache_source_responses() - sources_expectation_iter = itertools.product(*(s.expectation_density() for s in self._source_responses.values())) - ncounts = self.expected_counts() - - return [sum(expectations)/ncounts for expectations in sources_expectation_iter] + return [sum(expectations) for expectations in zip(*(s.expectation_density() for s in self._source_responses.values()))] From b68d24279612fe2301c45cb5132b06da705b1bd0 Mon Sep 17 00:00:00 2001 From: GallegoSav Date: Wed, 22 Oct 2025 23:03:01 +0200 Subject: [PATCH 099/133] commit --- .../free_norm_threeml_binned_bkg.py | 2 +- ...mple_crab_fit_threeml_plugin_interfaces.py | 225 +++++++++++++----- 2 files changed, 173 insertions(+), 54 deletions(-) diff --git a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py index 9a1e98f6..abeb2561 100644 --- a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py +++ b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py @@ -56,7 +56,7 @@ def __init__(self, # Multiple label components. self._distributions = distribution self._norms = np.ones(self.ncomponents) # Hz Each component - self._norm = len(self.labels) # Hz. Total + self._norm = np.sum(self._norms) # Hz. Total self._labels = tuple(self._distributions.keys()) diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py index d8ba6955..664ead8a 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py @@ -55,6 +55,7 @@ from cosipy.background_estimation import FreeNormBinnedBackground from cosipy.interfaces import ThreeMLPluginInterface from cosipy.response import BinnedThreeMLModelFolding, BinnedInstrumentResponse, BinnedThreeMLPointSourceResponse +from cosipy.data_io import EmCDSBinnedData import sys @@ -81,60 +82,80 @@ def main(): # Define the path to the directory containing the data, detector response, orientation file, and yaml files if they have already been downloaded, or the directory to download the files into - # In[2]: + data_path = Path("/localscratch/sgallego/linkToXauron/COSIpyData/DC3_data") # /path/to/files. Current dir by default - data_path = Path("") # /path/to/files. 
Current dir by default + # Download the orientation file - # Download the orientation file (684.38 MB) + # In[ ]: + sc_orientation_path = data_path / "DC3_final_530km_3_month_with_slew_15sbins_GalacticEarth_SAA.ori" + #fetch_wasabi_file('COSI-SMEX/DC3/Data/Orientation/DC3_final_530km_3_month_with_slew_15sbins_GalacticEarth_SAA.ori', + # output=str(sc_orientation_path), checksum = 'e5e71e3528e39b855b0e4f74a1a2eebe') - # In[ ]: + # Download the binned Crab data - sc_orientation_path = data_path / "20280301_3_month_with_orbital_info.ori" - fetch_wasabi_file('COSI-SMEX/DC2/Data/Orientation/20280301_3_month_with_orbital_info.ori', - output=str(sc_orientation_path), checksum = '416fcc296fc37a056a069378a2d30cb2') + # In[7]: - # Download the binned Crab+background data (99.16 MB) + #crab_data_path = data_path / "Binneddata_crab_powerlaw.extracted.filtered.tra.gz.fits.gz.hdf5" + crab_data_path = data_path / "crab_standard_3months_binned_data_filtered_with_SAAcut.fits.gz.hdf5" - # In[5]: + #fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/crab_standard_3months_binned_data_filtered_with_SAAcut.fits.gz.hdf5', + # output=str(crab_data_path), checksum = '405862396dea2be79d7892d6d5bb50d8') - crab_bkg_data_path = data_path / "crab_bkg_binned_data.hdf5" - fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/crab_bkg_binned_data.hdf5', - output=str(crab_bkg_data_path), checksum = '85658e102414c4f746e64a7d29c607a4') + + #bkg_data_path = data_path / "Total_BG_3months_binned_data_filtered_with_SAAcut_SAAreducedHEPD01_DC3binning.hdf5" + bkg_data_path = [ data_path / "PrimaryProtons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5", + data_path / "PrimaryAlphas_WithDetCstbinned_data_filtered_with_SAAcut.hdf5", + data_path / "AlbedoPhotons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5", + data_path / "AlbedoNeutrons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5", + data_path / "CosmicPhotons_3months_binned_data_filtered_with_SAAcut.hdf5", + data_path / "GalTotal_SA100_F98_3months_binned_data_filtered_with_SAAcut.hdf5", + data_path / "SecondaryPositrons_3months_binned_data_filtered_with_SAAcut.hdf5", + data_path / "SecondaryProtons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5", + data_path / "SAA_3months_unbinned_data_filtered_with_SAAcut_statreduced_akaHEPD01result.hdf5" + ] - # Download the binned Crab data (13.16 MB) + # Download the binned background data + + #fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/PrimaryProtons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5', + # output=str(crab_data_path), checksum = '7597f04210e59340a0888c66fc5cbc63') - # In[7]: + #fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/PrimaryAlphas_WithDetCstbinned_data_filtered_with_SAAcut.hdf5', + # output=str(crab_data_path), checksum = '76a68da730622851b8e1c749248c3b40') - crab_data_path = data_path / "crab_binned_data.hdf5" - fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/crab_binned_data.hdf5', - output=str(crab_data_path), checksum = '6e5bccb48556bdbd259519c52dec9dcb') + #fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/AlbedoPhotons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5', + # output=str(crab_data_path), checksum = '76c58361d2c9b43b66ef2e41c18939c4') - # Download the binned background data (89.10 MB) + 
#fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/AlbedoNeutrons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5', + # output=str(crab_data_path), checksum = '8f3cb418c637b839665a4fcbd000d2eb') - # In[9]: + #fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/CosmicPhotons_3months_binned_data_filtered_with_SAAcut.hdf5', + # output=str(crab_data_path), checksum = '93c4619b383572d318328e6380e35a70') - bkg_data_path = data_path / "bkg_binned_data.hdf5" - fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/bkg_binned_data.hdf5', - output=str(bkg_data_path), checksum = '54221d8556eb4ef520ef61da8083e7f4') + #fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/GalTotal_SA100_F98_3months_binned_data_filtered_with_SAAcut.hdf5', + # output=str(crab_data_path), checksum = 'd0415d4d04b040af47f23f5d08cb7d64') - # Download the response file (596.06 MB) + #fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/SecondaryPositrons_3months_binned_data_filtered_with_SAAcut.hdf5', + # output=str(crab_data_path), checksum = '78aefa46707c98563294a898a62845c1') + + #fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/SecondaryProtons_3months_binned_data_filtered_with_SAAcut.hdf5', + # output=str(crab_data_path), checksum = '5fec2212dcdbb4c43c3ac02f02524f68') - # In[10]: - # Before and after Jeremy's changes - dr_path = data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5" # path to detector response - fetch_wasabi_file( - 'COSI-SMEX/develop/Data/Responses/SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5', - output=str(dr_path), - checksum='eb72400a1279325e9404110f909c7785') + #fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/SAA_3months_unbinned_data_filtered_with_SAAcut_statreduced_akaHEPD01result.hdf5', + # output=str(crab_data_path), checksum = 'fc69fbbfd94cd595f57a8b11fc721169') - # dr_path = str(data_path / "SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5") # path to detector response - # fetch_wasabi_file('COSI-SMEX/develop/Data/Responses/SMEXv12.Continuum.HEALPixO3_10bins_log_flat.binnedimaging.imagingresponse.h5', - # output=str(dr_path), checksum = 'eb72400a1279325e9404110f909c7785') + + # Download the response file + + + dr_path = data_path / "ResponseContinuum_JeremyUpgrade.o3.e100_10000.b10log.s10396905069491.m2284.filtered.nonsparse.binnedimaging.imagingresponse.h5" + + # fetch_wasabi_file('COSI-SMEX/develop/Data/Responses/ResponseContinuum.o3.e100_10000.b10log.s10396905069491.m2284.filtered.nonsparse.binnedimaging.imagingresponse.h5', + # output=str(dr_path), checksum = '16fe005d3ab924ad98322b6579aabf2a') # Read in the spacecraft orientation file @@ -151,8 +172,16 @@ def main(): crab = BinnedData(data_path / "crab.yaml") - crab_bkg = BinnedData(data_path / "crab.yaml") - bkg = BinnedData(data_path / "background.yaml") + #crab_bkg = BinnedData(data_path / "crab.yaml") + bkg_PrimaryProtons = BinnedData(data_path / "background.yaml") + bkg_PrimaryAlphas = BinnedData(data_path / "background.yaml") + bkg_AlbedoPhotons = BinnedData(data_path / "background.yaml") + bkg_AlbedoNeutrons = BinnedData(data_path / "background.yaml") + bkg_CosmicPhotons = BinnedData(data_path / "background.yaml") + bkg_Cosmicdiffuse = BinnedData(data_path / "background.yaml") + bkg_SecondaryProtons = BinnedData(data_path / "background.yaml") + 
bkg_SecondaryPositrons = BinnedData(data_path / "background.yaml") + bkg_SAAprotons = BinnedData(data_path / "background.yaml") # Load binned .hdf5 files @@ -160,14 +189,20 @@ def main(): # In[6]: crab.load_binned_data_from_hdf5(binned_data=crab_data_path) - crab_bkg.load_binned_data_from_hdf5(binned_data=crab_bkg_data_path) - bkg.load_binned_data_from_hdf5(binned_data=bkg_data_path) - + #crab_bkg.load_binned_data_from_hdf5(binned_data=crab_bkg_data_path) + + #load all your bck files + bkg_PrimaryProtons.load_binned_data_from_hdf5(binned_data=bkg_data_path[0]) + bkg_PrimaryAlphas.load_binned_data_from_hdf5(binned_data=bkg_data_path[1]) + bkg_AlbedoPhotons.load_binned_data_from_hdf5(binned_data=bkg_data_path[2]) + bkg_AlbedoNeutrons.load_binned_data_from_hdf5(binned_data=bkg_data_path[3]) + bkg_CosmicPhotons.load_binned_data_from_hdf5(binned_data=bkg_data_path[4]) + bkg_Cosmicdiffuse.load_binned_data_from_hdf5(binned_data=bkg_data_path[5]) + bkg_SecondaryPositrons.load_binned_data_from_hdf5(binned_data=bkg_data_path[6]) + bkg_SecondaryProtons.load_binned_data_from_hdf5(binned_data=bkg_data_path[7]) + bkg_SAAprotons.load_binned_data_from_hdf5(binned_data=bkg_data_path[8]) # Define the path to the detector response - - # In[7]: - # ## Perform spectral fit # ============ Interfaces ============== @@ -181,15 +216,36 @@ def main(): # In[8]: - bkg_dist = bkg.binned_data.project('Em', 'Phi', 'PsiChi') - + bkg_dist = {"PrimaryProtons":bkg_PrimaryProtons.binned_data.project('Em', 'Phi', 'PsiChi'), + "PrimaryAlphas":bkg_PrimaryAlphas.binned_data.project('Em', 'Phi', 'PsiChi'), + "AlbedoPhotons":bkg_AlbedoPhotons.binned_data.project('Em', 'Phi', 'PsiChi'), + "AlbedoNeutrons":bkg_AlbedoNeutrons.binned_data.project('Em', 'Phi', 'PsiChi'), + "CosmicPhotons":bkg_CosmicPhotons.binned_data.project('Em', 'Phi', 'PsiChi'), + "CosmicDiffuse":bkg_Cosmicdiffuse.binned_data.project('Em', 'Phi', 'PsiChi'), + "SecondaryPositrons":bkg_SecondaryPositrons.binned_data.project('Em', 'Phi', 'PsiChi'), + "SecondaryProtons":bkg_SecondaryProtons.binned_data.project('Em', 'Phi', 'PsiChi'), + "SAAprotons":bkg_SAAprotons.binned_data.project('Em', 'Phi', 'PsiChi') + + } + # Workaround to avoid inf values. Out bkg should be smooth, but currently it's not. # Reproduces results before refactoring. It's not _exactly_ the same, since this fudge value was 1e-12, and # it was added to the expectation, not the normalized bkg - bkg_dist += sys.float_info.min - - data = crab_bkg.get_em_cds() - + for bckfile in bkg_dist.keys() : + bkg_dist[bckfile] += sys.float_info.min + + #combine the data + the bck like we would get for real data + data = EmCDSBinnedData(crab.binned_data.project('Em', 'Phi', 'PsiChi') + + bkg_PrimaryProtons.binned_data.project('Em', 'Phi', 'PsiChi') + + bkg_PrimaryAlphas.binned_data.project('Em', 'Phi', 'PsiChi') + + bkg_AlbedoPhotons.binned_data.project('Em', 'Phi', 'PsiChi') + + bkg_AlbedoNeutrons.binned_data.project('Em', 'Phi', 'PsiChi') + + bkg_CosmicPhotons.binned_data.project('Em', 'Phi', 'PsiChi') + + bkg_Cosmicdiffuse.binned_data.project('Em', 'Phi', 'PsiChi') + + bkg_SecondaryPositrons.binned_data.project('Em', 'Phi', 'PsiChi') + + bkg_SecondaryProtons.binned_data.project('Em', 'Phi', 'PsiChi') + + bkg_SAAprotons.binned_data.project('Em', 'Phi', 'PsiChi') + ) bkg = FreeNormBinnedBackground(bkg_dist, sc_history=sc_orientation, copy = False) @@ -218,11 +274,72 @@ def main(): bkg) # Nuisance parameter guess, bounds, etc. 
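    # The per-component nuisance parameters defined below could equivalently be
    # built in a loop over the background components already collected in bkg_dist
    # (a sketch that reuses the objects defined above; the single bound used here
    # is illustrative, while the explicit blocks below keep per-component bounds):
    for label in bkg_dist:
        cosi.bkg_parameter[label] = Parameter(label,   # background parameter
                                              1,       # initial value
                                              unit=u.Hz,
                                              min_value=0,
                                              max_value=20,
                                              delta=0.05)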
- cosi.bkg_parameter['bkg_norm'] = Parameter("bkg_norm", # background parameter + cosi.bkg_parameter['PrimaryProtons'] = Parameter("PrimaryProtons", # background parameter + 1, # initial value of parameter + min_value=0, # minimum value of parameter + max_value=10, # maximum value of parameter + delta=0.05, # initial step used by fitting engine + unit = u.Hz + ) + + cosi.bkg_parameter['PrimaryAlphas'] = Parameter("PrimaryAlphas", # background parameter + 1, # initial value of parameter + unit = u.Hz, + min_value=0, # minimum value of parameter + max_value=10, # maximum value of parameter + delta=0.05, # initial step used by fitting engine + ) + + cosi.bkg_parameter['AlbedoPhotons'] = Parameter("AlbedoPhotons", # background parameter + 1, # initial value of parameter + unit = u.Hz, + min_value=0, # minimum value of parameter + max_value=20, # maximum value of parameter + delta=0.05, # initial step used by fitting engine + ) + + + cosi.bkg_parameter['AlbedoNeutrons'] = Parameter("AlbedoNeutrons", # background parameter + 1, # initial value of parameter + unit = u.Hz, + min_value=0, # minimum value of parameter + max_value=10, # maximum value of parameter + delta=0.05, # initial step used by fitting engine + ) + cosi.bkg_parameter['CosmicPhotons'] = Parameter("CosmicPhotons", # background parameter 1, # initial value of parameter unit = u.Hz, min_value=0, # minimum value of parameter - max_value=100, # maximum value of parameter + max_value=20, # maximum value of parameter + delta=0.05, # initial step used by fitting engine + ) + cosi.bkg_parameter['CosmicDiffuse'] = Parameter("CosmicDiffuse", # background parameter + 1, # initial value of parameter + unit = u.Hz, + min_value=0, # minimum value of parameter + max_value=20, # maximum value of parameter + delta=0.05, # initial step used by fitting engine + ) + cosi.bkg_parameter['SecondaryPositrons'] = Parameter("SecondaryPositrons", # background parameter + 1, # initial value of parameter + unit = u.Hz, + min_value=0, # minimum value of parameter + max_value=10, # maximum value of parameter + delta=0.05, # initial step used by fitting engine + ) + cosi.bkg_parameter['SecondaryProtons'] = Parameter("SecondaryProtons", # background parameter + 1, # initial value of parameter + unit = u.Hz, + min_value=0, # minimum value of parameter + max_value=10, # maximum value of parameter + delta=0.05, # initial step used by fitting engine + ) + + cosi.bkg_parameter['SAAprotons'] = Parameter("SAAprotons", # background parameter + 1, # initial value of parameter + unit = u.Hz, + min_value=0, # minimum value of parameter + max_value=10, # maximum value of parameter delta=0.05, # initial step used by fitting engine ) @@ -252,6 +369,7 @@ def main(): spectrum.beta.min_value = -5.0 spectrum.beta.max_value = -2.15 spectrum.xp.min_value = 1.0 + spectrum.K.min_value = 1e-10 spectrum.alpha.value = alpha spectrum.beta.value = beta @@ -316,6 +434,7 @@ def main(): spectrum_inj.beta.min_value = -5.0 spectrum_inj.beta.max_value = -2.15 spectrum_inj.xp.min_value = 1.0 + spectrum_inj.K.min_value = 1e-10 spectrum_inj.alpha.value = alpha_inj spectrum_inj.beta.value = beta_inj @@ -405,7 +524,7 @@ def main(): ax.set_ylim(.1,100) - plt.show() + #plt.show() # Plot the fitted spectrum convolved with the response, as well as the simulated source counts @@ -428,7 +547,7 @@ def main(): ax.legend() - plt.show() + #plt.show() # Plot the fitted spectrum convolved with the response plus the fitted background, as well as the simulated source+background counts @@ -441,8 +560,8 @@ def 
main(): ax.stairs(expectation.project('Em').todense().contents + expectation_bkg.project('Em').todense().contents, binned_energy_edges, color='purple', label = "Best fit convolved with response plus background") ax.errorbar(binned_energy, expectation.project('Em').todense().contents+expectation_bkg.project('Em').todense().contents, yerr=np.sqrt(expectation.project('Em').todense().contents+expectation_bkg.project('Em').todense().contents), color='purple', linewidth=0, elinewidth=1) - ax.stairs(crab_bkg.binned_data.project('Em').todense().contents, binned_energy_edges, color = 'black', ls = ":", label = "Total counts") - ax.errorbar(binned_energy, crab_bkg.binned_data.project('Em').todense().contents, yerr=np.sqrt(crab_bkg.binned_data.project('Em').todense().contents), color='black', linewidth=0, elinewidth=1) + ax.stairs(data.data.project('Em').todense().contents, binned_energy_edges, color = 'black', ls = ":", label = "Total counts") + ax.errorbar(binned_energy, data.data.project('Em').todense().contents, yerr=np.sqrt(data.data.project('Em').todense().contents), color='black', linewidth=0, elinewidth=1) ax.set_xscale("log") ax.set_yscale("log") From c9281dc7a4ba75d1fcfe1ddfe22bd8cce69fbb4a Mon Sep 17 00:00:00 2001 From: GallegoSav <123730578+GallegoSav@users.noreply.github.com> Date: Wed, 22 Oct 2025 23:19:45 +0200 Subject: [PATCH 100/133] Update example_crab_fit_threeml_plugin_interfaces.py --- ...mple_crab_fit_threeml_plugin_interfaces.py | 51 ++++++++++--------- 1 file changed, 26 insertions(+), 25 deletions(-) diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py index 664ead8a..e7d83470 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py @@ -98,14 +98,12 @@ def main(): # In[7]: - #crab_data_path = data_path / "Binneddata_crab_powerlaw.extracted.filtered.tra.gz.fits.gz.hdf5" crab_data_path = data_path / "crab_standard_3months_binned_data_filtered_with_SAAcut.fits.gz.hdf5" - #fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/crab_standard_3months_binned_data_filtered_with_SAAcut.fits.gz.hdf5', - # output=str(crab_data_path), checksum = '405862396dea2be79d7892d6d5bb50d8') + fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/crab_standard_3months_binned_data_filtered_with_SAAcut.fits.gz.hdf5', + output=str(crab_data_path), checksum = '405862396dea2be79d7892d6d5bb50d8') - #bkg_data_path = data_path / "Total_BG_3months_binned_data_filtered_with_SAAcut_SAAreducedHEPD01_DC3binning.hdf5" bkg_data_path = [ data_path / "PrimaryProtons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5", data_path / "PrimaryAlphas_WithDetCstbinned_data_filtered_with_SAAcut.hdf5", data_path / "AlbedoPhotons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5", @@ -116,46 +114,49 @@ def main(): data_path / "SecondaryProtons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5", data_path / "SAA_3months_unbinned_data_filtered_with_SAAcut_statreduced_akaHEPD01result.hdf5" ] + # Download the binned background data - #fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/PrimaryProtons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5', - # output=str(crab_data_path), checksum = '7597f04210e59340a0888c66fc5cbc63') + 
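    # The individual downloads below could also be driven from a small table of
    # (wasabi path, checksum) pairs, writing each file to its own output path
    # (a sketch; the first entry is copied from the calls below, the remaining
    # entries are elided, and the per-file output naming is an assumption):
    background_downloads = [
        ('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/'
         'PrimaryProtons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5',
         '7597f04210e59340a0888c66fc5cbc63'),
        # ... remaining background components ...
    ]
    for wasabi_path, checksum in background_downloads:
        fetch_wasabi_file(wasabi_path,
                          output=str(data_path / Path(wasabi_path).name),
                          checksum=checksum)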
fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/PrimaryProtons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5', + output=str(crab_data_path), checksum = '7597f04210e59340a0888c66fc5cbc63') + + fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/PrimaryAlphas_WithDetCstbinned_data_filtered_with_SAAcut.hdf5', + output=str(crab_data_path), checksum = '76a68da730622851b8e1c749248c3b40') - #fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/PrimaryAlphas_WithDetCstbinned_data_filtered_with_SAAcut.hdf5', - # output=str(crab_data_path), checksum = '76a68da730622851b8e1c749248c3b40') + fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/AlbedoPhotons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5', + output=str(crab_data_path), checksum = '76c58361d2c9b43b66ef2e41c18939c4') - #fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/AlbedoPhotons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5', - # output=str(crab_data_path), checksum = '76c58361d2c9b43b66ef2e41c18939c4') + fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/AlbedoNeutrons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5', + output=str(crab_data_path), checksum = '8f3cb418c637b839665a4fcbd000d2eb') - #fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/AlbedoNeutrons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5', - # output=str(crab_data_path), checksum = '8f3cb418c637b839665a4fcbd000d2eb') + fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/CosmicPhotons_3months_binned_data_filtered_with_SAAcut.hdf5', + output=str(crab_data_path), checksum = '93c4619b383572d318328e6380e35a70') - #fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/CosmicPhotons_3months_binned_data_filtered_with_SAAcut.hdf5', - # output=str(crab_data_path), checksum = '93c4619b383572d318328e6380e35a70') + fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/GalTotal_SA100_F98_3months_binned_data_filtered_with_SAAcut.hdf5', + output=str(crab_data_path), checksum = 'd0415d4d04b040af47f23f5d08cb7d64') - #fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/GalTotal_SA100_F98_3months_binned_data_filtered_with_SAAcut.hdf5', - # output=str(crab_data_path), checksum = 'd0415d4d04b040af47f23f5d08cb7d64') + fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/SecondaryPositrons_3months_binned_data_filtered_with_SAAcut.hdf5', + output=str(crab_data_path), checksum = '78aefa46707c98563294a898a62845c1') - #fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/SecondaryPositrons_3months_binned_data_filtered_with_SAAcut.hdf5', - # output=str(crab_data_path), checksum = '78aefa46707c98563294a898a62845c1') + fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/SecondaryProtons_3months_binned_data_filtered_with_SAAcut.hdf5', + output=str(crab_data_path), checksum = '5fec2212dcdbb4c43c3ac02f02524f68') - #fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/SecondaryProtons_3months_binned_data_filtered_with_SAAcut.hdf5', - # output=str(crab_data_path), checksum = '5fec2212dcdbb4c43c3ac02f02524f68') + fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/SAA_3months_unbinned_data_filtered_with_SAAcut_statreduced_akaHEPD01result.hdf5', + 
output=str(crab_data_path), checksum = 'fc69fbbfd94cd595f57a8b11fc721169') - #fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/SAA_3months_unbinned_data_filtered_with_SAAcut_statreduced_akaHEPD01result.hdf5', - # output=str(crab_data_path), checksum = 'fc69fbbfd94cd595f57a8b11fc721169') + # Download the response file dr_path = data_path / "ResponseContinuum_JeremyUpgrade.o3.e100_10000.b10log.s10396905069491.m2284.filtered.nonsparse.binnedimaging.imagingresponse.h5" - # fetch_wasabi_file('COSI-SMEX/develop/Data/Responses/ResponseContinuum.o3.e100_10000.b10log.s10396905069491.m2284.filtered.nonsparse.binnedimaging.imagingresponse.h5', - # output=str(dr_path), checksum = '16fe005d3ab924ad98322b6579aabf2a') + fetch_wasabi_file('COSI-SMEX/develop/Data/Responses/ResponseContinuum.o3.e100_10000.b10log.s10396905069491.m2284.filtered.nonsparse.binnedimaging.imagingresponse.h5', + output=str(dr_path), checksum = '16fe005d3ab924ad98322b6579aabf2a') # Read in the spacecraft orientation file @@ -580,4 +581,4 @@ def main(): cProfile.run('main()', filename = "prof_interfaces.prof") exit() - main() \ No newline at end of file + main() From a115f5bad6fad90c1a1565a325ac843689cab2a9 Mon Sep 17 00:00:00 2001 From: GallegoSav <123730578+GallegoSav@users.noreply.github.com> Date: Wed, 22 Oct 2025 23:21:25 +0200 Subject: [PATCH 101/133] Update example_crab_fit_threeml_plugin_interfaces.py --- .../examples/crab/example_crab_fit_threeml_plugin_interfaces.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py index e7d83470..d879a64e 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py @@ -82,7 +82,7 @@ def main(): # Define the path to the directory containing the data, detector response, orientation file, and yaml files if they have already been downloaded, or the directory to download the files into - data_path = Path("/localscratch/sgallego/linkToXauron/COSIpyData/DC3_data") # /path/to/files. Current dir by default + data_path = Path("") # /path/to/files. Current dir by default # Download the orientation file From 6af64cec588e9d3dd7a47db3904d237fba021c86 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Wed, 22 Oct 2025 23:38:43 -0400 Subject: [PATCH 102/133] Fix bug preventing arbitrary name for single component bkg Signed-off-by: Israel Martinez --- .../free_norm_threeml_binned_bkg.py | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py index abeb2561..f4e11dc1 100644 --- a/cosipy/background_estimation/free_norm_threeml_binned_bkg.py +++ b/cosipy/background_estimation/free_norm_threeml_binned_bkg.py @@ -28,11 +28,10 @@ class FreeNormBackground(BackgroundInterface): This must translate to/from regular parameters with arbitrary type from/to 3ML parameters - Parameter names are "{label}_norm". 
Default to "bkg_norm" is there was a single - unlabeled component + Default to "bkg_norm" is there was a single unlabeled component """ - _default_label = 'bkg' + _default_label = 'bkg_norm' def __init__(self, distribution:Union[Histogram, Dict[str, Histogram]], @@ -52,11 +51,13 @@ def __init__(self, self._distributions = {self._default_label: distribution} self._norms = np.ones(1) # Hz. Each component self._norm = 1 # Hz. Total + self._single_component = True else: # Multiple label components. self._distributions = distribution self._norms = np.ones(self.ncomponents) # Hz Each component self._norm = np.sum(self._norms) # Hz. Total + self._single_component = False self._labels = tuple(self._distributions.keys()) @@ -89,10 +90,6 @@ def __init__(self, if self._axes != bkg.axes: raise ValueError("All background components mus have the same axes") - @property - def _single_component(self): - return self.ncomponents == 1 - @property def norm(self): """ @@ -104,7 +101,7 @@ def norm(self): @property def norms(self): if self._single_component: - return {f"{self._default_label}_norm": u.Quantity(self._norms[0], u.Hz)} + return {self._default_label: u.Quantity(self._norms[0], u.Hz)} else: return {l:u.Quantity(n, u.Hz, copy = False) for l,n in zip(self.labels,self._norms)} @@ -124,7 +121,7 @@ def set_norm(self, norm: Union[u.Quantity, Dict[str, u.Quantity]]): if self._single_component: if isinstance(norm, dict): - self._norms[0] = norm[f'{self._default_label}_norm'].to_value(u.Hz) + self._norms[0] = norm[self._default_label].to_value(u.Hz) else: self._norms[0] = norm.to_value(u.Hz) else: From 0be411fc89796015b774b789b52fca06d0c21d3d Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Wed, 22 Oct 2025 23:39:00 -0400 Subject: [PATCH 103/133] Single or multiple bkg Signed-off-by: Israel Martinez --- ...mple_crab_fit_threeml_plugin_interfaces.py | 220 ++++-------------- 1 file changed, 47 insertions(+), 173 deletions(-) diff --git a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py index d879a64e..a25d823f 100644 --- a/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py +++ b/docs/api/interfaces/examples/crab/example_crab_fit_threeml_plugin_interfaces.py @@ -78,6 +78,8 @@ def main(): + single_bkg_fit = True + # ## Download and read in binned data # Define the path to the directory containing the data, detector response, orientation file, and yaml files if they have already been downloaded, or the directory to download the files into @@ -91,72 +93,37 @@ def main(): # In[ ]: sc_orientation_path = data_path / "DC3_final_530km_3_month_with_slew_15sbins_GalacticEarth_SAA.ori" - #fetch_wasabi_file('COSI-SMEX/DC3/Data/Orientation/DC3_final_530km_3_month_with_slew_15sbins_GalacticEarth_SAA.ori', - # output=str(sc_orientation_path), checksum = 'e5e71e3528e39b855b0e4f74a1a2eebe') + fetch_wasabi_file('COSI-SMEX/DC3/Data/Orientation/DC3_final_530km_3_month_with_slew_15sbins_GalacticEarth_SAA.ori', + output=sc_orientation_path, checksum = 'e5e71e3528e39b855b0e4f74a1a2eebe') # Download the binned Crab data # In[7]: crab_data_path = data_path / "crab_standard_3months_binned_data_filtered_with_SAAcut.fits.gz.hdf5" - fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/crab_standard_3months_binned_data_filtered_with_SAAcut.fits.gz.hdf5', - output=str(crab_data_path), checksum = '405862396dea2be79d7892d6d5bb50d8') - - - bkg_data_path = [ data_path / 
"PrimaryProtons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5", - data_path / "PrimaryAlphas_WithDetCstbinned_data_filtered_with_SAAcut.hdf5", - data_path / "AlbedoPhotons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5", - data_path / "AlbedoNeutrons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5", - data_path / "CosmicPhotons_3months_binned_data_filtered_with_SAAcut.hdf5", - data_path / "GalTotal_SA100_F98_3months_binned_data_filtered_with_SAAcut.hdf5", - data_path / "SecondaryPositrons_3months_binned_data_filtered_with_SAAcut.hdf5", - data_path / "SecondaryProtons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5", - data_path / "SAA_3months_unbinned_data_filtered_with_SAAcut_statreduced_akaHEPD01result.hdf5" - ] - + output=crab_data_path, checksum = '405862396dea2be79d7892d6d5bb50d8') + + bkg_components = {"PrimaryProtons":{'filename':'PrimaryProtons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5', 'checksum':'7597f04210e59340a0888c66fc5cbc63'}, + "PrimaryAlphas": {'filename': 'PrimaryAlphas_WithDetCstbinned_data_filtered_with_SAAcut.hdf5', 'checksum': '76a68da730622851b8e1c749248c3b40'}, + "AlbedoPhotons": {'filename': 'AlbedoPhotons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5', 'checksum': '76c58361d2c9b43b66ef2e41c18939c4'}, + "AlbedoNeutrons": {'filename': 'AlbedoNeutrons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5', 'checksum': '8f3cb418c637b839665a4fcbd000d2eb'}, + "CosmicPhotons": {'filename': 'CosmicPhotons_3months_binned_data_filtered_with_SAAcut.hdf5', 'checksum': '93c4619b383572d318328e6380e35a70'}, + "CosmicDiffuse": {'filename': 'GalTotal_SA100_F98_3months_binned_data_filtered_with_SAAcut.hdf5', 'checksum': 'd0415d4d04b040af47f23f5d08cb7d64'}, + "SecondaryPositrons": {'filename': 'SecondaryPositrons_3months_binned_data_filtered_with_SAAcut.hdf5', 'checksum': '5fec2212dcdbb4c43c3ac02f02524f68'}, + "SecondaryProtons": {'filename': 'SecondaryProtons_WithDetCstbinned_data_filtered_with_SAAcut.fits.gz.hdf5', 'checksum': '78aefa46707c98563294a898a62845c1'}, + "SAAprotons": {'filename': 'SAA_3months_unbinned_data_filtered_with_SAAcut_statreduced_akaHEPD01result.hdf5', 'checksum': 'fc69fbbfd94cd595f57a8b11fc721169'}, + } # Download the binned background data - - fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/PrimaryProtons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5', - output=str(crab_data_path), checksum = '7597f04210e59340a0888c66fc5cbc63') - - fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/PrimaryAlphas_WithDetCstbinned_data_filtered_with_SAAcut.hdf5', - output=str(crab_data_path), checksum = '76a68da730622851b8e1c749248c3b40') - - - fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/AlbedoPhotons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5', - output=str(crab_data_path), checksum = '76c58361d2c9b43b66ef2e41c18939c4') - - fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/AlbedoNeutrons_WithDetCstbinned_data_filtered_with_SAAcut.hdf5', - output=str(crab_data_path), checksum = '8f3cb418c637b839665a4fcbd000d2eb') - - fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/CosmicPhotons_3months_binned_data_filtered_with_SAAcut.hdf5', - output=str(crab_data_path), checksum = '93c4619b383572d318328e6380e35a70') - - fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/GalTotal_SA100_F98_3months_binned_data_filtered_with_SAAcut.hdf5', - output=str(crab_data_path), checksum = 
'd0415d4d04b040af47f23f5d08cb7d64') - - fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/SecondaryPositrons_3months_binned_data_filtered_with_SAAcut.hdf5', - output=str(crab_data_path), checksum = '78aefa46707c98563294a898a62845c1') - - fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/SecondaryProtons_3months_binned_data_filtered_with_SAAcut.hdf5', - output=str(crab_data_path), checksum = '5fec2212dcdbb4c43c3ac02f02524f68') + for bkg in bkg_components.values(): + wasabi_path = 'COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/'+bkg['filename'] + fetch_wasabi_file(wasabi_path, output=data_path/bkg['filename'], checksum = bkg['checksum']) - - fetch_wasabi_file('COSI-SMEX/cosipy_tutorials/crab_spectral_fit_galactic_frame/SAA_3months_unbinned_data_filtered_with_SAAcut_statreduced_akaHEPD01result.hdf5', - output=str(crab_data_path), checksum = 'fc69fbbfd94cd595f57a8b11fc721169') - - - - - # Download the response file - - - dr_path = data_path / "ResponseContinuum_JeremyUpgrade.o3.e100_10000.b10log.s10396905069491.m2284.filtered.nonsparse.binnedimaging.imagingresponse.h5" - + # Download the response file + dr_path = data_path / "ResponseContinuum.o3.e100_10000.b10log.s10396905069491.m2284.filtered.nonsparse.binnedimaging.imagingresponse.h5" fetch_wasabi_file('COSI-SMEX/develop/Data/Responses/ResponseContinuum.o3.e100_10000.b10log.s10396905069491.m2284.filtered.nonsparse.binnedimaging.imagingresponse.h5', - output=str(dr_path), checksum = '16fe005d3ab924ad98322b6579aabf2a') + output=str(dr_path), checksum = '7121f094be50e7bfe9b31e53015b0e85') # Read in the spacecraft orientation file @@ -173,62 +140,41 @@ def main(): crab = BinnedData(data_path / "crab.yaml") - #crab_bkg = BinnedData(data_path / "crab.yaml") - bkg_PrimaryProtons = BinnedData(data_path / "background.yaml") - bkg_PrimaryAlphas = BinnedData(data_path / "background.yaml") - bkg_AlbedoPhotons = BinnedData(data_path / "background.yaml") - bkg_AlbedoNeutrons = BinnedData(data_path / "background.yaml") - bkg_CosmicPhotons = BinnedData(data_path / "background.yaml") - bkg_Cosmicdiffuse = BinnedData(data_path / "background.yaml") - bkg_SecondaryProtons = BinnedData(data_path / "background.yaml") - bkg_SecondaryPositrons = BinnedData(data_path / "background.yaml") - bkg_SAAprotons = BinnedData(data_path / "background.yaml") + crab.load_binned_data_from_hdf5(binned_data=crab_data_path) + for bkg in bkg_components.values(): + binned_data = BinnedData(data_path / "background.yaml") + binned_data.load_binned_data_from_hdf5(binned_data=data_path/bkg['filename']) + bkg['dist'] = binned_data.binned_data.project('Em', 'Phi', 'PsiChi') # Load binned .hdf5 files # In[6]: - crab.load_binned_data_from_hdf5(binned_data=crab_data_path) - #crab_bkg.load_binned_data_from_hdf5(binned_data=crab_bkg_data_path) - - #load all your bck files - bkg_PrimaryProtons.load_binned_data_from_hdf5(binned_data=bkg_data_path[0]) - bkg_PrimaryAlphas.load_binned_data_from_hdf5(binned_data=bkg_data_path[1]) - bkg_AlbedoPhotons.load_binned_data_from_hdf5(binned_data=bkg_data_path[2]) - bkg_AlbedoNeutrons.load_binned_data_from_hdf5(binned_data=bkg_data_path[3]) - bkg_CosmicPhotons.load_binned_data_from_hdf5(binned_data=bkg_data_path[4]) - bkg_Cosmicdiffuse.load_binned_data_from_hdf5(binned_data=bkg_data_path[5]) - bkg_SecondaryPositrons.load_binned_data_from_hdf5(binned_data=bkg_data_path[6]) - bkg_SecondaryProtons.load_binned_data_from_hdf5(binned_data=bkg_data_path[7]) - 
bkg_SAAprotons.load_binned_data_from_hdf5(binned_data=bkg_data_path[8]) # Define the path to the detector response # ## Perform spectral fit # ============ Interfaces ============== - output_suffix = 'interfaces' - dr = FullDetectorResponse.open(dr_path) instrument_response = BinnedInstrumentResponse(dr) # Set background parameter, which is used to fit the amplitude of the background, and instantiate the COSI 3ML plugin # In[8]: + total_bkg = None + for bkg in bkg_components.values(): + if total_bkg is None: + total_bkg = bkg['dist'] + else: + total_bkg = total_bkg + bkg['dist'] # Issues with in-place operations for sparse contents + + if single_bkg_fit: + bkg_dist = {"total_bkg":total_bkg} + else: + bkg_dist = {l: b['dist'] for l, b in bkg_components.items()} - bkg_dist = {"PrimaryProtons":bkg_PrimaryProtons.binned_data.project('Em', 'Phi', 'PsiChi'), - "PrimaryAlphas":bkg_PrimaryAlphas.binned_data.project('Em', 'Phi', 'PsiChi'), - "AlbedoPhotons":bkg_AlbedoPhotons.binned_data.project('Em', 'Phi', 'PsiChi'), - "AlbedoNeutrons":bkg_AlbedoNeutrons.binned_data.project('Em', 'Phi', 'PsiChi'), - "CosmicPhotons":bkg_CosmicPhotons.binned_data.project('Em', 'Phi', 'PsiChi'), - "CosmicDiffuse":bkg_Cosmicdiffuse.binned_data.project('Em', 'Phi', 'PsiChi'), - "SecondaryPositrons":bkg_SecondaryPositrons.binned_data.project('Em', 'Phi', 'PsiChi'), - "SecondaryProtons":bkg_SecondaryProtons.binned_data.project('Em', 'Phi', 'PsiChi'), - "SAAprotons":bkg_SAAprotons.binned_data.project('Em', 'Phi', 'PsiChi') - - } - # Workaround to avoid inf values. Out bkg should be smooth, but currently it's not. # Reproduces results before refactoring. It's not _exactly_ the same, since this fudge value was 1e-12, and # it was added to the expectation, not the normalized bkg @@ -236,17 +182,7 @@ def main(): bkg_dist[bckfile] += sys.float_info.min #combine the data + the bck like we would get for real data - data = EmCDSBinnedData(crab.binned_data.project('Em', 'Phi', 'PsiChi') - + bkg_PrimaryProtons.binned_data.project('Em', 'Phi', 'PsiChi') - + bkg_PrimaryAlphas.binned_data.project('Em', 'Phi', 'PsiChi') - + bkg_AlbedoPhotons.binned_data.project('Em', 'Phi', 'PsiChi') - + bkg_AlbedoNeutrons.binned_data.project('Em', 'Phi', 'PsiChi') - + bkg_CosmicPhotons.binned_data.project('Em', 'Phi', 'PsiChi') - + bkg_Cosmicdiffuse.binned_data.project('Em', 'Phi', 'PsiChi') - + bkg_SecondaryPositrons.binned_data.project('Em', 'Phi', 'PsiChi') - + bkg_SecondaryProtons.binned_data.project('Em', 'Phi', 'PsiChi') - + bkg_SAAprotons.binned_data.project('Em', 'Phi', 'PsiChi') - ) + data = EmCDSBinnedData(crab.binned_data.project('Em', 'Phi', 'PsiChi') + total_bkg) bkg = FreeNormBinnedBackground(bkg_dist, sc_history=sc_orientation, copy = False) @@ -275,75 +211,14 @@ def main(): bkg) # Nuisance parameter guess, bounds, etc. 
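    # For reference: with single_bkg_fit = True, the loop added below creates a
    # single nuisance parameter and is roughly equivalent to this commented-out
    # sketch (the 'total_bkg' label comes from the bkg_dist dictionary built above):
    #
    # cosi.bkg_parameter['total_bkg'] = Parameter('total_bkg',  # background parameter
    #                                             1,            # initial value
    #                                             min_value=0,
    #                                             max_value=100,
    #                                             delta=0.05,   # initial step used by fitting engine
    #                                             unit=u.Hz)
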
- cosi.bkg_parameter['PrimaryProtons'] = Parameter("PrimaryProtons", # background parameter - 1, # initial value of parameter - min_value=0, # minimum value of parameter - max_value=10, # maximum value of parameter - delta=0.05, # initial step used by fitting engine - unit = u.Hz - ) - - cosi.bkg_parameter['PrimaryAlphas'] = Parameter("PrimaryAlphas", # background parameter - 1, # initial value of parameter - unit = u.Hz, - min_value=0, # minimum value of parameter - max_value=10, # maximum value of parameter - delta=0.05, # initial step used by fitting engine - ) - - cosi.bkg_parameter['AlbedoPhotons'] = Parameter("AlbedoPhotons", # background parameter - 1, # initial value of parameter - unit = u.Hz, - min_value=0, # minimum value of parameter - max_value=20, # maximum value of parameter - delta=0.05, # initial step used by fitting engine - ) - - - cosi.bkg_parameter['AlbedoNeutrons'] = Parameter("AlbedoNeutrons", # background parameter - 1, # initial value of parameter - unit = u.Hz, - min_value=0, # minimum value of parameter - max_value=10, # maximum value of parameter - delta=0.05, # initial step used by fitting engine - ) - cosi.bkg_parameter['CosmicPhotons'] = Parameter("CosmicPhotons", # background parameter - 1, # initial value of parameter - unit = u.Hz, - min_value=0, # minimum value of parameter - max_value=20, # maximum value of parameter - delta=0.05, # initial step used by fitting engine - ) - cosi.bkg_parameter['CosmicDiffuse'] = Parameter("CosmicDiffuse", # background parameter - 1, # initial value of parameter - unit = u.Hz, - min_value=0, # minimum value of parameter - max_value=20, # maximum value of parameter - delta=0.05, # initial step used by fitting engine - ) - cosi.bkg_parameter['SecondaryPositrons'] = Parameter("SecondaryPositrons", # background parameter - 1, # initial value of parameter - unit = u.Hz, - min_value=0, # minimum value of parameter - max_value=10, # maximum value of parameter - delta=0.05, # initial step used by fitting engine - ) - cosi.bkg_parameter['SecondaryProtons'] = Parameter("SecondaryProtons", # background parameter - 1, # initial value of parameter - unit = u.Hz, - min_value=0, # minimum value of parameter - max_value=10, # maximum value of parameter - delta=0.05, # initial step used by fitting engine - ) - - cosi.bkg_parameter['SAAprotons'] = Parameter("SAAprotons", # background parameter - 1, # initial value of parameter - unit = u.Hz, - min_value=0, # minimum value of parameter - max_value=10, # maximum value of parameter - delta=0.05, # initial step used by fitting engine - ) - + for bkg_label in bkg_dist.keys(): + cosi.bkg_parameter[bkg_label] = Parameter(bkg_label, # background parameter + 1, # initial value of parameter + min_value=0, # minimum value of parameter + max_value= 100 if single_bkg_fit else 20, # maximum value of parameter + delta=0.05, # initial step used by fitting engine + unit = u.Hz + ) # ======== Interfaces end ========== @@ -352,7 +227,6 @@ def main(): # In[9]: - l = 184.56 b = -5.78 From 9dfacba47cb1ea4f15d28a506c70ccc62b0b3ba5 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Thu, 23 Oct 2025 08:38:38 -0400 Subject: [PATCH 104/133] update histpy version to the one with timeaxis Signed-off-by: Israel Martinez --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 7e38244b..fa303ed7 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ author_email='imc@umd.edu', url='https://github.com/cositools/cosipy', packages = find_packages(include=["cosipy", 
"cosipy.*"]), - install_requires = ['histpy>=2.0.3', + install_requires = ['histpy>=2.0.5', 'h5py', 'hdf5plugin', 'mhealpy', From 007eb4401cc0d1496208d1936f0b7bacc3839357 Mon Sep 17 00:00:00 2001 From: Hiroki Yoneda Date: Fri, 24 Oct 2025 23:57:14 +0900 Subject: [PATCH 105/133] sort -> _sort (GTI) --- cosipy/event_selection/good_time_interval.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cosipy/event_selection/good_time_interval.py b/cosipy/event_selection/good_time_interval.py index 2cb34474..ae846670 100644 --- a/cosipy/event_selection/good_time_interval.py +++ b/cosipy/event_selection/good_time_interval.py @@ -25,7 +25,7 @@ def __init__(self, tstart_list, tstop_list): self._tstop_list = tstop_list # Sort by start time - self.sort() + self._sort() @property def tstart_list(self): @@ -35,7 +35,7 @@ def tstart_list(self): def tstop_list(self): return self._tstop_list - def sort(self): + def _sort(self): """ Sort GTI by start time in ascending order. From 4836366752e4efce53a9708e4c1d59f06e178d01 Mon Sep 17 00:00:00 2001 From: Hiroki Yoneda Date: Fri, 24 Oct 2025 23:59:19 +0900 Subject: [PATCH 106/133] Replace an original TimeSelector with MultiTimeSelector;MultiTimeSelector is renamed as TimeSelector --- .../event_selection/multi_time_selection.py | 83 -------------- cosipy/event_selection/time_selection.py | 104 +++++++++++++++--- 2 files changed, 90 insertions(+), 97 deletions(-) delete mode 100644 cosipy/event_selection/multi_time_selection.py diff --git a/cosipy/event_selection/multi_time_selection.py b/cosipy/event_selection/multi_time_selection.py deleted file mode 100644 index 971caa3e..00000000 --- a/cosipy/event_selection/multi_time_selection.py +++ /dev/null @@ -1,83 +0,0 @@ -import itertools -from typing import Union, Iterable - -import numpy as np -from astropy.time import Time - -from cosipy.interfaces import TimeTagEventInterface, EventInterface -from cosipy.interfaces.event_selection import EventSelectorInterface -from cosipy.util.iterables import itertools_batched - - -class MultiTimeSelector(EventSelectorInterface): - - def __init__(self, tstart_list:Time = None, tstop_list:Time = None, batch_size:int = 10000): - """ - Assumes events are time-ordered - - Parameters - ---------- - tstart_list: - tstop_list: - batch_size: - """ - if tstart_list.isscalar == True: - tstart_list = Time([tstart_list]) - if tstop_list.isscalar == True: - tstop_list = Time([tstop_list]) - - self._tstart_list = tstart_list - self._tstop_list = tstop_list - - self._batch_size = batch_size - - @classmethod - def load_GTI(cls, gti, batch_size:int = 10000): - """ - Instantiate a multi time selector from a good time intervals. - - Parameters - ---------- - gti: - batch_size: - """ - tstart_list = gti.tstart_list - tstop_list = gti.tstop_list - - selector = cls(tstart_list, tstop_list, batch_size) - - return selector - - def _select(self, event:TimeTagEventInterface) -> bool: - # Single event - return next(iter(self.select([event]))) - - def select(self, events:Union[TimeTagEventInterface, Iterable[TimeTagEventInterface]]) -> Union[bool, Iterable[bool]]: - - if isinstance(events, EventInterface): - # Single event - return self._select(events) - else: - # Multiple - - # Working in chunks/batches. 
- # This can optimized based on the system - - for chunk in itertools_batched(events, self._batch_size): - - jd1 = [] - jd2 = [] - - for event in chunk: - jd1.append(event.jd1) - jd2.append(event.jd2) - - time = Time(jd1, jd2, format = 'jd') - - indices = np.searchsorted(self._tstart_list, time, side='right') - 1 - valid = (indices >= 0) & (indices < len(self._tstop_list)) - result = np.zeros(len(time), dtype=bool) - result[valid] = time[valid] <= self._tstop_list[indices[valid]] - - for sel in result: - yield sel diff --git a/cosipy/event_selection/time_selection.py b/cosipy/event_selection/time_selection.py index 4bdecd67..897bfc8a 100644 --- a/cosipy/event_selection/time_selection.py +++ b/cosipy/event_selection/time_selection.py @@ -1,3 +1,6 @@ +import logging +logger = logging.getLogger(__name__) + import itertools from typing import Union, Iterable @@ -14,19 +17,79 @@ class TimeSelector(EventSelectorInterface): def __init__(self, tstart:Time = None, tstop:Time = None, batch_size:int = 10000): """ Assumes events are time-ordered + + Selects events that fall within ANY of the time intervals defined by + corresponding pairs of (tstart, tstop). + + Valid combinations: + - (None, None): No time constraints + - (Scalar, None): Single lower bound only + - (None, Scalar): Single upper bound only + - (Scalar, Scalar): Single time interval + - (List, List): Multiple time intervals (same length required) Parameters ---------- - chunk_size : object - Number of events processed at a time - tstart - tstop + tstart: Time, scalar Time, or None + Start time(s). If list, tstop must also be a list of same length. + tstop: Time, scalar Time, or None + Stop time(s). If list, tstart must also be a list of same length. + batch_size: int, default 10000 + Number of events to process at once """ - - self._tstart = tstart - self._tstop = tstop + if tstart is not None and tstop is not None: + if not tstart.isscalar == tstop.isscalar: + logger.error("tstart and tstop must both be scalar or both be list.") + raise ValueError + + elif tstart is None and tstop is not None: + if tstop.isscalar == False: + logger.error("When tstart is None, tstop must not be a list.") + raise ValueError + + elif tstart is not None and tstop is None: + if tstart.isscalar == False: + logger.error("When tstop is None, tstart must not be a list.") + raise ValueError + + # tstart is None and tstop is None -> OK. + + # Convert scalars to lists for uniform processing + if tstart is not None and tstart.isscalar == True: + tstart = Time([tstart]) + + if tstop is not None and tstop.isscalar == True: + tstop = Time([tstop]) + + # length check + if tstart is not None and tstop is not None: + if len(tstart) != len(tstop): + logger.error(f"tstart and tstop must have same length.") + raise ValueError + + self._tstart_list = tstart + self._tstop_list = tstop self._batch_size = batch_size + + @classmethod + def from_gti(cls, gti, batch_size:int = 10000): + """ + Instantiate a multi time selector from good time intervals. 
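+
+        A sketch of the intended usage (any object exposing tstart_list and
+        tstop_list attributes can play the role of gti):
+
+            selector = TimeSelector.from_gti(gti)
+            selected = selector.select(events)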
+ + Parameters + ---------- + gti: + Good time intervals object with tstart_list and tstop_list attributes + batch_size: int + Number of events to process at once + """ + tstart_list = gti.tstart_list + tstop_list = gti.tstop_list + + selector = cls(tstart_list, tstop_list, batch_size) + + return selector def _select(self, event:TimeTagEventInterface) -> bool: # Single event @@ -54,12 +117,25 @@ def select(self, events:Union[TimeTagEventInterface, Iterable[TimeTagEventInterf time = Time(jd1, jd2, format = 'jd') - selected = np.logical_and(np.logical_or(self._tstart is None, time > self._tstart), - np.logical_or(self._tstop is None, time <= self._tstop)) - - for sel in selected: + if self._tstart_list is None and self._tstop_list is None: + result = np.ones(len(time), dtype=bool) + + elif self._tstart_list is None: + result = time <= self._tstop_list[0] + + elif self._tstop_list is None: + result = time > self._tstart_list[0] + + else: + indices = np.searchsorted(self._tstart_list, time, side='right') - 1 + valid = (indices >= 0) & (indices < len(self._tstop_list)) + result = np.zeros(len(time), dtype=bool) + result[valid] = time[valid] <= self._tstop_list[indices[valid]] + + for sel in result: yield sel - if self._tstop is not None and time[-1] > self._tstop: - # Stop further loading of event - return + if self._tstop_list is not None and len(time) > 0: + if time[-1] > self._tstop_list[-1]: + # Stop further loading of event + return From 1ed6dcaadd856d937dcb5f7d991e02c2ae5c979a Mon Sep 17 00:00:00 2001 From: nlopez Date: Fri, 24 Oct 2025 11:11:36 -0400 Subject: [PATCH 107/133] Fix bug. SkyCoord takes latitude, not colatitude. Found by Pascal. Signed-off-by: Israel Martinez --- cosipy/interfaces/event.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cosipy/interfaces/event.py b/cosipy/interfaces/event.py index 92d081d6..5a0e1a60 100644 --- a/cosipy/interfaces/event.py +++ b/cosipy/interfaces/event.py @@ -101,7 +101,7 @@ def scattered_direction_sc(self) -> SkyCoord: Add fancy energy quantity """ return SkyCoord(self.scattered_lon_rad_sc, - np.pi / 2 - self.scattered_lat_rad_sc, + self.scattered_lat_rad_sc, unit=u.rad, frame=SpacecraftFrame()) From 5f1892d4b738c73ea28ae126145d7839553e744b Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sun, 26 Oct 2025 19:02:37 -0400 Subject: [PATCH 108/133] Decouple conventions from astropy Signed-off-by: Israel Martinez --- cosipy/polarization/conventions.py | 182 +++++++++++++++++++++-------- 1 file changed, 133 insertions(+), 49 deletions(-) diff --git a/cosipy/polarization/conventions.py b/cosipy/polarization/conventions.py index 68adbca8..9f995f09 100644 --- a/cosipy/polarization/conventions.py +++ b/cosipy/polarization/conventions.py @@ -1,5 +1,7 @@ +from typing import Union + import numpy as np -from astropy.coordinates import SkyCoord, Angle +from astropy.coordinates import SkyCoord, Angle, BaseCoordinateFrame, frame_transform_graph import astropy.units as u import inspect from scoords import Attitude, SpacecraftFrame @@ -68,8 +70,28 @@ def frame(self): Astropy coordinate frame """ return None - - def get_basis(self, source_direction: SkyCoord): + + def get_basis_local(self, source_vector: np.ndarray): + """ + Get the px,py unit vectors that define the polarization plane on + this convention, and in the convention's frame. + + Polarization angle increments from px to py. + + Parameters + ---------- + source_vector: np.ndarray + Unit cartesian vector. 
Shape (3,N) + + Returns + ------- + px,py : np.ndarray + Polarization angle increases from px to py. pz is always + the opposite of the source direction --i.e. in the direction of the + particle. + """ + + def get_basis(self, source_direction: SkyCoord, *args, **kwargs): """ Get the px,py unit vectors that define the polarization plane on this convention. Polarization angle increments from px to py. @@ -87,12 +109,26 @@ def get_basis(self, source_direction: SkyCoord): particle. """ + # To the convention's frame + source_vector = source_direction.transform_to(self.frame).cartesian.xyz + + # Bare basis + px,py = self.get_basis_local(source_vector) + + # To SkyCoord in the source frame + px = SkyCoord(*px, representation_type='cartesian', frame=self.frame).transform_to(source_direction.frame) + py = SkyCoord(*py, representation_type='cartesian', frame=self.frame).transform_to(source_direction.frame) + + return px, py + + # Orthographic projection convention class OrthographicConvention(PolarizationConvention): def __init__(self, - ref_vector: SkyCoord = None, + ref_vector: Union[SkyCoord, np.ndarray[float]] = None, + frame:Union[BaseCoordinateFrame, None] = None, clockwise: bool = False): """ The local polarization x-axis points towards an arbitrary reference vector, @@ -101,21 +137,35 @@ def __init__(self, Parameters ---------- - ref_vector : SkyCoord + ref_vector : Union[SkyCoord, np.ndarray[float]] Set the reference vector, defaulting to celestial north if not provided - (IAU convention) + (IAU convention). Alternatively, pass the cartesian representation and set a frame. + frame : BaseCoordinateFrame + Only used if ref_vector is a bare cartesian vector. clockwise : bool Direction of increasing PA, when looking at the source. Default is false --i.e. counter-clockwise when looking outwards. """ if ref_vector is None: - self.ref_vector = SkyCoord(ra=0 * u.deg, dec=90 * u.deg, frame="icrs") + self._ref_vector = np.asarray([0,0,1]) + self._frame = frame else: - self.ref_vector = ref_vector + if isinstance(ref_vector, SkyCoord): + self._ref_vector = ref_vector.cartesian.xyz + self._frame = ref_vector.frame + else: + self._ref_vector = ref_vector + self._frame = frame + + if not isinstance(self._frame, BaseCoordinateFrame): + self._frame = frame_transform_graph.lookup_name(self._frame) self._sign = 1 if clockwise else -1 + def ref_vector(self): + return SkyCoord(self._ref_vector, representation_type = 'cartesian', frame = self.frame) + def __repr__(self): return f"" @@ -128,15 +178,14 @@ def is_clockwise(self): @property def frame(self): - return self.ref_vector.frame + return self._frame - def get_basis(self, source_direction: SkyCoord): + def get_basis_local(self, source_vector: np.ndarray): # Extract Cartesian coordinates for the source direction. - pz = self._sign * source_direction.transform_to(self.frame).cartesian.xyz + pz = self._sign * source_vector[2] # Broadcast reference vector - ref = np.expand_dims(self.ref_vector.cartesian.xyz, - axis = tuple(np.arange(1,pz.ndim, dtype = int))) + ref = np.expand_dims(self._ref_vector, axis = tuple(np.arange(1,pz.ndim, dtype = int))) # Get py. 
Normalize because pz and ref dot not make 90deg angle py = np.cross(pz, ref, axisa = 0, axisb = 0, axisc = 0) @@ -144,16 +193,39 @@ def get_basis(self, source_direction: SkyCoord): # Get px px = np.cross(py, pz, axisa = 0, axisb = 0, axisc = 0) - - # To SkyCoord - px = SkyCoord(*px, representation_type='cartesian', frame = self.frame) - py = SkyCoord(*py, representation_type='cartesian', frame = self.frame) - + return px, py +class ConventionInSpacecraftFrameMixin: + """ + Checks for a frame with attitude -#https://github.com/zoglauer/megalib/blob/1eaad14c51ec52ad1cb2399a7357fe2ca1074f79/src/cosima/src/MCSource.cc#L3452 -class MEGAlibRelative(OrthographicConvention): + Sub-classes need _frame property, and be sub-classes of PolarizationConvention + """ + + def get_basis(self, source_direction: SkyCoord, attitude=None): + """ + + Parameters + ---------- + source_direction + attitude: This overrides the object frame! + + Returns + ------- + + """ + if self._frame is None and attitude is None: + raise RuntimeError("You need to pass an attitude to convert between local and inertial coordinates") + + if attitude is not None: + self._frame = SpacecraftFrame(attitude=attitude) + + return super().get_basis(source_direction) + + + #https://github.com/zoglauer/megalib/blob/1eaad14c51ec52ad1cb2399a7357fe2ca1074f79/src/cosima/src/MCSource.cc#L3452 +class MEGAlibRelative(OrthographicConvention, ConventionInSpacecraftFrameMixin): def __init__(self, axis, attitude = None): """ @@ -171,20 +243,22 @@ def __init__(self, axis, attitude = None): axis = axis.lower() if axis == 'x': - ref_vector = SkyCoord(lon=0 * u.deg, lat=0 * u.deg, - frame = SpacecraftFrame(attitude = attitude)) + ref_vector = np.asarray([1,0,0]) elif axis == 'y': - ref_vector = SkyCoord(lon=90 * u.deg, lat=0 * u.deg, - frame = SpacecraftFrame(attitude = attitude)) + ref_vector = np.asarray([0,1,0]) elif axis == 'z': - ref_vector = SkyCoord(lon=0 * u.deg, lat=90 * u.deg, - frame = SpacecraftFrame(attitude = attitude)) + ref_vector = np.asarray([0,0,1]) else: raise ValueError("Axis must be 'x', 'y' or 'z'.") - - super().__init__(ref_vector, clockwise = False) + + if attitude is None: + frame = None + else: + frame = SpacecraftFrame(attitude = attitude) + + super().__init__(ref_vector, frame = frame, clockwise = False) - def get_basis(self, source_direction: SkyCoord): + def get_basis_local(self, source_vector: np.ndarray): # The MEGAlib and orthographic definitions are prett much the same, but # they differ on the order of the cross products @@ -206,12 +280,10 @@ def get_basis(self, source_direction: SkyCoord): # MEGAlib's PA is counter-clockwise when looking at the sourse # Flip px <-> py - py,px = super().get_basis(source_direction) + py,px = super().get_basis_local(source_vector) # Sign of px - py = SkyCoord(-py.cartesian, - representation_type = 'cartesian', - frame = py.frame) + py = -py return px,py @@ -247,13 +319,14 @@ def __init__(self): angle of electric-vector maximum, e, starting from North and increasing through East. 
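
        A usage sketch (values are illustrative, and the call signature is an
        assumption based on how PolarizationAngle is used elsewhere in cosipy):

            pa = PolarizationAngle(Angle(20 * u.deg), source_direction,
                                   IAUPolarizationConvention())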
""" - super().__init__(ref_vector = SkyCoord(ra=0 * u.deg, dec=90 * u.deg, - frame="icrs"), + super().__init__(ref_vector = [0,0,1], + frame="icrs", clockwise = False) # Stereographic projection convention -class StereographicConvention(PolarizationConvention): +@PolarizationConvention.register("stereographic") +class StereographicConvention(PolarizationConvention, ConventionInSpacecraftFrameMixin): def __init__(self, clockwise: bool = False, @@ -278,20 +351,34 @@ def __init__(self, Spacecraft orientation """ - self._attitude = attitude + if attitude is None: + self._frame = None + else: + self._frame = SpacecraftFrame(attitude=attitude) self._sign = 1 if clockwise else -1 @property def frame(self): - return SpacecraftFrame(attitude = self._attitude) - - def get_basis(self, source_direction: SkyCoord): - # Extract Cartesian coordinates for the source direction - x, y, z = source_direction.cartesian.xyz + return self._frame + + def get_basis_local(self, source_vector:np.ndarray[float]): + """ + source_vector already in SC coordinates as a vector + + Parameters + ---------- + source_vector: (3,N) + + Returns + ------- + px,py: Basis vector. (2,N). Also in SC coordinates + """ + + x,y,z = source_vector # Calculate the projection of the reference vector in stereographic coordinates - px_x = 1 - (x**2 - y**2) / (z + 1) ** 2 + px_x = 1 - (x ** 2 - y ** 2) / (z + 1) ** 2 px_y = -2 * x * y / (z + 1) ** 2 px_z = -2 * x / (z + 1) @@ -303,10 +390,7 @@ def get_basis(self, source_direction: SkyCoord): px /= norm # Calculate the perpendicular vector py using the cross product - py = self._sign*np.cross([x, y, z], px, axis=0) + py = self._sign * np.cross([x, y, z], px, axis=0) + + return px,py - # To SkyCoord - px = SkyCoord(*px, representation_type='cartesian', frame = self.frame) - py = SkyCoord(*py, representation_type='cartesian', frame = self.frame) - - return px, py From 27917c8641eedf56ba96e53916c83ca9d5e7952c Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sun, 26 Oct 2025 19:03:03 -0400 Subject: [PATCH 109/133] doc random event. wip Signed-off-by: Israel Martinez --- cosipy/interfaces/instrument_response_interface.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/cosipy/interfaces/instrument_response_interface.py b/cosipy/interfaces/instrument_response_interface.py index 4d1812b9..0f250936 100644 --- a/cosipy/interfaces/instrument_response_interface.py +++ b/cosipy/interfaces/instrument_response_interface.py @@ -76,9 +76,14 @@ def event_probability(self, query: Iterable[Tuple[PhotonInterface, EventInterfac def random_events(self, photons:Iterable[PhotonInterface]) -> Iterable[EventInterface]: """ - Return a stream of random events, one per photon + Return a stream of random events, photon by photon. 
+ + The number of output event might be less than the number if input photons, + since some might not be detected """ + + @runtime_checkable class FarFieldInstrumentResponseFunctionInterface(InstrumentResponseFunctionInterface, Protocol): From e574ad841ed49a6019f11ce4a688ed528b22e300 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sun, 26 Oct 2025 19:03:14 -0400 Subject: [PATCH 110/133] Add photons with polarization Signed-off-by: Israel Martinez --- cosipy/interfaces/photon_parameters.py | 35 +++++++++++++++++++++++--- cosipy/response/photon_types.py | 20 ++++++++++++++- 2 files changed, 51 insertions(+), 4 deletions(-) diff --git a/cosipy/interfaces/photon_parameters.py b/cosipy/interfaces/photon_parameters.py index 0e4815e6..0fe4516a 100644 --- a/cosipy/interfaces/photon_parameters.py +++ b/cosipy/interfaces/photon_parameters.py @@ -4,6 +4,8 @@ from astropy.coordinates import BaseCoordinateFrame, SkyCoord from scoords import SpacecraftFrame +from cosipy.polarization import PolarizationConvention, PolarizationAngle, StereographicConvention + @runtime_checkable class PhotonInterface(Protocol): @@ -37,7 +39,7 @@ def direction_lon_radians(self) -> float: ... def direction_lat_radians(self) -> float: ... @property - def direction_direction(self) -> SkyCoord: + def direction(self) -> SkyCoord: """ Add fancy energy quantity """ @@ -52,12 +54,39 @@ class PhotonInSCFrameInterface(PhotonInterface, Protocol): @property def frame(self) -> SpacecraftFrame:... +@runtime_checkable class PhotonWithDirectionInSCFrameInterface(PhotonWithDirectionInterface, - PhotonInSCFrameInterface): + PhotonInSCFrameInterface, Protocol): pass +@runtime_checkable class PhotonWithDirectionAndEnergyInSCFrameInterface(PhotonWithDirectionInSCFrameInterface, - PhotonWithEnergyInterface): + PhotonWithEnergyInterface, Protocol): pass +@runtime_checkable +class PolarizedPhotonInterface(Protocol): + + def polarization_angle_rad(self) -> float: ... + + def polarization_convention(self) -> PolarizationConvention:... 
+ + def polarization_angle(self) -> PolarizationAngle: + """ + This convenience function only makes sense for implementations + that couple with PhotonWithDirectionInterface + """ + raise NotImplementedError("This class does not implement the polarization_angle() convenience method.") + +@runtime_checkable +class PolarizedPhotonStereographicConventionInSCInterface(PolarizedPhotonInterface, PhotonInSCFrameInterface, Protocol): + + def polarization_convention(self) -> PolarizationConvention: + return StereographicConvention() + +@runtime_checkable +class PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConventionInterface(PhotonWithDirectionAndEnergyInSCFrameInterface, PolarizedPhotonStereographicConventionInSCInterface, Protocol): + + def polarization_angle(self) -> PolarizationAngle: + return PolarizationAngle(self._pa * u.rad, self.direction, 'stereographic') diff --git a/cosipy/response/photon_types.py b/cosipy/response/photon_types.py index f092d686..6e06703d 100644 --- a/cosipy/response/photon_types.py +++ b/cosipy/response/photon_types.py @@ -1,7 +1,12 @@ +from astropy.coordinates import SkyCoord from scoords import SpacecraftFrame -from cosipy.interfaces.photon_parameters import PhotonWithDirectionAndEnergyInSCFrameInterface +from cosipy.interfaces.photon_parameters import PhotonWithDirectionAndEnergyInSCFrameInterface, \ + PolarizedPhotonStereographicConventionInSCInterface, \ + PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConventionInterface +from cosipy.polarization import PolarizationAngle +from astropy import units as u class PhotonWithDirectionAndEnergyInSCFrame(PhotonWithDirectionAndEnergyInSCFrameInterface): @@ -23,3 +28,16 @@ def direction_lon_radians(self) -> float: @property def direction_lat_radians(self) -> float: return self._lat + +class PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConvention(PhotonWithDirectionAndEnergyInSCFrame, PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConventionInterface): + + def __init__(self, direction_lon_radians, direction_lat_radians, energy_keV, polarization_angle_radians): + + super().__init__(direction_lon_radians, direction_lat_radians, energy_keV) + + self._pa = polarization_angle_radians + + def polarization_angle_rad(self) -> float: + return self._pa + + From 1994b0cecb66bff5ee5455de08cd35ef6c906098 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sun, 26 Oct 2025 19:03:37 -0400 Subject: [PATCH 111/133] WIP. Most distributions ready for the ideal response. 
Signed-off-by: Israel Martinez --- cosipy/response/ideal_response.py | 406 ++++++++++++++++++++++++++++++ 1 file changed, 406 insertions(+) create mode 100644 cosipy/response/ideal_response.py diff --git a/cosipy/response/ideal_response.py b/cosipy/response/ideal_response.py new file mode 100644 index 00000000..31d5a339 --- /dev/null +++ b/cosipy/response/ideal_response.py @@ -0,0 +1,406 @@ +from typing import Iterable, Tuple + +from astropy.coordinates import Angle +from astropy.units import Quantity +from scipy.stats import rv_continuous, truncnorm, norm, uniform +from scipy.stats.sampling import SimpleRatioUniforms +import astropy.units as u +import numpy as np + +from cosipy.interfaces.event import EmCDSEventInSCFrameInterface +from cosipy.interfaces.instrument_response_interface import FarFieldInstrumentResponseFunctionInterface +from cosipy.interfaces.photon_parameters import PhotonInterface +from cosipy.response.photon_types import PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConventionInterface as PolDirESCPhoton +from scipy.special import erfi, erf + +def _to_rad(angle): + if isinstance(angle, (Quantity, Angle)): + return angle.to_value(u.rad) + else: + return angle + +class _RVSMixin: + """ + Helper mixin for custom distributions (rv_continuous subclasses) + that will likely only get a sample per setup + + Subclasses need to define _pdf and _cdf + """ + + def _rvs(self, *args, size=None, random_state=None): + + # Faster than default _rvs for large sizes, but slow setup + # Most of the time we'll need a new setup per energy + + if size == tuple(): + # Weird default by rv_continous + size = None + + if size is None: + return super()._rvs(*args, size=size, random_state=random_state) + else: + + rng = SimpleRatioUniforms(self, random_state=random_state) + + return rng.rvs(size=size) + +class KleinNishinaPolarScatteringAngleDist(rv_continuous, _RVSMixin): + """ + Klein-Nishina scattering angle distribution + """ + + def __init__(self, energy, *args, **kwargs): + + super().__init__(0, *args, a=0, b=np.pi, **kwargs) + + self._eps = energy.to_value(u.keV) / 510.99895069 # E/m_ec^2 + + # Normalization + # Mathematica + # Integrate[( + # Sin[\[Theta]] (1 + 1/( + # 1 + \[Epsilon] (1 - Cos[\[Theta]])) + \[Epsilon] (1 - + # Cos[\[Theta]]) - + # Sin[\[Theta]]^2))/(1 + \[Epsilon] (1 - + # Cos[\[Theta]]))^2, {\[Theta], 0, \[Pi]}, + # Assumptions -> {\[Epsilon] > 0}] + + A = 2 * self._eps * (2 + self._eps * (1 + self._eps) * (8 + self._eps)) / (1 + 2 * self._eps) ** 2 + B = (-2 + self._eps * (self._eps - 2)) * np.log(1 + 2 * self._eps) + + self._norm = (A + B) / self._eps ** 3 + + def _pdf(self, phi, *args): + + # Substitute Compton kinematic equation in Klein-Nishina dsigma/dOmega + # Mathematica + # eratio = 1 + self._eps (1 - cos_phi]) (*e/ep*) + # (1/eratio)^2 (1/eratio + eratio - Sin[\[Theta]]^2) Sin[\[Theta]] + + sin_phi = np.sin(phi) + cos_phi = np.cos(phi) + + A = 1 + (1 / (1 + self._eps * (1 - cos_phi))) + self._eps * (1 - cos_phi) - sin_phi ** 2 + B = (1 + self._eps * (1 - cos_phi)) ** 2 + + # Extra sin(phi) to account for phasespace + return sin_phi * A / B / self._norm + + def _cdf(self, phi, *args): + + # Mathematica + # Integrate[( + # Sin[\[Theta]] (1 + 1/( + # 1 + self._eps (1 - cos_phi])) + self._eps (1 - + # cos_phi]) - + # Sin[\[Theta]]^2))/ ((1 + self._eps (1 - + # cos_phi]))^2), {\[Theta], 0, \[Theta]p}, + # Assumptions -> {self._eps > 0, \[Theta]p < \[Pi], \[Theta]p > 0}] + + sin_phi = np.sin(phi) + cos_phi = np.cos(phi) + + eps = self._eps + eps2 = eps * eps + 
eps3 = eps2 * eps + + A = 1 + eps - eps * cos_phi + logA = np.log(A) + B = (eps * (4 + 10 * eps + 8 * eps2 + eps3) \ + - 2 * eps3 * cos_phi ** 3 \ + + 2 * (1 + eps) ** 2 * (-2 - 2 * eps + eps2) * logA \ + + eps2 * cos_phi ** 2 * (6 + 10 * eps + eps2 + 2 * (-2 - 2 * eps + eps2) * logA) \ + - 2 * eps * cos_phi * (2 + 8 * eps + 8 * eps2 + eps3 \ + + 2 * (-2 - 4 * eps - eps2 + eps3) * logA)) + C = 2 * eps3 * A * A + + return B / C / self._norm + +class KleinNishinaAzimuthalScatteringAngleDist(rv_continuous, _RVSMixin): + + def __init__(self, energy, theta, *args, **kwargs): + """ + Conditional probability, given a polar angle and energy. + + Parameters + ---------- + energy + theta: polar angle + args + kwargs + """ + + super().__init__(0, *args, a=0, b=2*np.pi, **kwargs) + + theta = _to_rad(theta) + + # precompute some stuff + self._eps = energy.to_value(u.keV) / 510.99895069 # E/m_ec^2 + self._sin_theta2 = np.sin(theta) ** 2 + self._energy_ratio = 1 + self._eps * (1 - np.cos(theta)) # From kinematics + self._energy_ratio2 = self._energy_ratio * self._energy_ratio + self._energy_ratio_inv = 1/self._energy_ratio + self._energy_ratio_inv2 = self._energy_ratio_inv * self._energy_ratio_inv + self._energy_ratio_inv3 = self._energy_ratio_inv2 * self._energy_ratio_inv + + # Mathematica + # Integrate[(1/eratio + eratio - 2 sintheta2 Cos[\[Phi]]^2)/ + # eratio^2, {\[Phi], 0, 2 \[Pi]}, + # Assumptions -> {\[Epsilon] > 0}] // FullSimplify + self._norm = 2 * np.pi * (1 + self._energy_ratio2 - self._sin_theta2 * self._energy_ratio) * self._energy_ratio_inv3 + + def _pdf(self, phi, *args): + """ + + Parameters + ---------- + phi: azimuthal angle, starting from the electric field vector direction + args + + Returns + ------- + + """ + + phi = _to_rad(phi) + + cos_phi = np.cos(phi) + + return (self._energy_ratio + self._energy_ratio_inv - 2 * self._sin_theta2 * cos_phi * cos_phi) * self._energy_ratio_inv2 / self._norm + + def _cdf(self, phi, *args): + + phi = _to_rad(phi) + + # Mathematica + # Integrate[(1/eratio + eratio - 2 sintheta2 Cos[\[Phi]]^2)/ + # eratio^2, {\[Phi], 0, \[Phi]lim}, + # Assumptions -> {\[Epsilon] > 0}] // FullSimplify + + A = phi + phi*self._energy_ratio2 - self._energy_ratio * self._sin_theta2 * phi - self._energy_ratio * self._sin_theta2 * np.cos(phi) * np.sin(phi) + + return A * self._energy_ratio_inv3 / self._norm + +class ARMNormDist(rv_continuous): + + def __init__(self, phi, angres, *args, **kwargs): + """ + This accounts for the truncating effect since ARM is limited to [-phi, pi-phi]. 
+ It also accounts for the sin(phi+arm) phasespace + + Parameters + ---------- + phi: Polar scattering angle + angres: Standard deviation of the equivalent gaussian + args + kwargs + """ + + phi = _to_rad(phi) + angres = _to_rad(angres) + + super().__init__(0, *args, a=-phi, b= np.pi - phi, **kwargs) + + # normalized such that int_0^pi random_arm = 1 (already includes sin(phi+arm)) + # Integrate[PDF[TruncatedDistribution[{0,\[Pi]},NormalDistribution [\[Phi],\[Sigma]]], x]Sin[x],{x,0,\[Pi]}]//Re//FullSimplify + # Mathematica couldn't get only the real part analytically + + self._phi = phi + self._angres = angres + + self._norm = np.real( + np.exp(-(angres ** 2 / 2) - 1j * phi) * + (1j * erf((np.pi + 1j * angres ** 2 - phi) / (np.sqrt(2) * angres)) + + np.exp(2j * phi) * (erfi((angres ** 2 - 1j * phi) / (np.sqrt(2) * angres)) - + erfi((1j * np.pi + angres ** 2 - 1j * phi) / (np.sqrt(2) * angres))) + + erfi((angres ** 2 + 1j * phi) / (np.sqrt(2) * angres))) + / (2 * (erf(phi / (np.sqrt(2) * angres)) - erf((-np.pi + phi) / (np.sqrt(2) * angres))))) + + def _pdf(self, arm, *args): + + return truncnorm.pdf(arm, -self._phi / self._angres, (np.pi - self._phi) / self._angres, 0, self._angres) * np.sin(self._phi + arm) / self._norm + + def _rvs(self, *args, size=None, random_state=None): + + rng = SimpleRatioUniforms(self, random_state=random_state) + + return rng.rvs(size=size) + +class ThresholdKleinNishinaPolarScatteringAngleDist(KleinNishinaPolarScatteringAngleDist): + + def __init__(self, energy, energy_threshold=None, *args, **kwargs): + + super().__init__(energy) + + if energy_threshold is None: + self._min_phi = 0 + else: + + # Mathematica + # Solve[e/(e - edepmax) == 1 + \[Epsilon] (1 - (-1)), edepmax] + + max_energy_deposited = 2 * energy * self._eps / (1 + 2 * self._eps) + + if energy_threshold > max_energy_deposited: + raise ValueError( + f"Threshold ({energy_threshold}) is greater than the maximum possible deposited energy ({max_energy_deposited}). PDF cannot be normalized") + + # Mathematica + # Solve[e/(e - ethresh) == + # 1 + \[Epsilon] (1 - Cos[\[Theta]]), \[Theta] ] + + energy_threshold = energy_threshold.to_value(energy.unit) + energy = energy.value + + eps_ediff = self._eps * (energy - energy_threshold) + + self._min_phi = np.arccos((eps_ediff - energy_threshold) / eps_ediff) + + # Renormalize + self._cdf_min_phi = super()._cdf(self._min_phi) + self._norm_factor = 1 / (1 - self._cdf_min_phi) + + def _renormalize(self, phi, prob): + + if np.isscalar(phi): + if phi < self._min_phi: + prob = 0 + else: + prob = np.asarray(prob) + phi = np.asarray(phi) + prob[phi < self._min_phi] = 0 + + prob *= self._norm_factor + + return prob + + def _pdf(self, phi, *args): + + phi = _to_rad(phi) + + prob = super()._pdf(phi, *args) + + return self._renormalize(phi, prob) + + def _cdf(self, phi, *args): + + phi = _to_rad(phi) + + cum_prob = super()._cdf(phi, *args) - self._cdf_min_phi + + return self._renormalize(phi, cum_prob) + +class MeasuredEnergyDist(rv_continuous): + + def __init__(self, energy, energy_res, phi, full_absorp_prob, *args, **kwargs): + """ + This is a *conditional* probability. We will assume the uncertainty on the measured angle phi is 0 + (all the CDS errors will come from the ARM distribution) + + If it is fully absorbed, then the deposited energy equal the initial energy. 
+ + If it escaped, then it will assume that the deposited energy corresponds to the energy of the first hit, + following the Compton equation + + The measured energy will be drawn from a normal distribution + centered at the deposited energy and std equal to energy_deposited*energy_res + + The geometry was not taking into account for the backscatter criterion since it was too complicated. + + Inputs and outputs are values assumed to be in the same units as input energy. + + Parameters + ---------- + energy: initial energy. + energy_res: energy resolution as fraction of the initial energy + phi: polar scattered angle + full_absorp_prob: probability of landing in the photopeak + args + kwargs + """ + + super().__init__(0, *args, a=0, **kwargs) + + if energy_res < 0 or energy_res > 1: + raise ValueError(f"energy_res must be between [0,1]. Got {energy_res}") + + if full_absorp_prob < 0 or full_absorp_prob > 1: + raise ValueError(f"full_absorp_prob must be between [0,1]. Got {full_absorp_prob}") + + eps = (energy / u.Quantity(510.99895069, u.keV)).value + energy = energy.value + + phi = _to_rad(phi) + phi = (phi + np.pi) % (2 * np.pi) - np.pi + energy_deposited = energy / (1 + eps * (1 - np.cos(phi))) + + self._full_prob = full_absorp_prob + self._partial_prob = 1 - full_absorp_prob + + self._dist_full = norm(loc=energy, scale=energy*energy_res) + self._dist_partial = norm(loc=energy_deposited, scale=energy_deposited * energy_res) + + def _pdf(self, measured_energy, *args): + return self._full_prob * self._dist_full.pdf(measured_energy) + self._partial_prob * self._dist_partial.pdf(measured_energy) + + def _cdf(self, measured_energy, *args): + return self._full_prob * self._dist_full.cdf(measured_energy) + self._partial_prob * self._dist_partial.cdf(measured_energy) + + def _rvs(self, *args, size=None, random_state=None): + + full_absorp = uniform.rvs(size=size, random_state = random_state) < self._full_prob + + nfull = np.count_nonzero(full_absorp) + npartial = full_absorp.size - nfull + + samples = np.empty(full_absorp.shape) + + samples[full_absorp] = self._dist_full.rvs(*args, size=nfull, random_state=random_state) + samples[np.logical_not(full_absorp)] = self._dist_partial.rvs(*args, size=npartial, random_state=random_state) + + return samples + +class IdealComptonInstrumentResponseFunction(FarFieldInstrumentResponseFunctionInterface): + + # The photon class and event class that the IRF implementation can handle + photon_type = PolDirESCPhoton + event_type = EmCDSEventInSCFrameInterface + + def event_probability(self, query: Iterable[Tuple[PolDirESCPhoton, EmCDSEventInSCFrameInterface]]) -> Iterable[float]: + """ + Return the probability density of measuring a given event given a photon. + + The units of the output the inverse of the phase space of the class event_type data space. + e.g. if the event measured energy in keV, the units of output of this function are implicitly 1/keV + """ + + # P(phi) * P(E_dep | phi, nulambda, Ei) * P(Em | E_dep) * P(psichi | ) + + def random_events(self, photons: Iterable[PolDirESCPhoton]) -> Iterable[EmCDSEventInSCFrameInterface]: + """ + Return a stream of random events, photon by photon. 
+ + The number of output event might be less than the number if input photons, + since some might not be detected + """ + + def effective_area_cm2(self, photons: Iterable[PolDirESCPhoton]) -> Iterable[float]: + """ + + """ + + def event_probability(self, query: Iterable[Tuple[PolDirESCPhoton, EmCDSEventInSCFrameInterface]]) -> Iterable[float]: + """ + + Parameters + ---------- + query + + Returns + ------- + + """ + + + + From be356d1dc425edc5d0d901e02e1ded7c65597474 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Sun, 26 Oct 2025 21:13:13 -0400 Subject: [PATCH 112/133] Revert. The number of detected events is handled the effective area. Separately. Signed-off-by: Israel Martinez --- cosipy/interfaces/instrument_response_interface.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/cosipy/interfaces/instrument_response_interface.py b/cosipy/interfaces/instrument_response_interface.py index 0f250936..4d1812b9 100644 --- a/cosipy/interfaces/instrument_response_interface.py +++ b/cosipy/interfaces/instrument_response_interface.py @@ -76,14 +76,9 @@ def event_probability(self, query: Iterable[Tuple[PhotonInterface, EventInterfac def random_events(self, photons:Iterable[PhotonInterface]) -> Iterable[EventInterface]: """ - Return a stream of random events, photon by photon. - - The number of output event might be less than the number if input photons, - since some might not be detected + Return a stream of random events, one per photon """ - - @runtime_checkable class FarFieldInstrumentResponseFunctionInterface(InstrumentResponseFunctionInterface, Protocol): From baf805bc372bd77c46007d71109750d21c9adfca Mon Sep 17 00:00:00 2001 From: Hiroki Yoneda Date: Tue, 28 Oct 2025 17:39:18 +0900 Subject: [PATCH 113/133] Added the method apply_gti to SpacecraftHistory --- cosipy/spacecraftfile/spacecraft_file.py | 49 ++++++++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/cosipy/spacecraftfile/spacecraft_file.py b/cosipy/spacecraftfile/spacecraft_file.py index 7acea318..32ecaa9b 100644 --- a/cosipy/spacecraftfile/spacecraft_file.py +++ b/cosipy/spacecraftfile/spacecraft_file.py @@ -16,6 +16,7 @@ import pandas as pd from .scatt_map import SpacecraftAttitudeMap +from cosipy.event_selection import GoodTimeInterval from typing import Union, Optional @@ -466,6 +467,54 @@ def select_interval(self, start:Time = None, stop:Time = None) -> "SpacecraftHis return self.__class__(new_obstime, new_attitude, new_location, new_livetime) + def apply_gti(self, gti: GoodTimeInterval) -> "SpacecraftHistory": + """ + Returns the SpacecraftHistory file class object by masking livetimes outside the good time interval. + + Parameters + ---------- + gti: cosipy.event_selection.GoodTimeInterval + + Returns + ------- + cosipy.spacecraft.SpacecraftHistory + """ + new_obstime = None + new_attitude = None + new_location = None + new_livetime = None + + for i, (start, stop) in enumerate(zip(gti.tstart_list, gti.tstop_list)): + # TODO: this line can be replaced with the following line after the PR in the develop branch is merged. 
+ #for i, (start, stop) in enumerate(gti): + _sph = self.select_interval(start, stop) + + _obstime = _sph.obstime + _attitude = _sph._attitude.as_matrix() + _location = _sph._gcrs.cartesian.xyz + _livetime = _sph.livetime + + if i == 0: + new_obstime = _obstime + new_attitude = _attitude + new_location = _location + new_livetime = _livetime + else: + new_obstime = Time(np.append(new_obstime.jd1, _obstime.jd1), + np.append(new_obstime.jd2, _obstime.jd2), + format='jd') + new_attitude = np.append(new_attitude, _attitude, axis = 0) + new_location = np.append(new_location, _location, axis = 1) + new_livetime = np.append(new_livetime, 0 * new_livetime.unit) # assign livetime of zero between GTIs + new_livetime = np.append(new_livetime, _livetime) + + # finalizing + new_attitude = Attitude.from_matrix(new_attitude, frame=self._attitude.frame) + new_location = GCRS(x = new_location[0], y = new_location[1], z = new_location[2], + representation_type='cartesian') + new_obstime.format = self.obstime.format + + return self.__class__(new_obstime, new_attitude, new_location, new_livetime) def get_target_in_sc_frame(self, target_coord: SkyCoord) -> SkyCoord: From 99c230ee8f14db1223022ebd7e60a45c9f14ca5b Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Tue, 28 Oct 2025 17:20:29 -0400 Subject: [PATCH 114/133] Relative coordinates Signed-off-by: Israel Martinez --- cosipy/interfaces/photon_parameters.py | 7 + cosipy/polarization/conventions.py | 15 +- cosipy/response/ideal_response.py | 297 +++++++++++++++++++++--- cosipy/response/photon_types.py | 21 +- cosipy/response/relative_coordinates.py | 273 ++++++++++++++++++++++ 5 files changed, 561 insertions(+), 52 deletions(-) create mode 100644 cosipy/response/relative_coordinates.py diff --git a/cosipy/interfaces/photon_parameters.py b/cosipy/interfaces/photon_parameters.py index 0fe4516a..9c99055c 100644 --- a/cosipy/interfaces/photon_parameters.py +++ b/cosipy/interfaces/photon_parameters.py @@ -48,6 +48,13 @@ def direction(self) -> SkyCoord: unit=u.rad, frame=self.frame) +# TODO: change to follow the same idea as EventInterface: +# Remove PhotonWithDirectionInterface +# Remove PhotonInSCFrameInterface +# Move direction_lon_radians and direction_lat_radians to PhotonWithDirectionInSCFrameInterface. +# and add a _sc suffix +# I'll need to coordinate first with those who are implemented the responses. 
+ @runtime_checkable class PhotonInSCFrameInterface(PhotonInterface, Protocol): diff --git a/cosipy/polarization/conventions.py b/cosipy/polarization/conventions.py index 9f995f09..9dce0729 100644 --- a/cosipy/polarization/conventions.py +++ b/cosipy/polarization/conventions.py @@ -251,10 +251,7 @@ def __init__(self, axis, attitude = None): else: raise ValueError("Axis must be 'x', 'y' or 'z'.") - if attitude is None: - frame = None - else: - frame = SpacecraftFrame(attitude = attitude) + frame = SpacecraftFrame(attitude = attitude) super().__init__(ref_vector, frame = frame, clockwise = False) @@ -351,10 +348,7 @@ def __init__(self, Spacecraft orientation """ - if attitude is None: - self._frame = None - else: - self._frame = SpacecraftFrame(attitude=attitude) + self._frame = SpacecraftFrame(attitude=attitude) self._sign = 1 if clockwise else -1 @@ -362,7 +356,7 @@ def __init__(self, def frame(self): return self._frame - def get_basis_local(self, source_vector:np.ndarray[float]): + def get_basis_local(self, source_vector:Union[np.ndarray[float], SkyCoord]): """ source_vector already in SC coordinates as a vector @@ -375,6 +369,9 @@ def get_basis_local(self, source_vector:np.ndarray[float]): px,py: Basis vector. (2,N). Also in SC coordinates """ + if isinstance(source_vector, SkyCoord): + source_vector = source_vector.cartesian.xyz + x,y,z = source_vector # Calculate the projection of the reference vector in stereographic coordinates diff --git a/cosipy/response/ideal_response.py b/cosipy/response/ideal_response.py index 31d5a339..92786d66 100644 --- a/cosipy/response/ideal_response.py +++ b/cosipy/response/ideal_response.py @@ -1,18 +1,26 @@ -from typing import Iterable, Tuple +from collections.abc import Callable +from typing import Iterable, Tuple, Union from astropy.coordinates import Angle from astropy.units import Quantity -from scipy.stats import rv_continuous, truncnorm, norm, uniform +from more_itertools.more import sample +from scipy.stats import rv_continuous, truncnorm, norm, uniform, randint from scipy.stats.sampling import SimpleRatioUniforms import astropy.units as u import numpy as np from cosipy.interfaces.event import EmCDSEventInSCFrameInterface from cosipy.interfaces.instrument_response_interface import FarFieldInstrumentResponseFunctionInterface -from cosipy.interfaces.photon_parameters import PhotonInterface -from cosipy.response.photon_types import PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConventionInterface as PolDirESCPhoton +from cosipy.interfaces.photon_parameters import PhotonInterface, PhotonWithDirectionAndEnergyInSCFrameInterface, \ + PhotonWithEnergyInterface, PhotonWithDirectionInSCFrameInterface +from cosipy.response.photon_types import \ + PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConventionInterface as PolDirESCPhoton, \ + PhotonWithDirectionAndEnergyInSCFrame from scipy.special import erfi, erf +from cosipy.util.iterables import itertools_batched + + def _to_rad(angle): if isinstance(angle, (Quantity, Angle)): return angle.to_value(u.rad) @@ -32,11 +40,7 @@ def _rvs(self, *args, size=None, random_state=None): # Faster than default _rvs for large sizes, but slow setup # Most of the time we'll need a new setup per energy - if size == tuple(): - # Weird default by rv_continous - size = None - - if size is None: + if size is None or size == tuple(): return super()._rvs(*args, size=size, random_state=random_state) else: @@ -44,7 +48,19 @@ def _rvs(self, *args, size=None, random_state=None): return rng.rvs(size=size) -class 
KleinNishinaPolarScatteringAngleDist(rv_continuous, _RVSMixin): +class _SimpleRVSMixin: + """ + Helper mixin for custom distributions (rv_continuous subclasses) + using SimpleRatioUniforms + + Subclasses need to define _pdf + + """ + def _rvs(self, *args, size=None, random_state=None): + rng = SimpleRatioUniforms(self, random_state=random_state) + return rng.rvs(size=size) + +class KleinNishinaPolarScatteringAngleDist(_RVSMixin, rv_continuous): """ Klein-Nishina scattering angle distribution """ @@ -116,7 +132,7 @@ def _cdf(self, phi, *args): return B / C / self._norm -class KleinNishinaAzimuthalScatteringAngleDist(rv_continuous, _RVSMixin): +class KleinNishinaAzimuthalScatteringAngleDist(_RVSMixin, rv_continuous): def __init__(self, energy, theta, *args, **kwargs): """ @@ -181,7 +197,7 @@ def _cdf(self, phi, *args): return A * self._energy_ratio_inv3 / self._norm -class ARMNormDist(rv_continuous): +class ARMNormDist(_SimpleRVSMixin, rv_continuous): def __init__(self, phi, angres, *args, **kwargs): """ @@ -220,11 +236,62 @@ def _pdf(self, arm, *args): return truncnorm.pdf(arm, -self._phi / self._angres, (np.pi - self._phi) / self._angres, 0, self._angres) * np.sin(self._phi + arm) / self._norm +class ARMMultiNormDist(rv_continuous): + + def __init__(self, phi, angres, angres_weights, *args, **kwargs): + """ + Describe the ARM distribution by a combination of multiple [truncated] gaussians + + Parameters + ---------- + phi + angres + angres_weights + args + kwargs + """ + + phi = _to_rad(phi) + angres = _to_rad(angres) + + super().__init__(0, *args, a=-phi, b= np.pi - phi, **kwargs) + + angres = np.atleast_1d(angres) + + weights = np.broadcast_to(angres_weights, angres.shape) + self._weights = weights / np.sum(weights) + self._dists = [ARMNormDist(phi, res) for res in angres] + + def _pdf(self, arm, *args): + + prob = np.zeros(np.shape(arm)) + + for w,dist in zip(self._weights,self._dists): + prob += w*dist._pdf(arm) + + return prob + def _rvs(self, *args, size=None, random_state=None): - rng = SimpleRatioUniforms(self, random_state=random_state) + if random_state is None: + random_state = self.random_state + + samples = np.empty(size) + + idx = random_state.choice(np.arange(len(self._dists)), size = size, p = self._weights) + + for i in range(len(self._dists)): + + dist = self._dists[i] + + mask = idx == i + + nmask = np.count_nonzero(mask) + + samples[mask] = dist._rvs(size = nmask) + + return samples - return rng.rvs(size=size) class ThresholdKleinNishinaPolarScatteringAngleDist(KleinNishinaPolarScatteringAngleDist): @@ -312,7 +379,7 @@ def __init__(self, energy, energy_res, phi, full_absorp_prob, *args, **kwargs): Parameters ---------- energy: initial energy. - energy_res: energy resolution as fraction of the initial energy + energy_res: function returning the energy resolution function of energy. Both input and output have energy units phi: polar scattered angle full_absorp_prob: probability of landing in the photopeak args @@ -321,14 +388,10 @@ def __init__(self, energy, energy_res, phi, full_absorp_prob, *args, **kwargs): super().__init__(0, *args, a=0, **kwargs) - if energy_res < 0 or energy_res > 1: - raise ValueError(f"energy_res must be between [0,1]. Got {energy_res}") - if full_absorp_prob < 0 or full_absorp_prob > 1: raise ValueError(f"full_absorp_prob must be between [0,1]. 
Got {full_absorp_prob}") eps = (energy / u.Quantity(510.99895069, u.keV)).value - energy = energy.value phi = _to_rad(phi) phi = (phi + np.pi) % (2 * np.pi) - np.pi @@ -337,8 +400,8 @@ def __init__(self, energy, energy_res, phi, full_absorp_prob, *args, **kwargs): self._full_prob = full_absorp_prob self._partial_prob = 1 - full_absorp_prob - self._dist_full = norm(loc=energy, scale=energy*energy_res) - self._dist_partial = norm(loc=energy_deposited, scale=energy_deposited * energy_res) + self._dist_full = norm(loc=energy.value, scale = energy_res(energy).to_value(energy.unit)) + self._dist_partial = norm(loc=energy_deposited.value, scale = energy_res(energy_deposited).to_value(energy.unit)) def _pdf(self, measured_energy, *args): return self._full_prob * self._dist_full.pdf(measured_energy) + self._partial_prob * self._dist_partial.pdf(measured_energy) @@ -360,12 +423,169 @@ def _rvs(self, *args, size=None, random_state=None): return samples -class IdealComptonInstrumentResponseFunction(FarFieldInstrumentResponseFunctionInterface): +class LogGaussianCosThetaEffectiveArea: + + def __init__(self, + max_area:Quantity, + max_area_energy:Quantity, + sigma_decades: float, + batch_size = 1000): + """ + The effective area is represented as a log-gaussian as function of energy and + a cos(theta) dependence as a function of the instrument colatitude theta. + =0 beyond theta = 90 deg + + Parameters + ---------- + max_area: maximum effective area + max_area_energy: energy where the effective area peaks + sigma_decades: + """ + + self._max_area = max_area + self._max_area_energy = max_area_energy.to_value(u.keV) + self._sigma_decades = sigma_decades + + self._batch_size = batch_size + + def __call__(self, photons = Iterable[PhotonWithDirectionAndEnergyInSCFrameInterface]) -> Iterable[Quantity]: + """ + """ + + for batch in itertools_batched(photons, self._batch_size): + + energy = [] + latitude = [] + + for photon in batch: + + energy.append(photon.energy_keV) + latitude.append(photon.direction_lat_radians) + + energy = np.asarray(energy) + latitude = np.asarray(latitude) + + area = self._max_area * np.exp(-np.log10(energy / self._max_area_energy) ** 2 / 2 / self._sigma_decades / self._sigma_decades) + + area *= np.sin(latitude) + area[latitude < 0] = 0 + + yield from area + +class ConstantFractEnergyRes: + + def __init__(self, energy_res,batch_size = 1000): + """ + + Parameters + ---------- + energy_res: fraction + """ + + self._energy_res = energy_res + self._batch_size = batch_size + + def __call__(self, photons = Iterable[PhotonWithEnergyInterface]) -> Iterable[Quantity]: + """ + """ + + for batch in itertools_batched(photons, self._batch_size): + + energy = np.asarray([photon.energy_keV for photon in batch]) + + yield from Quantity(energy * self._energy_res, u.keV, copy=False) + +class ConstantAngularResolution: + + def __init__(self, angres, weights = None): + self._angres = np.atleast_1d(angres) + + if weights is None: + weights = np.ones(self._angres.size) + + self._weights = weights / np.sum(weights) + + def __call__(self, photons=Iterable[PhotonWithDirectionInSCFrameInterface]) -> Iterable[Quantity]: + for _ in photons: + yield self._angres, self._weights + +class ConstantTimesExponentialCutoffFullAbsorption: + + def __init__(self, base:float, cutoff_energy:Quantity, batch_size = 1000): + self._base = base + self._cutoff_energy = cutoff_energy.to_value(u.keV) + self._batch_size = batch_size + + def __call__(self, photons = Iterable[PhotonWithEnergyInterface]) -> Iterable[Quantity]: + """ + 
""" + + for batch in itertools_batched(photons, self._batch_size): + + energy = np.asarray([photon.energy_keV for photon in batch]) + + prob = self._base * np.exp(-energy / self._cutoff_energy) + + yield from prob + +class IdealComptonIRF(FarFieldInstrumentResponseFunctionInterface): # The photon class and event class that the IRF implementation can handle photon_type = PolDirESCPhoton event_type = EmCDSEventInSCFrameInterface + def __init__(self, + effective_area:Callable[[Iterable[PhotonInterface]], Quantity], + energy_resolution:Callable[[PhotonInterface], Quantity], + angular_resolution:Callable[[PhotonInterface], Tuple[Quantity, np.ndarray[float]]], + full_absorption_prob:Callable[[Iterable[PhotonInterface]], Quantity], + energy_threshold:Union[None, Quantity] = None, + ): + + self._effective_area = effective_area + self._energy_resolution = energy_resolution + self._angular_resolution = angular_resolution + self._full_prob = full_absorption_prob + + if energy_threshold is None: + self.energy_threshold = 0*u.keV + else: + self._energy_threshold = energy_threshold + + @classmethod + def cosi_like(cls): + """ + Similar performance as COSI. Meant for code development, not science or sensitivity predictions. + + Returns + ------- + + """ + + max_area = 110 * u.cm * u.cm + max_area_energy = 1500 * u.keV + sigma_decades = 0.4 + energy_resolution = 0.01 + angres = np.deg2rad(3) + angres_fact = np.asarray([1 / 3., 1, 3, 9]) + angres_weights = np.asarray([1, 4, 10, 20]) + full_absorption_constant = 0.7 + full_absorption_exp_cutoff = 10*u.MeV + energy_threshold = 20*u.keV + + effective_area = LogGaussianCosThetaEffectiveArea(max_area, max_area_energy, sigma_decades) + energy_resolution = ConstantFractEnergyRes(energy_resolution) + angular_resolution = ConstantAngularResolution(angres * angres_fact, angres_weights) + full_absorption_prob = ConstantTimesExponentialCutoffFullAbsorption(full_absorption_constant, full_absorption_exp_cutoff) + + return cls(effective_area, + energy_resolution, + angular_resolution, + full_absorption_prob, + energy_threshold) + + + def event_probability(self, query: Iterable[Tuple[PolDirESCPhoton, EmCDSEventInSCFrameInterface]]) -> Iterable[float]: """ Return the probability density of measuring a given event given a photon. @@ -379,28 +599,33 @@ def event_probability(self, query: Iterable[Tuple[PolDirESCPhoton, EmCDSEventInS def random_events(self, photons: Iterable[PolDirESCPhoton]) -> Iterable[EmCDSEventInSCFrameInterface]: """ Return a stream of random events, photon by photon. 
- - The number of output event might be less than the number if input photons, - since some might not be detected """ - def effective_area_cm2(self, photons: Iterable[PolDirESCPhoton]) -> Iterable[float]: - """ + for photon in photons: - """ + energy = photon.energy + energy_res = next(self._energy_resolution([photon])) + full_absorp_prob = next(self._full_prob([photon])) - def event_probability(self, query: Iterable[Tuple[PolDirESCPhoton, EmCDSEventInSCFrameInterface]]) -> Iterable[float]: - """ + # Random polar (phi) and azimuthal angle from Klein Nishina + phi = KleinNishinaPolarScatteringAngleDist(energy).rvs(size = 1) + azimuth = KleinNishinaAzimuthalScatteringAngleDist(energy, phi) - Parameters - ---------- - query + # Get the measured energy based on phi and the energy resolution and absroption probabity for the photon location + measured_energy = MeasuredEnergyDist(energy, energy_res, phi, full_absorp_prob).rvs(size = 1) * energy.unit - Returns - ------- + # Get a random ARM + angres, weights = next(self._angular_resolution([photon])) + arm = ARMMultiNormDist(phi, angres, weights) - """ + # Transform arm and az to phichi + def effective_area_cm2(self, photons: Iterable[PolDirESCPhoton]) -> Iterable[float]: + """ + + """ + return [a.to_value(u.cm*u.cm) for a in self._effective_area(photons)] + diff --git a/cosipy/response/photon_types.py b/cosipy/response/photon_types.py index 6e06703d..7e3b2fd3 100644 --- a/cosipy/response/photon_types.py +++ b/cosipy/response/photon_types.py @@ -3,24 +3,31 @@ from cosipy.interfaces.photon_parameters import PhotonWithDirectionAndEnergyInSCFrameInterface, \ PolarizedPhotonStereographicConventionInSCInterface, \ - PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConventionInterface + PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConventionInterface, PhotonWithEnergyInterface from cosipy.polarization import PolarizationAngle from astropy import units as u -class PhotonWithDirectionAndEnergyInSCFrame(PhotonWithDirectionAndEnergyInSCFrameInterface): +class PhotonWithEnergy(PhotonWithEnergyInterface): - frame = SpacecraftFrame() - - def __init__(self, direction_lon_radians, direction_lat_radians, energy_keV): + def __init__(self, energy_keV): self._energy = energy_keV - self._lon = direction_lon_radians - self._lat = direction_lat_radians @property def energy_keV(self) -> float: return self._energy +class PhotonWithDirectionAndEnergyInSCFrame(PhotonWithEnergy, PhotonWithDirectionAndEnergyInSCFrameInterface): + + frame = SpacecraftFrame() + + def __init__(self, direction_lon_radians, direction_lat_radians, energy_keV): + + super().__init__(energy_keV) + + self._lon = direction_lon_radians + self._lat = direction_lat_radians + @property def direction_lon_radians(self) -> float: return self._lon diff --git a/cosipy/response/relative_coordinates.py b/cosipy/response/relative_coordinates.py new file mode 100644 index 00000000..5e7eb454 --- /dev/null +++ b/cosipy/response/relative_coordinates.py @@ -0,0 +1,273 @@ +from typing import Union + +import numpy as np +from astropy.coordinates import SkyCoord, Angle +from astropy.units import Quantity +from cosipy.polarization import PolarizationConvention, StereographicConvention + +from astropy import units as u + +class RelativeCDSCoordinates: + + def __init__(self, + source_direction:Union[SkyCoord, np.ndarray[float]], + pol_convention:PolarizationConvention): + """ + Size N + + Parameters + ---------- + source_direction: SkyCoord or normalized vector (3,N) + pol_convention + """ + + if 
isinstance(source_direction, SkyCoord): + + # Convert to convention frame + self._frame = pol_convention.frame + self._representation_type = source_direction.representation_type + source_direction = source_direction.transform_to(self._frame) + self._source_vec = self._standardize_vector(source_direction) + + else: + + # Assume it's already in the convention frame + self._frame = None + self._representation_type = None + self._source_vec = source_direction + + self._px, self._py = pol_convention.get_basis_local(source_direction) + + @staticmethod + def _standardize_angle(angle): + if isinstance(angle, (Quantity, Angle)): + angle = angle.to_value(u.rad) + + return np.asarray(angle) + + @staticmethod + def _standardize_vector(direction): + if isinstance(direction, SkyCoord): + direction = direction.cartesian.xyz + + return np.asarray(direction) + + def to_cds(self, phi, az): + """ + From coordinate relative to the source direction to the gamma-ray scattered direction. + + Parameters + ---------- + phi: + Angular distance with respect to the source direction. Can have shape (N,) or (N,M). + az: + Azimuthal angle around the source direction, with a 0-direction defined by the + polarization convention. Same size as phi or broadcastable. + + Returns + ------- + The scattered direction + Shape: + If working with pure vectors: (3, N, M) (or broadcastable, e.g. (3,1,M) + If working with SkyCoord: (N, M) + """ + + # 1. Convert to a numpy array of radians + # 2. Add axis to broadcast with x,y,z coordinates + phi = self._standardize_angle(phi) + az = self._standardize_angle(az) + + # Get the right shape for broadcasting + phi,az = np.broadcast_arrays(phi, az) + phi = phi[np.newaxis] + az = az[np.newaxis] + new_dims = tuple(range(self._source_vec.ndim, phi.ndim)) + source_vec = np.expand_dims(self._source_vec, new_dims) + px = np.expand_dims(self._px, new_dims) + py = np.expand_dims(self._py, new_dims) + + # Sum over each basis vector, without allocating multiple arrays + psichi_vec = px * np.cos(az) + psichi_vec += py * np.sin(az) + psichi_vec *= np.sin(phi) + psichi_vec += source_vec * np.cos(phi) + + + # Convert to skycoord if needed + if self._frame is not None: + + psichi = SkyCoord(*psichi_vec, + representation_type='cartesian', + frame=self._frame) + + psichi.representation_type = self._representation_type + + return psichi + + else: + + return psichi_vec + + def to_relative(self, psichi:Union[SkyCoord, np.ndarray[float]]): + """ + From the absolute scattered direction, to the coordinates relative + to the source direction. + + Parameters + ---------- + psichi: + Scattered direction + Can have shape: + - Vector: (3,N) or (3,N,M) (or broadcastable, e.g. (3,1,M) + - Skycoord: (N,) or (N,M). + + Returns + ------- + phi,az: + phi: Angular distance with respect to the source direction. + az: Azimuthal angle around the source direction, with a 0-direction defined by the + polarization convention. 
+ Each with shape (N,M) + """ + + psichi_vec = self._standardize_vector(psichi) + + # Adjust dimensions for broadcasting + new_dims = tuple(range(self._source_vec.ndim, psichi_vec.ndim)) + source_vec = np.expand_dims(self._source_vec, new_dims) + px = np.expand_dims(self._px, new_dims) + py = np.expand_dims(self._py, new_dims) + + # Get the psichi_perp_vec component along each basis vector + psichi_perp_vec = psichi_vec - source_vec + + # This is equivalent to + # psichi_px = np.sum(px * psichi_perp_vec, axis=0) + # but it does not allocate the temporary px*psichi_perp_vec results + subscripts = 'i...,i...->...' # Mean "product-sum over first dimensions" + psichi_px = np.einsum(subscripts, px, psichi_perp_vec) + psichi_py = np.einsum(subscripts, py, psichi_perp_vec) + psichi_dotprod_source = np.einsum(subscripts, source_vec, psichi_vec) + + # Get the angle from the vector + phi = np.arccos(psichi_dotprod_source) + az = np.arctan2(psichi_py, psichi_px) + + return phi, az + + @staticmethod + def get_relative_cds_phase_space(phi_min = None, phi_max = None, arm_min = None, arm_max = None, az_min = None, az_max = None): + """ + The CDS is described by: + phi: the polar scattering angle + psichi: the direction of the scattered gamma + + Given a source direction, psichi can be parametrized with + - arm equals the minimum angular distance between the psichi and a cone centered at the source direction + with hald-opening angle equal to phi + - az: the azimuthal angle around the source direction + + The total phase space of psichi is that of the sphere. If psi is the colatitude and chi the longitude, then + dV = sin(psi) dphi dpsi dchi + + The total phase space is pi (from phi) time 4*pi (from psichi, that is the sphere area) + + In the reparametrization, this is + dV = sin(phi + arm) dphi darm daz + + While the total phase space remains unchanged, in order to integrate this volume in arbitrary limits + you need take into account the fact that phi+arm range is limited to [0,pi]. + + This function performs such integration by checking all possible integration limit cases. 
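+
+ As a worked check of the volume element above (derived only from the formulas already stated):
+ with the default limits and the physical constraint 0 <= phi + arm <= pi, the arm integral gives
+ Integral[Sin[phi + arm], {arm, -phi, pi - phi}] = 2, the phi integral then gives
+ Integral[2, {phi, 0, pi}] = 2*pi, and the az integral gives 2*pi, so the total volume is
+ 4*pi^2 = pi * 4*pi, i.e. pi (from phi) times 4*pi (the psichi sphere), matching the statement above.
+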
+ + Parameters + ---------- + phi_min: Defaults to 0 + phi_max: default to pi + arm_min: default to -pi + arm_max: default to pi + az_min: default to 0 + az_max: default to 2*pi + + Returns + ------- + Phase space + """ + + if phi_min is None: + phi_min = 0 + + if phi_max is None: + phi_max = np.pi + + if arm_min is None: + arm_min = -np.pi + + if arm_max is None: + arm_max = np.pi + + if az_min is None: + az_min = 0 + + if az_max is None: + az_max = 2*np.pi + + phi_min = RelativeCDSCoordinates._standardize_angle(phi_min) + phi_max = RelativeCDSCoordinates._standardize_angle(phi_max) + arm_min = RelativeCDSCoordinates._standardize_angle(arm_min) + arm_max = RelativeCDSCoordinates._standardize_angle(arm_max) + az_min = RelativeCDSCoordinates._standardize_angle(az_min) + az_max = RelativeCDSCoordinates._standardize_angle(az_max) + + phi_min, phi_max, arm_min, arm_max, az_min, az_max = np.broadcast_arrays(phi_min, phi_max, arm_min, arm_max, az_min, az_max) + + # Handle cases in between the physical boundaries + # Integrate excluding unphysical corners + # Remove unphysical rectangles + arm_min = np.choose((arm_min < -phi_max) & (-phi_max < arm_max), [arm_min, -phi_max]) + arm_max = np.choose((arm_min < np.pi - phi_min) & (np.pi - phi_min < arm_max), [arm_max, np.pi - phi_min]) + + phi_min = np.choose((phi_min < -arm_max) & (-arm_max < phi_max), [phi_min, -arm_max]) + phi_max = np.choose((phi_min < np.pi - arm_min) & (np.pi - arm_min < phi_max), [phi_max, np.pi - arm_min]) + + integral_rect = (az_max - az_min) * ( + -np.sin(arm_min + phi_min) + np.sin(arm_max + phi_min) + np.sin(arm_min + phi_max) - np.sin(arm_max + phi_max)) + + # Remove unphysical corners (triangles or trapezoids) + # Note the (phi1 + arm1) and (phi2 + arm2) masks in front + + # Lower left corner (low phi, low arm) + # Integrate[Sin[phi+arm],{phi,phi1,phi2},{arm,arm1, -phi}]//FullSimplify + phil = np.maximum(-arm_max, phi_min) + phih = np.minimum(-arm_min, phi_max) + unphys_lowerleft_integral = -phih + phil + np.sin(arm_min + phih) - np.sin(arm_min + phil) + unphys_lowerleft_integral *= (phil + arm_min < 0) + integral = integral_rect - (az_max - az_min) * unphys_lowerleft_integral + + # Upper right corner (high phi, high arm) + # Integrate[Sin[phi+arm],{phi,phi1,phi2}, {arm, \[Pi]-phi, arm2}]//FullSimplify + phil = np.maximum(np.pi - arm_max, phi_min) + phih = np.minimum(np.pi - arm_min, phi_max) + unphys_upperright_integral = phil - phih + np.sin(arm_max + phil) - np.sin(arm_max + phih) + unphys_upperright_integral *= (phih + arm_max > np.pi) + integral -= (az_max - az_min) * unphys_upperright_integral + + # Handle fully physical or fully unphysical + fully_phys = (phi_min + arm_min >= 0) & (phi_max + arm_max <= np.pi) + fully_unphys = (phi_max + arm_max <= 0) | (phi_min + arm_min >= np.pi) + + # Mathematica: Integrate[Sin[phi+arm], {phi,phi1,phi2} , {arm,arm1,arm2}]//FullSimplify + integral_full = (az_max - az_min) * ( + -np.sin(arm_min + phi_min) + np.sin(arm_max + phi_min) + np.sin(arm_min + phi_max) - np.sin(arm_max + phi_max)) + + if integral.ndim == 0: + if fully_phys: + return integral + if fully_unphys: + return 0 + else: + integral[fully_phys] = integral_full[fully_phys] + integral[fully_unphys] = 0 + + return integral + From a061b9814cef303cc23cfe51d040bb5024c22600 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Wed, 29 Oct 2025 10:48:36 -0400 Subject: [PATCH 115/133] Faster conversion by avoiding intermediate perpendicular vector step. 
Suggestion by Pascal Signed-off-by: Israel Martinez --- cosipy/response/relative_coordinates.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/cosipy/response/relative_coordinates.py b/cosipy/response/relative_coordinates.py index 5e7eb454..0d639b47 100644 --- a/cosipy/response/relative_coordinates.py +++ b/cosipy/response/relative_coordinates.py @@ -139,19 +139,17 @@ def to_relative(self, psichi:Union[SkyCoord, np.ndarray[float]]): py = np.expand_dims(self._py, new_dims) # Get the psichi_perp_vec component along each basis vector - psichi_perp_vec = psichi_vec - source_vec - # This is equivalent to - # psichi_px = np.sum(px * psichi_perp_vec, axis=0) + # psichi_px_component = np.sum(px * psichi_perp_vec, axis=0) + # for each component # but it does not allocate the temporary px*psichi_perp_vec results - subscripts = 'i...,i...->...' # Mean "product-sum over first dimensions" - psichi_px = np.einsum(subscripts, px, psichi_perp_vec) - psichi_py = np.einsum(subscripts, py, psichi_perp_vec) - psichi_dotprod_source = np.einsum(subscripts, source_vec, psichi_vec) + # and performs the full operation in one step + psichi_px_component, psichi_py_component, psichi_source_component = \ + np.einsum('ji...,ji...->j...',[px,py,source_vec], psichi_vec[np.newaxis]) # Get the angle from the vector - phi = np.arccos(psichi_dotprod_source) - az = np.arctan2(psichi_py, psichi_px) + phi = np.arccos(psichi_source_component) + az = np.arctan2(psichi_py_component, psichi_px_component) return phi, az From a5780ffff84e2656dd90f60868e722017235c886 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Wed, 29 Oct 2025 16:10:35 -0400 Subject: [PATCH 116/133] ideal response random event working! Signed-off-by: Israel Martinez --- cosipy/interfaces/photon_parameters.py | 5 + cosipy/response/ideal_response.py | 139 ++++++++++++++++-------- cosipy/response/relative_coordinates.py | 4 +- 3 files changed, 99 insertions(+), 49 deletions(-) diff --git a/cosipy/interfaces/photon_parameters.py b/cosipy/interfaces/photon_parameters.py index 9c99055c..ce875ba2 100644 --- a/cosipy/interfaces/photon_parameters.py +++ b/cosipy/interfaces/photon_parameters.py @@ -74,10 +74,13 @@ class PhotonWithDirectionAndEnergyInSCFrameInterface(PhotonWithDirectionInSCFram @runtime_checkable class PolarizedPhotonInterface(Protocol): + @property def polarization_angle_rad(self) -> float: ... + @property def polarization_convention(self) -> PolarizationConvention:... 
+ @property def polarization_angle(self) -> PolarizationAngle: """ This convenience function only makes sense for implementations @@ -88,12 +91,14 @@ def polarization_angle(self) -> PolarizationAngle: @runtime_checkable class PolarizedPhotonStereographicConventionInSCInterface(PolarizedPhotonInterface, PhotonInSCFrameInterface, Protocol): + @property def polarization_convention(self) -> PolarizationConvention: return StereographicConvention() @runtime_checkable class PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConventionInterface(PhotonWithDirectionAndEnergyInSCFrameInterface, PolarizedPhotonStereographicConventionInSCInterface, Protocol): + @property def polarization_angle(self) -> PolarizationAngle: return PolarizationAngle(self._pa * u.rad, self.direction, 'stereographic') diff --git a/cosipy/response/ideal_response.py b/cosipy/response/ideal_response.py index 92786d66..bc96025b 100644 --- a/cosipy/response/ideal_response.py +++ b/cosipy/response/ideal_response.py @@ -1,9 +1,14 @@ +import warnings from collections.abc import Callable from typing import Iterable, Tuple, Union from astropy.coordinates import Angle from astropy.units import Quantity +from cosipy.data_io.EmCDSUnbinnedData import EmCDSEventInSCFrame +from cosipy.polarization import StereographicConvention +from cosipy.response.relative_coordinates import RelativeCDSCoordinates from more_itertools.more import sample +from scipy.optimize import minimize_scalar from scipy.stats import rv_continuous, truncnorm, norm, uniform, randint from scipy.stats.sampling import SimpleRatioUniforms import astropy.units as u @@ -20,45 +25,70 @@ from cosipy.util.iterables import itertools_batched - def _to_rad(angle): if isinstance(angle, (Quantity, Angle)): return angle.to_value(u.rad) else: return angle -class _RVSMixin: + +class _SimpleRVSMixin: """ Helper mixin for custom distributions (rv_continuous subclasses) - that will likely only get a sample per setup + using SimpleRatioUniforms + + Subclasses need to define _pdf - Subclasses need to define _pdf and _cdf """ - def _rvs(self, *args, size=None, random_state=None): + @property + def _mode(self): + # Return analytic mode if you can. + # Otherwise it will be estimated numerically + return None - # Faster than default _rvs for large sizes, but slow setup - # Most of the time we'll need a new setup per energy + def _simple_ratio_uniforms_rvs(self, *args, size=None, random_state=None): + if warnings.catch_warnings(): + # Suppress warning + # "WARNING RuntimeWarning: [objid: SROU] 22 : mode: try finding it (numerically) => (distribution) incomplete distribution object, entry missing" + # when the mode need to be computed analytically - if size is None or size == tuple(): - return super()._rvs(*args, size=size, random_state=random_state) - else: + if self._mode is None: + warnings.filterwarnings( + "ignore", + message=r".*\[objid: SROU\].*", + category=RuntimeWarning, + ) - rng = SimpleRatioUniforms(self, random_state=random_state) + rng = SimpleRatioUniforms(self, random_state=random_state, mode=self._mode) - return rng.rvs(size=size) + if size == (): + # SimpleRatioUniforms.rvs expects an integer, tuple of integers or None. + # It crashes with an empty tuple, which corresponds to a scalar. 
+ size = None -class _SimpleRVSMixin: + return rng.rvs(size=size) + + def _rvs(self, *args, **kwargs): + return self._simple_ratio_uniforms_rvs(*args, **kwargs) + +class _RVSMixin(_SimpleRVSMixin): """ Helper mixin for custom distributions (rv_continuous subclasses) - using SimpleRatioUniforms - - Subclasses need to define _pdf + that will likely only get a sample per setup + Subclasses need to define _pdf and _cdf """ - def _rvs(self, *args, size=None, random_state=None): - rng = SimpleRatioUniforms(self, random_state=random_state) - return rng.rvs(size=size) + + def _rvs(self, *args, size=None, **kwargs): + + # Faster than default _rvs for large sizes, but slow setup + # Most of the time we'll need a new setup per energy + + if size is None or size == tuple(): + return super()._rvs(*args, size=size, **kwargs) + else: + return self._simple_ratio_uniforms_rvs(*args, size = size, **kwargs) class KleinNishinaPolarScatteringAngleDist(_RVSMixin, rv_continuous): """ @@ -232,9 +262,11 @@ def __init__(self, phi, angres, *args, **kwargs): erfi((angres ** 2 + 1j * phi) / (np.sqrt(2) * angres))) / (2 * (erf(phi / (np.sqrt(2) * angres)) - erf((-np.pi + phi) / (np.sqrt(2) * angres))))) + self._truncnorm_dist = truncnorm(-self._phi / self._angres, (np.pi - self._phi) / self._angres, 0, self._angres) + def _pdf(self, arm, *args): - return truncnorm.pdf(arm, -self._phi / self._angres, (np.pi - self._phi) / self._angres, 0, self._angres) * np.sin(self._phi + arm) / self._norm + return self._truncnorm_dist.pdf(arm) * np.sin(self._phi + arm) / self._norm class ARMMultiNormDist(rv_continuous): @@ -292,7 +324,6 @@ def _rvs(self, *args, size=None, random_state=None): return samples - class ThresholdKleinNishinaPolarScatteringAngleDist(KleinNishinaPolarScatteringAngleDist): def __init__(self, energy, energy_threshold=None, *args, **kwargs): @@ -474,7 +505,7 @@ def __call__(self, photons = Iterable[PhotonWithDirectionAndEnergyInSCFrameInter class ConstantFractEnergyRes: - def __init__(self, energy_res,batch_size = 1000): + def __init__(self, energy_res): """ Parameters @@ -483,17 +514,12 @@ def __init__(self, energy_res,batch_size = 1000): """ self._energy_res = energy_res - self._batch_size = batch_size - def __call__(self, photons = Iterable[PhotonWithEnergyInterface]) -> Iterable[Quantity]: + def __call__(self, energy) -> Quantity: """ """ - for batch in itertools_batched(photons, self._batch_size): - - energy = np.asarray([photon.energy_keV for photon in batch]) - - yield from Quantity(energy * self._energy_res, u.keV, copy=False) + return self._energy_res * energy class ConstantAngularResolution: @@ -536,7 +562,7 @@ class IdealComptonIRF(FarFieldInstrumentResponseFunctionInterface): def __init__(self, effective_area:Callable[[Iterable[PhotonInterface]], Quantity], - energy_resolution:Callable[[PhotonInterface], Quantity], + energy_resolution:Callable[[Quantity], Quantity], angular_resolution:Callable[[PhotonInterface], Tuple[Quantity, np.ndarray[float]]], full_absorption_prob:Callable[[Iterable[PhotonInterface]], Quantity], energy_threshold:Union[None, Quantity] = None, @@ -553,7 +579,17 @@ def __init__(self, self._energy_threshold = energy_threshold @classmethod - def cosi_like(cls): + def cosi_like(cls, + max_area = 110 * u.cm * u.cm, + max_area_energy = 1500 * u.keV, + sigma_decades = 0.4, + energy_resolution = 0.01, + angres = 3*u.deg, + angres_fact = [1 / 3., 1, 3, 9], + angres_weights = [1, 4, 10, 20], + full_absorption_constant = 0.5, + full_absorption_exp_cutoff = 10*u.MeV, + energy_threshold = 
20*u.keV): """ Similar performance as COSI. Meant for code development, not science or sensitivity predictions. @@ -562,16 +598,19 @@ def cosi_like(cls): """ - max_area = 110 * u.cm * u.cm - max_area_energy = 1500 * u.keV - sigma_decades = 0.4 - energy_resolution = 0.01 - angres = np.deg2rad(3) - angres_fact = np.asarray([1 / 3., 1, 3, 9]) - angres_weights = np.asarray([1, 4, 10, 20]) - full_absorption_constant = 0.7 - full_absorption_exp_cutoff = 10*u.MeV - energy_threshold = 20*u.keV + max_area = 110 * u.cm * u.cm if max_area is None else max_area + max_area_energy = 1500 * u.keV if max_area_energy is None else max_area_energy + sigma_decades = 0.4 if sigma_decades is None else sigma_decades + energy_resolution = 0.01 if energy_resolution is None else energy_resolution + angres = 3 * u.deg if angres is None else angres + angres_fact = [1 / 3., 1, 3, 9] if angres_fact is None else angres_fact + angres_weights = [1, 4, 10, 20] if angres_weights is None else angres_weights + full_absorption_constant = 0.7 if full_absorption_constant is None else full_absorption_constant + full_absorption_exp_cutoff = 10 * u.MeV if full_absorption_exp_cutoff is None else full_absorption_exp_cutoff + energy_threshold = 20 * u.keV if energy_threshold is None else energy_threshold + + angres_fact = np.asarray(angres_fact) + angres_weights = np.asarray(angres_weights) effective_area = LogGaussianCosThetaEffectiveArea(max_area, max_area_energy, sigma_decades) energy_resolution = ConstantFractEnergyRes(energy_resolution) @@ -604,22 +643,28 @@ def random_events(self, photons: Iterable[PolDirESCPhoton]) -> Iterable[EmCDSEve for photon in photons: energy = photon.energy - energy_res = next(self._energy_resolution([photon])) full_absorp_prob = next(self._full_prob([photon])) # Random polar (phi) and azimuthal angle from Klein Nishina - phi = KleinNishinaPolarScatteringAngleDist(energy).rvs(size = 1) - azimuth = KleinNishinaAzimuthalScatteringAngleDist(energy, phi) + phi = ThresholdKleinNishinaPolarScatteringAngleDist(energy, self._energy_threshold).rvs() + azimuth = KleinNishinaAzimuthalScatteringAngleDist(energy, phi).rvs() + azimuth += photon.polarization_angle_rad() # Get the measured energy based on phi and the energy resolution and absroption probabity for the photon location - measured_energy = MeasuredEnergyDist(energy, energy_res, phi, full_absorp_prob).rvs(size = 1) * energy.unit + measured_energy = MeasuredEnergyDist(energy, self._energy_resolution, phi, full_absorp_prob).rvs() + measured_energy_keV = Quantity(measured_energy, energy.unit, copy=False).to_value(u.keV) # Get a random ARM angres, weights = next(self._angular_resolution([photon])) - arm = ARMMultiNormDist(phi, angres, weights) + arm = ARMMultiNormDist(phi, angres, weights).rvs() - # Transform arm and az to phichi + # Transform arm and az to psichi + psichi = RelativeCDSCoordinates(photon.direction, StereographicConvention()).to_cds(phi + arm, azimuth) + # Put everything in the output event + # The assummed probability assumes that phi is measured exactly, all the uncertainty comes from the error + # in psichi (through the ARM) + yield EmCDSEventInSCFrame(measured_energy_keV, phi, psichi.lon.rad, psichi.lat.rad) def effective_area_cm2(self, photons: Iterable[PolDirESCPhoton]) -> Iterable[float]: diff --git a/cosipy/response/relative_coordinates.py b/cosipy/response/relative_coordinates.py index 0d639b47..81ed4a56 100644 --- a/cosipy/response/relative_coordinates.py +++ b/cosipy/response/relative_coordinates.py @@ -127,7 +127,7 @@ def 
to_relative(self, psichi:Union[SkyCoord, np.ndarray[float]]): phi: Angular distance with respect to the source direction. az: Azimuthal angle around the source direction, with a 0-direction defined by the polarization convention. - Each with shape (N,M) + Each with shape (N,M). Angles. """ psichi_vec = self._standardize_vector(psichi) @@ -151,7 +151,7 @@ def to_relative(self, psichi:Union[SkyCoord, np.ndarray[float]]): phi = np.arccos(psichi_source_component) az = np.arctan2(psichi_py_component, psichi_px_component) - return phi, az + return Angle(phi, unit=u.rad, copy=False), Angle(az, unit=u.rad, copy=False) @staticmethod def get_relative_cds_phase_space(phi_min = None, phi_max = None, arm_min = None, arm_max = None, az_min = None, az_max = None): From 09cbaf29071e4ba4d5c6261b639a26d8b84a8656 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Thu, 30 Oct 2025 17:01:18 -0400 Subject: [PATCH 117/133] ideal response pdf working! Signed-off-by: Israel Martinez --- cosipy/response/ideal_response.py | 150 ++++++++++++++++++++++++++---- cosipy/response/photon_types.py | 1 + 2 files changed, 134 insertions(+), 17 deletions(-) diff --git a/cosipy/response/ideal_response.py b/cosipy/response/ideal_response.py index bc96025b..4704d86a 100644 --- a/cosipy/response/ideal_response.py +++ b/cosipy/response/ideal_response.py @@ -2,12 +2,13 @@ from collections.abc import Callable from typing import Iterable, Tuple, Union -from astropy.coordinates import Angle +from astropy.coordinates import Angle, SkyCoord from astropy.units import Quantity from cosipy.data_io.EmCDSUnbinnedData import EmCDSEventInSCFrame from cosipy.polarization import StereographicConvention from cosipy.response.relative_coordinates import RelativeCDSCoordinates from more_itertools.more import sample +from numpy._typing import NDArray from scipy.optimize import minimize_scalar from scipy.stats import rv_continuous, truncnorm, norm, uniform, randint from scipy.stats.sampling import SimpleRatioUniforms @@ -24,6 +25,8 @@ from scipy.special import erfi, erf from cosipy.util.iterables import itertools_batched +from scoords import SpacecraftFrame + def _to_rad(angle): if isinstance(angle, (Quantity, Angle)): @@ -168,6 +171,8 @@ def __init__(self, energy, theta, *args, **kwargs): """ Conditional probability, given a polar angle and energy. + NOTE: input phi in pdf(phi) and cdf(phi) MUST lie between [0,2*pi]. The results are unpredictable otherwise. + Parameters ---------- energy @@ -331,6 +336,8 @@ def __init__(self, energy, energy_threshold=None, *args, **kwargs): super().__init__(energy) if energy_threshold is None: + self._renormalizable = True + self._renormalizable_error = None self._min_phi = 0 else: @@ -340,23 +347,31 @@ def __init__(self, energy, energy_threshold=None, *args, **kwargs): max_energy_deposited = 2 * energy * self._eps / (1 + 2 * self._eps) if energy_threshold > max_energy_deposited: - raise ValueError( - f"Threshold ({energy_threshold}) is greater than the maximum possible deposited energy ({max_energy_deposited}). PDF cannot be normalized") + self._renormalizable = False + self._renormalizable_error = ValueError( + f"Threshold ({energy_threshold}) is greater than the maximum possible deposited energy ({max_energy_deposited}). 
PDF cannot be normalized") + else: + self._renormalizable = True + self._renormalizable_error = None - # Mathematica - # Solve[e/(e - ethresh) == - # 1 + \[Epsilon] (1 - Cos[\[Theta]]), \[Theta] ] + # Mathematica + # Solve[e/(e - ethresh) == + # 1 + \[Epsilon] (1 - Cos[\[Theta]]), \[Theta] ] - energy_threshold = energy_threshold.to_value(energy.unit) - energy = energy.value + energy_threshold = energy_threshold.to_value(energy.unit) + energy = energy.value - eps_ediff = self._eps * (energy - energy_threshold) + eps_ediff = self._eps * (energy - energy_threshold) - self._min_phi = np.arccos((eps_ediff - energy_threshold) / eps_ediff) + self._min_phi = np.arccos((eps_ediff - energy_threshold) / eps_ediff) # Renormalize - self._cdf_min_phi = super()._cdf(self._min_phi) - self._norm_factor = 1 / (1 - self._cdf_min_phi) + self._cdf_min_phi = None + self._norm_factor = None + + if self._renormalizable: + self._cdf_min_phi = super()._cdf(self._min_phi) + self._norm_factor = 1 / (1 - self._cdf_min_phi) def _renormalize(self, phi, prob): @@ -374,6 +389,13 @@ def _renormalize(self, phi, prob): def _pdf(self, phi, *args): + if not self._renormalizable: + # While the PDF can't be normalized, + # and there we can't have a CDF or RVS, + # we can still return the probability = 0 + # to prevent other code from crashing + return np.zeros_like(phi) + phi = _to_rad(phi) prob = super()._pdf(phi, *args) @@ -382,12 +404,21 @@ def _pdf(self, phi, *args): def _cdf(self, phi, *args): + if not self._renormalizable: + raise self._renormalizable_error + phi = _to_rad(phi) cum_prob = super()._cdf(phi, *args) - self._cdf_min_phi return self._renormalize(phi, cum_prob) + def _rvs(self, *args, **kwargs): + if not self._renormalizable: + raise self._renormalizable_error + + return super()._rvs(*args, **kwargs) + class MeasuredEnergyDist(rv_continuous): def __init__(self, energy, energy_res, phi, full_absorp_prob, *args, **kwargs): @@ -425,8 +456,7 @@ def __init__(self, energy, energy_res, phi, full_absorp_prob, *args, **kwargs): eps = (energy / u.Quantity(510.99895069, u.keV)).value phi = _to_rad(phi) - phi = (phi + np.pi) % (2 * np.pi) - np.pi - energy_deposited = energy / (1 + eps * (1 - np.cos(phi))) + energy_deposited = energy * (1 - 1 / (1 + eps * (1 - np.cos(phi)))) self._full_prob = full_absorp_prob self._partial_prob = 1 - full_absorp_prob @@ -578,6 +608,8 @@ def __init__(self, else: self._energy_threshold = energy_threshold + self._pol_convention = StereographicConvention() + @classmethod def cosi_like(cls, max_area = 110 * u.cm * u.cm, @@ -623,17 +655,101 @@ def cosi_like(cls, full_absorption_prob, energy_threshold) + def _event_probability(self, photon:PolDirESCPhoton, + phi:float, + events:Iterable[EmCDSEventInSCFrameInterface]) -> Iterable[float]: + """ + Computes the probability for a given set of photon parameters, and for all events with the same phi + + Note: it is assumed that all events have the same phi!!! 
+ """ + + # Get some needed values from this query + photon_energy_keV = photon.energy_keV + photon_energy = Quantity(photon_energy_keV, u.keV, copy = False) + measured_energy_keV = np.asarray([event.energy_keV for event in events]) + full_absorp_prob = next(self._full_prob([photon])) + angres, weights = next(self._angular_resolution([photon])) + pa = photon.polarization_angle_rad + psichi_lon = [event.scattered_lon_rad_sc for event in events] + psichi_lat = [event.scattered_lat_rad_sc for event in events] + psichi = SkyCoord(lon = psichi_lon, lat = psichi_lat, unit = u.rad, frame = SpacecraftFrame()) + + # Convert CDF to relative + phi_geom, az = RelativeCDSCoordinates(photon.direction, self._pol_convention).to_relative(psichi) + + # Get probability + # We're assuming the phi measured from kinematics has no errors. Otherwise, the calculation became too complex + # All directional error come from the uncertainty on psichi (through the ARM, in psichi_geom) + # P(phi|Ei) * P(Em | Ei, phi) * P(psichi | phi, Ei, PA) + # P(psichi | phi, Ei, PA) = P(arm | phi) * P(az | phi, Ei) + prob = ThresholdKleinNishinaPolarScatteringAngleDist(photon_energy, self._energy_threshold).pdf(phi) + prob *= MeasuredEnergyDist(photon_energy, self._energy_resolution, phi, full_absorp_prob).pdf(measured_energy_keV) + prob *= ARMMultiNormDist(phi, angres, weights).pdf(phi_geom.rad - phi) + + prob *= KleinNishinaAzimuthalScatteringAngleDist(photon_energy, phi).pdf((az.rad - pa) % (2*np.pi)) + + yield from prob def event_probability(self, query: Iterable[Tuple[PolDirESCPhoton, EmCDSEventInSCFrameInterface]]) -> Iterable[float]: """ Return the probability density of measuring a given event given a photon. The units of the output the inverse of the phase space of the class event_type data space. - e.g. if the event measured energy in keV, the units of output of this function are implicitly 1/keV + e.g. if the event measured photon_energy in keV, the units of output of this function are implicitly 1/keV + + NOTE: this implementation runs fast if you sort the queries by photon, following by the event phi. """ - # P(phi) * P(E_dep | phi, nulambda, Ei) * P(Em | E_dep) * P(psichi | ) + # This allows to sample the PDF for multiple values at once + # Multiple event with the phi pretty much only happen during testing though, + # since for real data the same measured values will not be repeating + last_photon = None + last_phi = None + cached_events = [] + + for photon,event in query: + + phi = event.scattering_angle_rad + + if last_photon is None: + # This only happens for the first event + last_photon = photon + last_phi = phi + cached_events = [event] + continue + + if photon is last_photon: + # We can keep caching values, unless phi changed + + if last_phi is phi: + # Same photon and phi. Keep caching events + cached_events.append(event) + else: + # It's not longer the same. We now need to evaluate and yield what we have so far + yield from self._event_probability(last_photon, last_phi, cached_events) + + # Restart + last_photon = photon + last_phi = phi + cached_events = [event] + + else: + # It's not longer the same. 
We now need to evaluate and yield what we have so far + yield from self._event_probability(last_photon, last_phi, cached_events) + + # Restart + last_photon = photon + last_phi = phi + cached_events = [event] + + # Yield the probability for the leftover events + yield from self._event_probability(last_photon, last_phi, cached_events) + + + + def random_events(self, photons: Iterable[PolDirESCPhoton]) -> Iterable[EmCDSEventInSCFrameInterface]: """ @@ -659,7 +775,7 @@ def random_events(self, photons: Iterable[PolDirESCPhoton]) -> Iterable[EmCDSEve arm = ARMMultiNormDist(phi, angres, weights).rvs() # Transform arm and az to psichi - psichi = RelativeCDSCoordinates(photon.direction, StereographicConvention()).to_cds(phi + arm, azimuth) + psichi = RelativeCDSCoordinates(photon.direction, self._pol_convention).to_cds(phi + arm, azimuth) # Put everything in the output event # The assummed probability assumes that phi is measured exactly, all the uncertainty comes from the error diff --git a/cosipy/response/photon_types.py b/cosipy/response/photon_types.py index 7e3b2fd3..61d70a33 100644 --- a/cosipy/response/photon_types.py +++ b/cosipy/response/photon_types.py @@ -44,6 +44,7 @@ def __init__(self, direction_lon_radians, direction_lat_radians, energy_keV, pol self._pa = polarization_angle_radians + @property def polarization_angle_rad(self) -> float: return self._pa From b16324fd5acc29f8f09990c8ed4d1c047d267182 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Fri, 31 Oct 2025 14:25:37 -0400 Subject: [PATCH 118/133] Polarized and unpolarized version Signed-off-by: Israel Martinez --- cosipy/response/ideal_response.py | 36 ++++++++++++++++++++----------- 1 file changed, 23 insertions(+), 13 deletions(-) diff --git a/cosipy/response/ideal_response.py b/cosipy/response/ideal_response.py index 4704d86a..303d8530 100644 --- a/cosipy/response/ideal_response.py +++ b/cosipy/response/ideal_response.py @@ -584,10 +584,10 @@ def __call__(self, photons = Iterable[PhotonWithEnergyInterface]) -> Iterable[Qu yield from prob -class IdealComptonIRF(FarFieldInstrumentResponseFunctionInterface): +class UnpolarizedIdealComptonIRF(FarFieldInstrumentResponseFunctionInterface): # The photon class and event class that the IRF implementation can handle - photon_type = PolDirESCPhoton + photon_type = PhotonWithDirectionAndEnergyInSCFrameInterface event_type = EmCDSEventInSCFrameInterface def __init__(self, @@ -595,7 +595,7 @@ def __init__(self, energy_resolution:Callable[[Quantity], Quantity], angular_resolution:Callable[[PhotonInterface], Tuple[Quantity, np.ndarray[float]]], full_absorption_prob:Callable[[Iterable[PhotonInterface]], Quantity], - energy_threshold:Union[None, Quantity] = None, + energy_threshold:Union[None, Quantity] = None ): self._effective_area = effective_area @@ -655,6 +655,12 @@ def cosi_like(cls, full_absorption_prob, energy_threshold) + def _az_prob(self, photon, phi, az): + return 1/2/np.pi + + def _random_az(self, photon, phi): + return 2*np.pi*uniform.rvs() + def _event_probability(self, photon:PolDirESCPhoton, phi:float, events:Iterable[EmCDSEventInSCFrameInterface]) -> Iterable[float]: @@ -670,7 +676,6 @@ def _event_probability(self, photon:PolDirESCPhoton, measured_energy_keV = np.asarray([event.energy_keV for event in events]) full_absorp_prob = next(self._full_prob([photon])) angres, weights = next(self._angular_resolution([photon])) - pa = photon.polarization_angle_rad psichi_lon = [event.scattered_lon_rad_sc for event in events] psichi_lat = [event.scattered_lat_rad_sc for event in 
events] psichi = SkyCoord(lon = psichi_lon, lat = psichi_lat, unit = u.rad, frame = SpacecraftFrame()) @@ -687,10 +692,9 @@ def _event_probability(self, photon:PolDirESCPhoton, prob = ThresholdKleinNishinaPolarScatteringAngleDist(photon_energy, self._energy_threshold).pdf(phi) prob *= MeasuredEnergyDist(photon_energy, self._energy_resolution, phi, full_absorp_prob).pdf(measured_energy_keV) prob *= ARMMultiNormDist(phi, angres, weights).pdf(phi_geom.rad - phi) + prob *= self._az_prob(photon, phi, az.rad) - prob *= KleinNishinaAzimuthalScatteringAngleDist(photon_energy, phi).pdf((az.rad - pa) % (2*np.pi)) - - yield from prob + return prob def event_probability(self, query: Iterable[Tuple[PolDirESCPhoton, EmCDSEventInSCFrameInterface]]) -> Iterable[float]: """ @@ -747,10 +751,6 @@ def event_probability(self, query: Iterable[Tuple[PolDirESCPhoton, EmCDSEventInS # Yield the probability for the leftover events yield from self._event_probability(last_photon, last_phi, cached_events) - - - - def random_events(self, photons: Iterable[PolDirESCPhoton]) -> Iterable[EmCDSEventInSCFrameInterface]: """ Return a stream of random events, photon by photon. @@ -763,8 +763,7 @@ def random_events(self, photons: Iterable[PolDirESCPhoton]) -> Iterable[EmCDSEve # Random polar (phi) and azimuthal angle from Klein Nishina phi = ThresholdKleinNishinaPolarScatteringAngleDist(energy, self._energy_threshold).rvs() - azimuth = KleinNishinaAzimuthalScatteringAngleDist(energy, phi).rvs() - azimuth += photon.polarization_angle_rad() + azimuth = self._random_az(photon, phi) # Get the measured energy based on phi and the energy resolution and absroption probabity for the photon location measured_energy = MeasuredEnergyDist(energy, self._energy_resolution, phi, full_absorp_prob).rvs() @@ -790,3 +789,14 @@ def effective_area_cm2(self, photons: Iterable[PolDirESCPhoton]) -> Iterable[flo return [a.to_value(u.cm*u.cm) for a in self._effective_area(photons)] +class IdealComptonIRF(UnpolarizedIdealComptonIRF): + + photon_type = PolDirESCPhoton + + def _az_prob(self, photon, phi, az): + pa = photon.polarization_angle_rad + return KleinNishinaAzimuthalScatteringAngleDist(photon.energy, phi).pdf((az - pa) % (2 * np.pi)) + + def _random_az(self, photon, phi): + pa = photon.polarization_angle_rad + return KleinNishinaAzimuthalScatteringAngleDist(photon.energy, phi).rvs() + pa From b44f36bb04f390746c3acaadc292dc3986cd9b20 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Fri, 31 Oct 2025 14:56:51 -0400 Subject: [PATCH 119/133] Revert "Added the method apply_gti to SpacecraftHistory" --- cosipy/spacecraftfile/spacecraft_file.py | 49 ------------------------ 1 file changed, 49 deletions(-) diff --git a/cosipy/spacecraftfile/spacecraft_file.py b/cosipy/spacecraftfile/spacecraft_file.py index 32ecaa9b..7acea318 100644 --- a/cosipy/spacecraftfile/spacecraft_file.py +++ b/cosipy/spacecraftfile/spacecraft_file.py @@ -16,7 +16,6 @@ import pandas as pd from .scatt_map import SpacecraftAttitudeMap -from cosipy.event_selection import GoodTimeInterval from typing import Union, Optional @@ -467,54 +466,6 @@ def select_interval(self, start:Time = None, stop:Time = None) -> "SpacecraftHis return self.__class__(new_obstime, new_attitude, new_location, new_livetime) - def apply_gti(self, gti: GoodTimeInterval) -> "SpacecraftHistory": - """ - Returns the SpacecraftHistory file class object by masking livetimes outside the good time interval. 
- - Parameters - ---------- - gti: cosipy.event_selection.GoodTimeInterval - - Returns - ------- - cosipy.spacecraft.SpacecraftHistory - """ - new_obstime = None - new_attitude = None - new_location = None - new_livetime = None - - for i, (start, stop) in enumerate(zip(gti.tstart_list, gti.tstop_list)): - # TODO: this line can be replaced with the following line after the PR in the develop branch is merged. - #for i, (start, stop) in enumerate(gti): - _sph = self.select_interval(start, stop) - - _obstime = _sph.obstime - _attitude = _sph._attitude.as_matrix() - _location = _sph._gcrs.cartesian.xyz - _livetime = _sph.livetime - - if i == 0: - new_obstime = _obstime - new_attitude = _attitude - new_location = _location - new_livetime = _livetime - else: - new_obstime = Time(np.append(new_obstime.jd1, _obstime.jd1), - np.append(new_obstime.jd2, _obstime.jd2), - format='jd') - new_attitude = np.append(new_attitude, _attitude, axis = 0) - new_location = np.append(new_location, _location, axis = 1) - new_livetime = np.append(new_livetime, 0 * new_livetime.unit) # assign livetime of zero between GTIs - new_livetime = np.append(new_livetime, _livetime) - - # finalizing - new_attitude = Attitude.from_matrix(new_attitude, frame=self._attitude.frame) - new_location = GCRS(x = new_location[0], y = new_location[1], z = new_location[2], - representation_type='cartesian') - new_obstime.format = self.obstime.format - - return self.__class__(new_obstime, new_attitude, new_location, new_livetime) def get_target_in_sc_frame(self, target_coord: SkyCoord) -> SkyCoord: From db9d00045f1b009cad61ccf5d3fe74d92e4735b6 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Mon, 3 Nov 2025 10:29:38 -0500 Subject: [PATCH 120/133] Protect unbinned likelihood from ill defined expectation Signed-off-by: Israel Martinez --- cosipy/statistics/likelihood_functions.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/cosipy/statistics/likelihood_functions.py b/cosipy/statistics/likelihood_functions.py index 8ab5cd29..21b260fc 100644 --- a/cosipy/statistics/likelihood_functions.py +++ b/cosipy/statistics/likelihood_functions.py @@ -65,6 +65,11 @@ def get_log_like(self) -> float: for density_iter_chunk in itertools_batched(self._expectation.expectation_density(), self._batch_size): density = np.fromiter(density_iter_chunk, dtype=float) + + if np.any(density == 0): + # np.log(0) = -inf for any event, no need to keep iterationg + return -np.inf + density_log_sum += np.sum(np.log(density)) nobservations += density.size From 8f4918ea2c5a1e57872e48e7d98b5f126b1c9064 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Mon, 3 Nov 2025 10:58:30 -0500 Subject: [PATCH 121/133] Add missing event type Signed-off-by: Israel Martinez --- cosipy/interfaces/data_interface.py | 26 +++++++++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/cosipy/interfaces/data_interface.py b/cosipy/interfaces/data_interface.py index 16a2be9d..57b8b094 100644 --- a/cosipy/interfaces/data_interface.py +++ b/cosipy/interfaces/data_interface.py @@ -9,7 +9,8 @@ from . 
import EventWithEnergyInterface from .event import EventInterface, TimeTagEventInterface, \ - ComptonDataSpaceInSCFrameEventInterface, TimeTagEmCDSEventInSCFrameInterface, EventWithScatteringAngleInterface + ComptonDataSpaceInSCFrameEventInterface, TimeTagEmCDSEventInSCFrameInterface, EventWithScatteringAngleInterface, \ + EmCDSEventInSCFrameInterface from histpy import Histogram, Axes from astropy.time import Time @@ -86,6 +87,8 @@ def id(self) -> Iterable[int]: @runtime_checkable class TimeTagEventDataInterface(EventDataInterface, Protocol): + event_type = TimeTagEventInterface + def __iter__(self) -> Iterator[TimeTagEventInterface]:... @property @@ -104,6 +107,8 @@ def time(self) -> Time: @runtime_checkable class EventDataWithEnergyInterface(EventDataInterface, Protocol): + event_type = EventWithEnergyInterface + def __iter__(self) -> Iterator[EventWithEnergyInterface]:... @property @@ -120,6 +125,8 @@ def energy(self) -> Quantity: @runtime_checkable class EventDataWithScatteringAngleInterface(EventDataInterface, Protocol): + event_type = EventWithScatteringAngleInterface + def __iter__(self) -> Iterator[EventWithScatteringAngleInterface]:... @property @@ -135,6 +142,8 @@ def scattering_angle(self) -> Angle: @runtime_checkable class ComptonDataSpaceInSCFrameEventDataInterface(EventDataWithScatteringAngleInterface, Protocol): + event_type = ComptonDataSpaceInSCFrameEventInterface + def __iter__(self) -> Iterator[ComptonDataSpaceInSCFrameEventInterface]:... @property @@ -159,7 +168,18 @@ class EventDataInSCFrameInterface(EventDataInterface, Protocol): @property def frame(self) -> SpacecraftFrame:... +@runtime_checkable +class EmCDSEventDataInSCFrameInterface(EventDataWithEnergyInterface, ComptonDataSpaceInSCFrameEventDataInterface, Protocol): + + event_type = EmCDSEventInSCFrameInterface + + def __iter__(self) -> Iterator[EmCDSEventInSCFrameInterface]: ... + +@runtime_checkable class TimeTagEmCDSEventDataInSCFrameInterface(TimeTagEventDataInterface, - EventDataWithEnergyInterface, - ComptonDataSpaceInSCFrameEventDataInterface): + EmCDSEventDataInSCFrameInterface, + Protocol): + + event_type = TimeTagEmCDSEventInSCFrameInterface + def __iter__(self) -> Iterator[TimeTagEmCDSEventInSCFrameInterface]:... 
From 5d63189f7ca5e74a2a39737c9ff547b03ce717f0 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Mon, 3 Nov 2025 10:58:41 -0500 Subject: [PATCH 122/133] Working example of ideal irf Signed-off-by: Israel Martinez --- .../ideal_irf/ideal_irf_line_fit_example.py | 364 ++++++++++++++++++ 1 file changed, 364 insertions(+) create mode 100644 docs/api/interfaces/examples/ideal_irf/ideal_irf_line_fit_example.py diff --git a/docs/api/interfaces/examples/ideal_irf/ideal_irf_line_fit_example.py b/docs/api/interfaces/examples/ideal_irf/ideal_irf_line_fit_example.py new file mode 100644 index 00000000..203f118d --- /dev/null +++ b/docs/api/interfaces/examples/ideal_irf/ideal_irf_line_fit_example.py @@ -0,0 +1,364 @@ +import cProfile +import itertools +import time +from typing import Iterator, Union, Iterable + +import numpy as np +from astropy.coordinates import SkyCoord, Angle +from astropy.units import Quantity +from cosipy.interfaces import EventInterface, ExpectationDensityInterface +from cosipy.interfaces.data_interface import EventDataInSCFrameInterface, EmCDSEventDataInSCFrameInterface +from cosipy.interfaces.event import EmCDSEventInSCFrameInterface + +from astropy import units as u +from cosipy.interfaces.instrument_response_interface import InstrumentResponseFunctionInterface, \ + FarFieldInstrumentResponseFunctionInterface +from cosipy.polarization import PolarizationConvention, PolarizationAngle, StereographicConvention +from cosipy.response.ideal_response import IdealComptonIRF, UnpolarizedIdealComptonIRF, MeasuredEnergyDist, \ + ThresholdKleinNishinaPolarScatteringAngleDist +from cosipy.response.photon_types import PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConvention, \ + PhotonWithDirectionAndEnergyInSCFrame +from cosipy.response.relative_coordinates import RelativeCDSCoordinates +from cosipy.statistics import UnbinnedLikelihood +from histpy import Histogram, Axis, HealpixAxis +from matplotlib import pyplot as plt +from scipy.stats import poisson +from scoords import SpacecraftFrame + + +class RandomEventDataFromLineInSCFrame(EmCDSEventDataInSCFrameInterface): + + def __init__(self, + irf:FarFieldInstrumentResponseFunctionInterface, + flux:Quantity, + duration:Quantity, + energy:Quantity, + direction:SkyCoord, + polarized_irf:FarFieldInstrumentResponseFunctionInterface, + polarization_degree:float = None, + polarization_angle:Union[Angle, Quantity] = None, + polarization_convention:PolarizationConvention = None): + """ + + Parameters + ---------- + irf: Must handle PhotonWithDirectionAndEnergyInSCFrameInterface + flux: Source flux in unit of 1/area/time + duration: Integration time + energy: Source energy (a line) + direction: Source direction (in SC coordinates) + polarized_irf: Must handle PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConventionInterface + polarization_degree + polarization_angle + polarization_convention + """ + + unpolarized_irf = irf + + self.event_type = unpolarized_irf.event_type + + flux_cm2_s = flux.to_value(1/u.cm/u.cm/u.s) + duration_s = duration.to_value(u.s) + + energy_keV = energy.to_value(u.keV) + direction = direction.transform_to('spacecraftframe') + source_direction_lon_rad = direction.lon.rad + source_direction_lat_rad = direction.lat.rad + + unpolarized_photon = PhotonWithDirectionAndEnergyInSCFrame(source_direction_lon_rad, + source_direction_lat_rad, + energy_keV) + + unpolarized_expected_counts = next(iter(irf.effective_area_cm2([unpolarized_photon]))) * flux_cm2_s * duration_s + + if polarization_degree is None: + 
polarization_degree = 0 + + if polarization_degree < 0 or polarization_degree > 1: + raise ValueError(f"polarization_degree must lie between 0 and 1. Got {polarization_degree}") + + if polarization_degree == 0: + polarized_irf = None + polarized_expected_counts = 0 + + else: + + polarized_irf = polarized_irf + + if polarized_irf.event_type is not unpolarized_irf.event_type: + raise TypeError(f"Both IRF need to have the same event type. Got {unpolarized_irf.event_type} and {polarized_irf.event_type}") + + polarization_angle_rad = PolarizationAngle(polarization_angle, direction, polarization_convention).transform_to('stereographic').angle.rad + + polarized_photon = PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConvention(source_direction_lon_rad, + source_direction_lat_rad, + energy_keV, + polarization_angle_rad) + + unpolarized_expected_counts *= (1 - polarization_degree) + polarized_expected_counts = polarization_degree * next(iter(polarized_irf.effective_area_cm2([polarized_photon]))) * flux_cm2_s * duration_s + + unpolarized_counts = poisson(unpolarized_expected_counts).rvs() + polarized_counts = poisson(polarized_expected_counts).rvs() + + self._events = [] + + unpolarized_events = iter(unpolarized_irf.random_events(itertools.repeat(unpolarized_photon, unpolarized_counts))) + + polarized_events = None + if polarized_counts > 0: + polarized_events = iter(polarized_irf.random_events(itertools.repeat(polarized_photon, polarized_counts))) + + nthrown_unpolarized = 0 + nthrown_polarized = 0 + + while nthrown_unpolarized < unpolarized_counts or nthrown_polarized < polarized_counts: + + if np.random.uniform() < polarization_degree: + # Polarized component + if nthrown_polarized < polarized_counts: + self._events.append(next(polarized_events)) + nthrown_polarized += 1 + else: + # Unpolarized component + if nthrown_unpolarized < unpolarized_counts: + self._events.append(next(unpolarized_events)) + nthrown_unpolarized += 1 + + def __iter__(self) -> Iterator[EmCDSEventInSCFrameInterface]: + """ + Return one Event at a time + """ + yield from self._events + + +irf_pol = IdealComptonIRF.cosi_like() +irf_unpol = UnpolarizedIdealComptonIRF.cosi_like() + +ei = 500*u.keV +source_direction = SkyCoord(lon = 0, lat = 80, unit = 'deg', frame = SpacecraftFrame()) +flux0 = 1/u.cm/u.cm/u.s +duration = 10*u.s +pol_degree = 0 +pol_angle = 80*u.deg +pol_convention = StereographicConvention() + +profile = cProfile.Profile() +profile.enable() +tstart = time.perf_counter() +data = RandomEventDataFromLineInSCFrame(irf = irf_unpol, + flux = flux0, + duration = duration, + energy=ei, + direction = source_direction, + polarized_irf= irf_pol, + polarization_degree=pol_degree, + polarization_angle=pol_angle, + polarization_convention=pol_convention) + +# energy = [] +# phi = [] +# psi = [] # latitude +# chi = [] # longitude +# +# for event in data: +# +# energy.append(event.energy_keV) +# phi.append(event.scattering_angle_rad) +# psi.append(event.scattered_lat_rad_sc) +# chi.append(event.scattered_lon_rad_sc) +# +# print(time.perf_counter() - tstart) +# profile.disable() +# profile.dump_stats("/Users/imartin5/tmp/prof_gen.prof") +# +# print(data.nevents) +# +# energy = Quantity(energy, u.keV) +# phi = Quantity(phi, u.rad) +# psichi = SkyCoord(chi, psi, unit = u.rad, frame = 'spacecraftframe') +# +# binned_data = Histogram([Axis(np.geomspace(10,10000,50)*u.keV, scale = 'log', label='Em'), +# Axis(np.linspace(0, 180, 180)*u.deg, scale='linear', label='Phi'), +# HealpixAxis(nside = 64, label = 'PsiChi', 
coordsys='spacecraftframe')]) +# +# binned_data.fill(energy, phi, psichi) +# +# rel_binned_data = Histogram([Axis(np.linspace(-1,1.1,200), scale = 'linear', label='eps'), +# Axis(np.linspace(0, 180, 180)*u.deg, scale='linear', label='phi'), +# Axis(np.linspace(-180, 180, 180)*u.deg, scale='linear', label='arm'), +# Axis(np.linspace(-180, 180, 180) * u.deg, scale='linear', label='az')]) +# +# eps = ((energy - ei)/ei).to_value('') +# phi_geom,az = RelativeCDSCoordinates(source_direction, pol_convention).to_relative(psichi) +# arm = phi_geom - phi +# +# rel_binned_data.fill(eps, phi, arm, az) + +#binned_data.project('PsiChi').plot() +#plt.show() + +class ExpectationFromLineInSCFrame(ExpectationDensityInterface): + + def __init__(self, + data:EmCDSEventDataInSCFrameInterface, + irf:FarFieldInstrumentResponseFunctionInterface, + flux:Quantity, + duration:Quantity, + energy:Quantity, + direction:SkyCoord, + polarized_irf:FarFieldInstrumentResponseFunctionInterface, + polarization_degree:float = None, + polarization_angle:Union[Angle, Quantity] = None, + polarization_convention:PolarizationConvention = None): + + self._unpolarized_irf = irf + + self.set_flux(flux) + self._duration_s = duration.to_value(u.s) + self._data = data + direction = direction.transform_to('spacecraftframe') + self._source_direction_lon_rad = direction.lon.rad + self._source_direction_lat_rad = direction.lat.rad + + self._polarization_degree = polarization_degree + + if self._polarization_degree is None: + self._polarization_degree = 0 + + if self._polarization_degree < 0 or self._polarization_degree > 1: + raise ValueError(f"polarization_degree must lie between 0 and 1. Got {self._polarization_degree}") + + if self._polarization_degree == 0: + self._polarized_irf = None + self._polarization_angle_rad = None + self._polarization_convention = None + else: + + self._polarized_irf = polarized_irf + + self._polarization_angle_rad = PolarizationAngle(polarization_angle, direction, + polarization_convention).transform_to('stereographic').angle.rad + + + # Build the Photon query as well + self.set_energy(energy) + + # Cache + self._cached_energy_keV = None + self._cached_diff_aeff = None # Per flux unit + self._cached_event_probability = None + + def set_flux(self, flux:Quantity): + self._flux_cm2_s = flux.to_value(1 / u.cm / u.cm / u.s) + + def set_energy(self, energy:Quantity): + self._energy_keV = energy.to_value(u.keV) + self._unpolarized_photon = PhotonWithDirectionAndEnergyInSCFrame(self._source_direction_lon_rad, + self._source_direction_lat_rad, + self._energy_keV) + + self._polarized_photon = PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConvention( + self._source_direction_lon_rad, + self._source_direction_lat_rad, + self._energy_keV, + self._polarization_angle_rad) + + def _update_cache(self): + + if self._cached_energy_keV is None or self._energy_keV != self._cached_energy_keV: + #Either it's the first time or the energy changed + + flux_dur = self._flux_cm2_s * self._duration_s + + unpolarized_diff_aeff = (1 - self._polarization_degree) * next( + iter(self._unpolarized_irf.effective_area_cm2([self._unpolarized_photon]))) + + if self._polarization_degree > 0: + + polarized_diff_aeff = self._polarization_degree * next(iter(self._polarized_irf.effective_area_cm2([self._polarized_photon]))) + + self._cached_diff_aeff = unpolarized_diff_aeff + polarized_diff_aeff + + data1, data2 = itertools.tee(self._data, 2) + + unpol_frac = 1 - self._polarization_degree + + self._cached_event_probability = [unpol_frac * 
unpol_prob + self._polarization_degree * pol_prob \ + for unpol_prob, pol_prob in \ + zip(self._unpolarized_irf.event_probability([(self._unpolarized_photon,e) for e in data1]), self._polarized_irf.event_probability([(self._polarized_photon,e) for e in data2]))] + + else: + + self._cached_diff_aeff = unpolarized_diff_aeff + + self._cached_event_probability = np.fromiter(self._unpolarized_irf.event_probability([(self._unpolarized_photon, e) for e in self._data]), dtype=float) + + self._cached_energy_keV = self._energy_keV + + def expected_counts(self) -> float: + + self._update_cache() + + return self._cached_diff_aeff * (self._flux_cm2_s * self._duration_s) + + def event_probability(self) -> Iterable[float]: + + self._update_cache() + + yield from self._cached_event_probability + +expectation = ExpectationFromLineInSCFrame(data, + irf=irf_unpol, + flux=flux0, + duration=duration, + energy=ei, + direction=source_direction, + polarized_irf=irf_pol, + polarization_degree=pol_degree, + polarization_angle=pol_angle, + polarization_convention=pol_convention) + + +# Check density +# weighted_rel_binned_data = Histogram(rel_binned_data.axes) +# weighted_rel_binned_data.fill(eps, phi, arm, az, weight = list(expectation.expectation_density())) +# +# phase_space = RelativeCDSCoordinates.get_relative_cds_phase_space(rel_binned_data.axes['phi'].lower_bounds[:,None,None], rel_binned_data.axes['phi'].upper_bounds[:,None,None], +# rel_binned_data.axes['arm'].lower_bounds[None,:,None], rel_binned_data.axes['arm'].upper_bounds[None,:,None], +# rel_binned_data.axes['az'].lower_bounds[None,None,:], rel_binned_data.axes['az'].upper_bounds[None,None,:]) +# +# mean_rel_binned_data = weighted_rel_binned_data * phase_space[None] / rel_binned_data +# mean_rel_binned_data[np.isnan(mean_rel_binned_data.contents)] = 0 +# +# rel_binned_data.project('eps').plot() +# mean_rel_binned_data.project('eps').plot() +# +# plt.show() + + +likelihood = UnbinnedLikelihood(expectation) + +loglike = Histogram([Axis(np.linspace(.5, 1.5, 11)/u.cm/u.cm/u.s, label = 'flux'), + Axis(np.linspace(498, 502, 10)*u.keV, label = 'Ei')]) + +profile = cProfile.Profile() +profile.enable() +tstart = time.perf_counter() +for j, ei_j in enumerate(loglike.axes['Ei'].centers): + print(j) + expectation.set_energy(ei_j) + for i,flux_i in enumerate(loglike.axes['flux'].centers): + + expectation.set_flux(flux_i) + + loglike[i,j] = likelihood.get_log_like() + + +print(time.perf_counter() - tstart) +profile.disable() +profile.dump_stats("/Users/imartin5/tmp/prof_eval.prof") + +(loglike - np.max(loglike)).plot(vmin = -25) + +plt.show() \ No newline at end of file From d8f154706a6d2ef3645637f99908389a5c9d06c2 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Mon, 3 Nov 2025 15:46:42 -0500 Subject: [PATCH 123/133] convenience function to get array of all values Signed-off-by: Israel Martinez --- cosipy/interfaces/data_interface.py | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/cosipy/interfaces/data_interface.py b/cosipy/interfaces/data_interface.py index 57b8b094..4834c056 100644 --- a/cosipy/interfaces/data_interface.py +++ b/cosipy/interfaces/data_interface.py @@ -92,10 +92,12 @@ class TimeTagEventDataInterface(EventDataInterface, Protocol): def __iter__(self) -> Iterator[TimeTagEventInterface]:... @property - def jd1(self) -> Iterable[float]: ... + def jd1(self) -> Iterable[float]: + return [e.jd1 for e in self] @property - def jd2(self) -> Iterable[float]: ... 
+ def jd2(self) -> Iterable[float]: + return [e.jd2 for e in self] @property def time(self) -> Time: @@ -112,14 +114,15 @@ class EventDataWithEnergyInterface(EventDataInterface, Protocol): def __iter__(self) -> Iterator[EventWithEnergyInterface]:... @property - def energy_rad(self) -> Iterable[float]:... + def energy_keV(self) -> Iterable[float]: + return [e.energy_keV for e in self] @property def energy(self) -> Quantity: """ Add fancy energy quantity """ - return Quantity(self.energy_rad, u.rad) + return Quantity(self.energy_keV, u.keV) @runtime_checkable @@ -130,7 +133,8 @@ class EventDataWithScatteringAngleInterface(EventDataInterface, Protocol): def __iter__(self) -> Iterator[EventWithScatteringAngleInterface]:... @property - def scattering_angle_rad(self) -> Iterable[float]: ... + def scattering_angle_rad(self) -> Iterable[float]: + return [e.scattering_angle_rad for e in self] @property def scattering_angle(self) -> Angle: @@ -147,10 +151,12 @@ class ComptonDataSpaceInSCFrameEventDataInterface(EventDataWithScatteringAngleIn def __iter__(self) -> Iterator[ComptonDataSpaceInSCFrameEventInterface]:... @property - def scattered_lon_rad_sc(self) -> Iterable[float]: ... + def scattered_lon_rad_sc(self) -> Iterable[float]: + return [e.scattered_lon_rad_sc for e in self] @property - def scattered_lat_rad_sc(self) -> Iterable[float]: ... + def scattered_lat_rad_sc(self) -> Iterable[float]: + return [e.scattered_lat_rad_sc for e in self] @property def scattered_direction_sc(self) -> SkyCoord: From fc913af7285d1be77588bfec92cf10cfcf6c80b8 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Mon, 3 Nov 2025 15:47:09 -0500 Subject: [PATCH 124/133] Ideal response analysis. All working. Signed-off-by: Israel Martinez --- cosipy/response/ideal_response.py | 275 +++++++++++- .../ideal_irf/ideal_irf_line_fit_example.py | 408 ++++++------------ 2 files changed, 395 insertions(+), 288 deletions(-) diff --git a/cosipy/response/ideal_response.py b/cosipy/response/ideal_response.py index 303d8530..f76cbcd4 100644 --- a/cosipy/response/ideal_response.py +++ b/cosipy/response/ideal_response.py @@ -1,16 +1,19 @@ +import itertools import warnings from collections.abc import Callable -from typing import Iterable, Tuple, Union +from typing import Iterable, Tuple, Union, Iterator from astropy.coordinates import Angle, SkyCoord from astropy.units import Quantity from cosipy.data_io.EmCDSUnbinnedData import EmCDSEventInSCFrame -from cosipy.polarization import StereographicConvention +from cosipy.interfaces import ExpectationDensityInterface +from cosipy.interfaces.data_interface import EmCDSEventDataInSCFrameInterface +from cosipy.polarization import StereographicConvention, PolarizationConvention, PolarizationAngle from cosipy.response.relative_coordinates import RelativeCDSCoordinates from more_itertools.more import sample from numpy._typing import NDArray from scipy.optimize import minimize_scalar -from scipy.stats import rv_continuous, truncnorm, norm, uniform, randint +from scipy.stats import rv_continuous, truncnorm, norm, uniform, randint, poisson from scipy.stats.sampling import SimpleRatioUniforms import astropy.units as u import numpy as np @@ -21,7 +24,7 @@ PhotonWithEnergyInterface, PhotonWithDirectionInSCFrameInterface from cosipy.response.photon_types import \ PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConventionInterface as PolDirESCPhoton, \ - PhotonWithDirectionAndEnergyInSCFrame + PhotonWithDirectionAndEnergyInSCFrame, 
PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConvention from scipy.special import erfi, erf from cosipy.util.iterables import itertools_batched @@ -800,3 +803,267 @@ def _az_prob(self, photon, phi, az): def _random_az(self, photon, phi): pa = photon.polarization_angle_rad return KleinNishinaAzimuthalScatteringAngleDist(photon.energy, phi).rvs() + pa + +class RandomEventDataFromLineInSCFrame(EmCDSEventDataInSCFrameInterface): + + def __init__(self, + irf:FarFieldInstrumentResponseFunctionInterface, + flux:Quantity, + duration:Quantity, + energy:Quantity, + direction:SkyCoord, + polarized_irf:FarFieldInstrumentResponseFunctionInterface, + polarization_degree:float = None, + polarization_angle:Union[Angle, Quantity] = None, + polarization_convention:PolarizationConvention = None): + """ + + Parameters + ---------- + irf: Must handle PhotonWithDirectionAndEnergyInSCFrameInterface + flux: Source flux in unit of 1/area/time + duration: Integration time + energy: Source energy (a line) + direction: Source direction (in SC coordinates) + polarized_irf: Must handle PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConventionInterface + polarization_degree + polarization_angle + polarization_convention + """ + + unpolarized_irf = irf + + self.event_type = unpolarized_irf.event_type + + flux_cm2_s = flux.to_value(1/u.cm/u.cm/u.s) + duration_s = duration.to_value(u.s) + + energy_keV = energy.to_value(u.keV) + direction = direction.transform_to('spacecraftframe') + source_direction_lon_rad = direction.lon.rad + source_direction_lat_rad = direction.lat.rad + + unpolarized_photon = PhotonWithDirectionAndEnergyInSCFrame(source_direction_lon_rad, + source_direction_lat_rad, + energy_keV) + + unpolarized_expected_counts = next(iter(irf.effective_area_cm2([unpolarized_photon]))) * flux_cm2_s * duration_s + + if polarization_degree is None: + polarization_degree = 0 + + if polarization_degree < 0 or polarization_degree > 1: + raise ValueError(f"polarization_degree must lie between 0 and 1. Got {polarization_degree}") + + if polarization_degree == 0: + polarized_irf = None + polarized_expected_counts = 0 + + else: + + polarized_irf = polarized_irf + + if polarized_irf.event_type is not unpolarized_irf.event_type: + raise TypeError(f"Both IRF need to have the same event type. 
Got {unpolarized_irf.event_type} and {polarized_irf.event_type}") + + polarization_angle_rad = PolarizationAngle(polarization_angle, direction, polarization_convention).transform_to('stereographic').angle.rad + + polarized_photon = PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConvention(source_direction_lon_rad, + source_direction_lat_rad, + energy_keV, + polarization_angle_rad) + + unpolarized_expected_counts *= (1 - polarization_degree) + polarized_expected_counts = polarization_degree * next(iter(polarized_irf.effective_area_cm2([polarized_photon]))) * flux_cm2_s * duration_s + + unpolarized_counts = poisson(unpolarized_expected_counts).rvs() + polarized_counts = poisson(polarized_expected_counts).rvs() + + self._events = [] + + unpolarized_events = iter(unpolarized_irf.random_events(itertools.repeat(unpolarized_photon, unpolarized_counts))) + + polarized_events = None + if polarized_counts > 0: + polarized_events = iter(polarized_irf.random_events(itertools.repeat(polarized_photon, polarized_counts))) + + nthrown_unpolarized = 0 + nthrown_polarized = 0 + + while nthrown_unpolarized < unpolarized_counts or nthrown_polarized < polarized_counts: + + if np.random.uniform() < polarization_degree: + # Polarized component + if nthrown_polarized < polarized_counts: + self._events.append(next(polarized_events)) + nthrown_polarized += 1 + else: + # Unpolarized component + if nthrown_unpolarized < unpolarized_counts: + self._events.append(next(unpolarized_events)) + nthrown_unpolarized += 1 + + def __iter__(self) -> Iterator[EmCDSEventInSCFrameInterface]: + """ + Return one Event at a time + """ + yield from self._events + +class ExpectationFromLineInSCFrame(ExpectationDensityInterface): + + def __init__(self, + data:EmCDSEventDataInSCFrameInterface, + irf:FarFieldInstrumentResponseFunctionInterface, + flux:Quantity, + duration:Quantity, + energy:Quantity, + direction:SkyCoord, + polarized_irf:FarFieldInstrumentResponseFunctionInterface, + polarization_degree:float = None, + polarization_angle:Union[Angle, Quantity] = None, + polarization_convention:PolarizationConvention = None): + + self._unpolarized_irf = irf + self._polarized_irf = polarized_irf + + self._duration_s = duration.to_value(u.s) + self._data = data + + self._flux_cm2_s = None + self._energy_keV = None + self._direction = None + self._source_direction_lon_rad = None + self._source_direction_lat_rad = None + self._polarization_degree = None + self._polarization_angle_rad = None + self._polarization_convention = None + self._unpolarized_photon = None + self._polarized_photon = None + self.set_model(flux = flux, + energy= energy, + direction=direction, + polarization_degree=polarization_degree, + polarization_angle=polarization_angle, + polarization_convention = polarization_convention) + + # Cache + self._cached_energy_keV = None + self._cached_direction = None + self._cached_pol_angle_rad = None + self._cached_pol_degree = None + self._cached_diff_aeff = None # Per flux unit + self._cached_event_probability = None + self._cached_event_probability_unpolarized = None + self._cached_event_probability_polarized = None + + def set_model(self, + flux:Quantity = None, + energy:Quantity = None, + direction:SkyCoord = None, + polarization_degree: float = None, + polarization_angle: Union[Angle, Quantity] = None, + polarization_convention: PolarizationConvention = None + ): + """ + Parameters not set default to current values + """ + + if flux is not None: + self._flux_cm2_s = flux.to_value(1 / u.cm / u.cm / u.s) + + if energy 
is not None:
+            self._energy_keV = energy.to_value(u.keV)
+
+        if direction is not None:
+            direction = direction.transform_to('spacecraftframe')
+            self._direction = direction
+            self._source_direction_lon_rad = direction.lon.rad
+            self._source_direction_lat_rad = direction.lat.rad
+
+        if polarization_degree is not None:
+            self._polarization_degree = polarization_degree
+
+        if self._polarization_degree is None:
+            self._polarization_degree = 0
+
+        if self._polarization_degree < 0 or self._polarization_degree > 1:
+            raise ValueError(f"polarization_degree must lie between 0 and 1. Got {self._polarization_degree}")
+
+        if self._polarization_degree > 0:
+
+            if self._polarized_irf is None:
+                raise ValueError("Polarization degree >0 but polarized IRF is None")
+
+        if polarization_convention is not None:
+            self._polarization_convention = polarization_convention
+
+        if polarization_angle is not None:
+            self._polarization_angle_rad = PolarizationAngle(polarization_angle, self._direction,
+                                                             self._polarization_convention).transform_to('stereographic').angle.rad
+
+        self._unpolarized_photon = PhotonWithDirectionAndEnergyInSCFrame(self._source_direction_lon_rad,
+                                                                         self._source_direction_lat_rad,
+                                                                         self._energy_keV)
+
+        self._polarized_photon = PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConvention(
+            self._source_direction_lon_rad,
+            self._source_direction_lat_rad,
+            self._energy_keV,
+            self._polarization_angle_rad)
+
+    def _update_cache(self):
+
+        if (self._cached_energy_keV is None
+                or self._energy_keV != self._cached_energy_keV
+                or self._direction != self._cached_direction
+                or self._polarization_angle_rad != self._cached_pol_angle_rad
+                or self._polarization_degree != self._cached_pol_degree):
+            # Either it's the first time or a relevant model parameter changed
+
+            unpolarized_diff_aeff = (1 - self._polarization_degree) * next(
+                iter(self._unpolarized_irf.effective_area_cm2([self._unpolarized_photon])))
+
+            if (self._cached_event_probability_unpolarized is None
+                    or self._energy_keV != self._cached_energy_keV
+                    or self._direction != self._cached_direction):
+                # Energy or direction can affect the unpolarized response, but not PA nor PD
+                self._cached_event_probability_unpolarized = np.fromiter(self._unpolarized_irf.event_probability([(self._unpolarized_photon, e) for e in self._data]),dtype=float)
+
+            if self._polarization_degree > 0:
+
+                polarized_diff_aeff = self._polarization_degree * next(iter(self._polarized_irf.effective_area_cm2([self._polarized_photon])))
+
+                self._cached_diff_aeff = unpolarized_diff_aeff + polarized_diff_aeff
+
+                if (self._cached_event_probability_polarized is None
+                        or self._energy_keV != self._cached_energy_keV
+                        or self._direction != self._cached_direction
+                        or self._polarization_angle_rad != self._cached_pol_angle_rad):
+                    # Energy, direction or PA can affect the polarized response, but not PD
+                    self._cached_event_probability_polarized = np.fromiter(self._polarized_irf.event_probability([(self._polarized_photon, e) for e in self._data]), dtype=float)
+
+                self._cached_event_probability = ( 1 - self._polarization_degree) * self._cached_event_probability_unpolarized + self._polarization_degree * self._cached_event_probability_polarized
+
+            else:
+
+                self._cached_diff_aeff = unpolarized_diff_aeff
+
+                self._cached_event_probability = self._cached_event_probability_unpolarized
+
+            self._cached_energy_keV = self._energy_keV
+            self._cached_direction = self._direction
+            self._cached_pol_angle_rad = self._polarization_angle_rad
+            self._cached_pol_degree = self._polarization_degree
+
+ def expected_counts(self) -> float: + + self._update_cache() + + return self._cached_diff_aeff * (self._flux_cm2_s * self._duration_s) + + def event_probability(self) -> Iterable[float]: + + self._update_cache() + + yield from self._cached_event_probability + diff --git a/docs/api/interfaces/examples/ideal_irf/ideal_irf_line_fit_example.py b/docs/api/interfaces/examples/ideal_irf/ideal_irf_line_fit_example.py index 203f118d..51b4dcb1 100644 --- a/docs/api/interfaces/examples/ideal_irf/ideal_irf_line_fit_example.py +++ b/docs/api/interfaces/examples/ideal_irf/ideal_irf_line_fit_example.py @@ -1,151 +1,34 @@ import cProfile -import itertools import time -from typing import Iterator, Union, Iterable import numpy as np from astropy.coordinates import SkyCoord, Angle -from astropy.units import Quantity -from cosipy.interfaces import EventInterface, ExpectationDensityInterface -from cosipy.interfaces.data_interface import EventDataInSCFrameInterface, EmCDSEventDataInSCFrameInterface -from cosipy.interfaces.event import EmCDSEventInSCFrameInterface from astropy import units as u -from cosipy.interfaces.instrument_response_interface import InstrumentResponseFunctionInterface, \ - FarFieldInstrumentResponseFunctionInterface -from cosipy.polarization import PolarizationConvention, PolarizationAngle, StereographicConvention -from cosipy.response.ideal_response import IdealComptonIRF, UnpolarizedIdealComptonIRF, MeasuredEnergyDist, \ - ThresholdKleinNishinaPolarScatteringAngleDist -from cosipy.response.photon_types import PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConvention, \ - PhotonWithDirectionAndEnergyInSCFrame +from cosipy.polarization import StereographicConvention +from cosipy.response.ideal_response import IdealComptonIRF, UnpolarizedIdealComptonIRF, ExpectationFromLineInSCFrame, RandomEventDataFromLineInSCFrame from cosipy.response.relative_coordinates import RelativeCDSCoordinates from cosipy.statistics import UnbinnedLikelihood from histpy import Histogram, Axis, HealpixAxis from matplotlib import pyplot as plt -from scipy.stats import poisson from scoords import SpacecraftFrame - -class RandomEventDataFromLineInSCFrame(EmCDSEventDataInSCFrameInterface): - - def __init__(self, - irf:FarFieldInstrumentResponseFunctionInterface, - flux:Quantity, - duration:Quantity, - energy:Quantity, - direction:SkyCoord, - polarized_irf:FarFieldInstrumentResponseFunctionInterface, - polarization_degree:float = None, - polarization_angle:Union[Angle, Quantity] = None, - polarization_convention:PolarizationConvention = None): - """ - - Parameters - ---------- - irf: Must handle PhotonWithDirectionAndEnergyInSCFrameInterface - flux: Source flux in unit of 1/area/time - duration: Integration time - energy: Source energy (a line) - direction: Source direction (in SC coordinates) - polarized_irf: Must handle PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConventionInterface - polarization_degree - polarization_angle - polarization_convention - """ - - unpolarized_irf = irf - - self.event_type = unpolarized_irf.event_type - - flux_cm2_s = flux.to_value(1/u.cm/u.cm/u.s) - duration_s = duration.to_value(u.s) - - energy_keV = energy.to_value(u.keV) - direction = direction.transform_to('spacecraftframe') - source_direction_lon_rad = direction.lon.rad - source_direction_lat_rad = direction.lat.rad - - unpolarized_photon = PhotonWithDirectionAndEnergyInSCFrame(source_direction_lon_rad, - source_direction_lat_rad, - energy_keV) - - unpolarized_expected_counts = 
next(iter(irf.effective_area_cm2([unpolarized_photon]))) * flux_cm2_s * duration_s - - if polarization_degree is None: - polarization_degree = 0 - - if polarization_degree < 0 or polarization_degree > 1: - raise ValueError(f"polarization_degree must lie between 0 and 1. Got {polarization_degree}") - - if polarization_degree == 0: - polarized_irf = None - polarized_expected_counts = 0 - - else: - - polarized_irf = polarized_irf - - if polarized_irf.event_type is not unpolarized_irf.event_type: - raise TypeError(f"Both IRF need to have the same event type. Got {unpolarized_irf.event_type} and {polarized_irf.event_type}") - - polarization_angle_rad = PolarizationAngle(polarization_angle, direction, polarization_convention).transform_to('stereographic').angle.rad - - polarized_photon = PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConvention(source_direction_lon_rad, - source_direction_lat_rad, - energy_keV, - polarization_angle_rad) - - unpolarized_expected_counts *= (1 - polarization_degree) - polarized_expected_counts = polarization_degree * next(iter(polarized_irf.effective_area_cm2([polarized_photon]))) * flux_cm2_s * duration_s - - unpolarized_counts = poisson(unpolarized_expected_counts).rvs() - polarized_counts = poisson(polarized_expected_counts).rvs() - - self._events = [] - - unpolarized_events = iter(unpolarized_irf.random_events(itertools.repeat(unpolarized_photon, unpolarized_counts))) - - polarized_events = None - if polarized_counts > 0: - polarized_events = iter(polarized_irf.random_events(itertools.repeat(polarized_photon, polarized_counts))) - - nthrown_unpolarized = 0 - nthrown_polarized = 0 - - while nthrown_unpolarized < unpolarized_counts or nthrown_polarized < polarized_counts: - - if np.random.uniform() < polarization_degree: - # Polarized component - if nthrown_polarized < polarized_counts: - self._events.append(next(polarized_events)) - nthrown_polarized += 1 - else: - # Unpolarized component - if nthrown_unpolarized < unpolarized_counts: - self._events.append(next(unpolarized_events)) - nthrown_unpolarized += 1 - - def __iter__(self) -> Iterator[EmCDSEventInSCFrameInterface]: - """ - Return one Event at a time - """ - yield from self._events - +from mhealpy import HealpixMap irf_pol = IdealComptonIRF.cosi_like() irf_unpol = UnpolarizedIdealComptonIRF.cosi_like() ei = 500*u.keV -source_direction = SkyCoord(lon = 0, lat = 80, unit = 'deg', frame = SpacecraftFrame()) +source_direction = SkyCoord(lon = 0, lat = 60, unit = 'deg', frame = SpacecraftFrame()) flux0 = 1/u.cm/u.cm/u.s duration = 10*u.s -pol_degree = 0 +pol_degree = .7 pol_angle = 80*u.deg pol_convention = StereographicConvention() -profile = cProfile.Profile() -profile.enable() -tstart = time.perf_counter() +# profile = cProfile.Profile() +# profile.enable() +# tstart = time.perf_counter() data = RandomEventDataFromLineInSCFrame(irf = irf_unpol, flux = flux0, duration = duration, @@ -156,209 +39,166 @@ def __iter__(self) -> Iterator[EmCDSEventInSCFrameInterface]: polarization_angle=pol_angle, polarization_convention=pol_convention) -# energy = [] -# phi = [] -# psi = [] # latitude -# chi = [] # longitude -# -# for event in data: -# -# energy.append(event.energy_keV) -# phi.append(event.scattering_angle_rad) -# psi.append(event.scattered_lat_rad_sc) -# chi.append(event.scattered_lon_rad_sc) -# -# print(time.perf_counter() - tstart) -# profile.disable() -# profile.dump_stats("/Users/imartin5/tmp/prof_gen.prof") -# -# print(data.nevents) -# -# energy = Quantity(energy, u.keV) -# phi = Quantity(phi, 
u.rad) -# psichi = SkyCoord(chi, psi, unit = u.rad, frame = 'spacecraftframe') -# -# binned_data = Histogram([Axis(np.geomspace(10,10000,50)*u.keV, scale = 'log', label='Em'), -# Axis(np.linspace(0, 180, 180)*u.deg, scale='linear', label='Phi'), -# HealpixAxis(nside = 64, label = 'PsiChi', coordsys='spacecraftframe')]) -# -# binned_data.fill(energy, phi, psichi) -# -# rel_binned_data = Histogram([Axis(np.linspace(-1,1.1,200), scale = 'linear', label='eps'), -# Axis(np.linspace(0, 180, 180)*u.deg, scale='linear', label='phi'), -# Axis(np.linspace(-180, 180, 180)*u.deg, scale='linear', label='arm'), -# Axis(np.linspace(-180, 180, 180) * u.deg, scale='linear', label='az')]) -# -# eps = ((energy - ei)/ei).to_value('') -# phi_geom,az = RelativeCDSCoordinates(source_direction, pol_convention).to_relative(psichi) -# arm = phi_geom - phi -# -# rel_binned_data.fill(eps, phi, arm, az) - -#binned_data.project('PsiChi').plot() -#plt.show() - -class ExpectationFromLineInSCFrame(ExpectationDensityInterface): - - def __init__(self, - data:EmCDSEventDataInSCFrameInterface, - irf:FarFieldInstrumentResponseFunctionInterface, - flux:Quantity, - duration:Quantity, - energy:Quantity, - direction:SkyCoord, - polarized_irf:FarFieldInstrumentResponseFunctionInterface, - polarization_degree:float = None, - polarization_angle:Union[Angle, Quantity] = None, - polarization_convention:PolarizationConvention = None): +measured_energy = data.energy +phi = data.scattering_angle +psichi = data.scattered_direction_sc - self._unpolarized_irf = irf +fig,ax = plt.subplots(subplot_kw = {'projection':'mollview'}) +sc = ax.scatter(psichi.lon.deg, psichi.lat.deg, transform = ax.get_transform('world'), c = phi*180/np.pi, cmap = 'inferno', + s = 2, vmin = 0, vmax = 180) +ax.scatter(source_direction.lon.deg, source_direction.lat.deg, transform = ax.get_transform('world'), marker = 'x', s = 100, c = 'red') +fig.colorbar(sc, orientation="horizontal", fraction = .02, label = "$\phi$ [deg]") - self.set_flux(flux) - self._duration_s = duration.to_value(u.s) - self._data = data - direction = direction.transform_to('spacecraftframe') - self._source_direction_lon_rad = direction.lon.rad - self._source_direction_lat_rad = direction.lat.rad +eps = ((measured_energy - ei)/ei).to_value('') +phi_geom,az = RelativeCDSCoordinates(source_direction, pol_convention).to_relative(psichi) +theta_arm = phi_geom - phi - self._polarization_degree = polarization_degree +rel_binned_data = Histogram([Axis(np.linspace(-1,1.1,200), scale = 'linear', label='eps'), + Axis(np.linspace(0, 180, 180)*u.deg, scale='linear', label='phi'), + Axis(np.linspace(-180, 180, 180)*u.deg, scale='linear', label='arm'), + Axis(np.linspace(-180, 180, 180) * u.deg, scale='linear', label='az')]) - if self._polarization_degree is None: - self._polarization_degree = 0 +rel_binned_data.fill(eps, phi, theta_arm, az) - if self._polarization_degree < 0 or self._polarization_degree > 1: - raise ValueError(f"polarization_degree must lie between 0 and 1. 
Got {self._polarization_degree}") +fig,ax = plt.subplots(2, 3, figsize = [18,8]) - if self._polarization_degree == 0: - self._polarized_irf = None - self._polarization_angle_rad = None - self._polarization_convention = None - else: +rel_binned_data.project('eps').plot(ax[0,0],errorbars = True) +rel_binned_data.slice[{'phi':slice(30,120)}].project('az').rebin(5).plot(ax[1,0],errorbars = True) - self._polarized_irf = polarized_irf +rel_binned_data.project(['arm','phi']).rebin(3,5).plot(ax[0,1]) +rel_binned_data.project('phi').rebin(5).plot(ax[0,2],errorbars = True) +rel_binned_data.project('arm').rebin(3).plot(ax[1,1],errorbars = True) - self._polarization_angle_rad = PolarizationAngle(polarization_angle, direction, - polarization_convention).transform_to('stereographic').angle.rad +plt.show() - # Build the Photon query as well - self.set_energy(energy) - # Cache - self._cached_energy_keV = None - self._cached_diff_aeff = None # Per flux unit - self._cached_event_probability = None +expectation = ExpectationFromLineInSCFrame(data, + irf=irf_unpol, + flux=flux0, + duration=duration, + energy=ei, + direction=source_direction, + polarized_irf=irf_pol, + polarization_degree=pol_degree, + polarization_angle=pol_angle, + polarization_convention=pol_convention) - def set_flux(self, flux:Quantity): - self._flux_cm2_s = flux.to_value(1 / u.cm / u.cm / u.s) - def set_energy(self, energy:Quantity): - self._energy_keV = energy.to_value(u.keV) - self._unpolarized_photon = PhotonWithDirectionAndEnergyInSCFrame(self._source_direction_lon_rad, - self._source_direction_lat_rad, - self._energy_keV) - self._polarized_photon = PolarizedPhotonWithDirectionAndEnergyInSCFrameStereographicConvention( - self._source_direction_lon_rad, - self._source_direction_lat_rad, - self._energy_keV, - self._polarization_angle_rad) +likelihood = UnbinnedLikelihood(expectation) - def _update_cache(self): +# ==== Free the source energy ==== +if False: + # Set everything to the injection values + expectation.set_model(flux=flux0, + energy=ei, + direction=source_direction, + polarization_degree=pol_degree, + polarization_angle=pol_angle) - if self._cached_energy_keV is None or self._energy_keV != self._cached_energy_keV: - #Either it's the first time or the energy changed + loglike = Histogram([Axis(np.linspace(.5, 1.5, 11)/u.cm/u.cm/u.s, label = 'flux'), + Axis(np.linspace(498, 502, 10)*u.keV, label = 'Ei')]) - flux_dur = self._flux_cm2_s * self._duration_s + profile = cProfile.Profile() + profile.enable() + tstart = time.perf_counter() + for j, ei_j in enumerate(loglike.axes['Ei'].centers): + print(j) + for i,flux_i in enumerate(loglike.axes['flux'].centers): - unpolarized_diff_aeff = (1 - self._polarization_degree) * next( - iter(self._unpolarized_irf.effective_area_cm2([self._unpolarized_photon]))) + expectation.set_model(flux = flux_i, energy = ei_j) - if self._polarization_degree > 0: + loglike[i,j] = likelihood.get_log_like() - polarized_diff_aeff = self._polarization_degree * next(iter(self._polarized_irf.effective_area_cm2([self._polarized_photon]))) - self._cached_diff_aeff = unpolarized_diff_aeff + polarized_diff_aeff + print(time.perf_counter() - tstart) + profile.disable() + profile.dump_stats("/Users/imartin5/tmp/prof_eval.prof") - data1, data2 = itertools.tee(self._data, 2) + (loglike - np.max(loglike)).plot(vmin = -25) - unpol_frac = 1 - self._polarization_degree + plt.show() - self._cached_event_probability = [unpol_frac * unpol_prob + self._polarization_degree * pol_prob \ - for unpol_prob, pol_prob in \ - 
zip(self._unpolarized_irf.event_probability([(self._unpolarized_photon,e) for e in data1]), self._polarized_irf.event_probability([(self._polarized_photon,e) for e in data2]))] - else: +# ==== Free the source direction ==== +if False: + # Set everything to the injection values + expectation.set_model(flux=flux0, + energy=ei, + direction=source_direction, + polarization_degree=pol_degree, + polarization_angle=pol_angle) - self._cached_diff_aeff = unpolarized_diff_aeff + loglike = Histogram([Axis(np.linspace(.5, 1.5, 11)/u.cm/u.cm/u.s, label = 'flux'), + HealpixAxis(nside = 32, label = 'direction', coordsys=SpacecraftFrame())]) - self._cached_event_probability = np.fromiter(self._unpolarized_irf.event_probability([(self._unpolarized_photon, e) for e in self._data]), dtype=float) + loglike[:] = np.nan - self._cached_energy_keV = self._energy_keV + profile = cProfile.Profile() + profile.enable() + tstart = time.perf_counter() + sample_pixels = loglike.axes['direction'].query_disc(source_direction.cartesian.xyz, np.deg2rad(10)) + for j, pix in enumerate(sample_pixels): - def expected_counts(self) -> float: + print(j,len(sample_pixels)) - self._update_cache() + coord_pix = loglike.axes['direction'].pix2skycoord(pix) - return self._cached_diff_aeff * (self._flux_cm2_s * self._duration_s) + for i,flux_i in enumerate(loglike.axes['flux'].centers): - def event_probability(self) -> Iterable[float]: + expectation.set_model(flux = flux_i, direction = coord_pix) - self._update_cache() + loglike[i,pix] = likelihood.get_log_like() - yield from self._cached_event_probability -expectation = ExpectationFromLineInSCFrame(data, - irf=irf_unpol, - flux=flux0, - duration=duration, - energy=ei, - direction=source_direction, - polarized_irf=irf_pol, - polarization_degree=pol_degree, - polarization_angle=pol_angle, - polarization_convention=pol_convention) + print(time.perf_counter() - tstart) + profile.disable() + profile.dump_stats("/Users/imartin5/tmp/prof_eval.prof") + direction_profile_loglike = HealpixMap(np.nanmax(loglike, axis = 0)) + direction_profile_loglike.plot() -# Check density -# weighted_rel_binned_data = Histogram(rel_binned_data.axes) -# weighted_rel_binned_data.fill(eps, phi, arm, az, weight = list(expectation.expectation_density())) -# -# phase_space = RelativeCDSCoordinates.get_relative_cds_phase_space(rel_binned_data.axes['phi'].lower_bounds[:,None,None], rel_binned_data.axes['phi'].upper_bounds[:,None,None], -# rel_binned_data.axes['arm'].lower_bounds[None,:,None], rel_binned_data.axes['arm'].upper_bounds[None,:,None], -# rel_binned_data.axes['az'].lower_bounds[None,None,:], rel_binned_data.axes['az'].upper_bounds[None,None,:]) -# -# mean_rel_binned_data = weighted_rel_binned_data * phase_space[None] / rel_binned_data -# mean_rel_binned_data[np.isnan(mean_rel_binned_data.contents)] = 0 -# -# rel_binned_data.project('eps').plot() -# mean_rel_binned_data.project('eps').plot() -# -# plt.show() + flux_prof_loglike = Histogram(loglike.axes['flux'], contents = np.nanmax(loglike, axis = 1)) + (flux_prof_loglike - np.nanmin(flux_prof_loglike)).plot() + plt.show() -likelihood = UnbinnedLikelihood(expectation) +# ==== Free PD and PA ==== +if True: + # Set everything to the injection values + expectation.set_model(flux=flux0, + energy=ei, + direction=source_direction, + polarization_degree=pol_degree, + polarization_angle=pol_angle) -loglike = Histogram([Axis(np.linspace(.5, 1.5, 11)/u.cm/u.cm/u.s, label = 'flux'), - Axis(np.linspace(498, 502, 10)*u.keV, label = 'Ei')]) + loglike = 
Histogram([Axis(np.linspace(.5, 1.5, 11) / u.cm / u.cm / u.s, label='flux'), + Axis(np.linspace(40,120,10)*u.deg, label='PA'), + Axis(np.linspace(0, 1, 11), label='PD'), + ]) -profile = cProfile.Profile() -profile.enable() -tstart = time.perf_counter() -for j, ei_j in enumerate(loglike.axes['Ei'].centers): - print(j) - expectation.set_energy(ei_j) - for i,flux_i in enumerate(loglike.axes['flux'].centers): + for j, pa_j in enumerate(loglike.axes['PA'].centers): + print(j) + for k, pd_k in enumerate(loglike.axes['PD'].centers): + for i,flux_i in enumerate(loglike.axes['flux'].centers): - expectation.set_flux(flux_i) + expectation.set_model(flux = flux_i, + polarization_degree = pd_k, + polarization_angle = pa_j) - loglike[i,j] = likelihood.get_log_like() + loglike[i,j,k] = likelihood.get_log_like() + flux_prof_loglike = Histogram(loglike.axes['flux'], contents = np.nanmax(loglike, axis = (1,2))) + ts_flux = 2 * (flux_prof_loglike - np.nanmin(flux_prof_loglike)) + ax,_ = ts_flux.plot() + ax.set_ylabel("TS") -print(time.perf_counter() - tstart) -profile.disable() -profile.dump_stats("/Users/imartin5/tmp/prof_eval.prof") + pol_prof_loglike = Histogram([loglike.axes['PA'], loglike.axes['PD']], contents=np.nanmax(loglike, axis=0)) -(loglike - np.max(loglike)).plot(vmin = -25) + ts_pol = 2 * (pol_prof_loglike - np.nanmax(pol_prof_loglike)) + ax, _ = ts_pol.plot(vmin=-4.61) + ax.scatter(pol_angle.to_value(u.deg), pol_degree, color='red') + ax.get_figure().axes[-1].set_ylabel("TS") -plt.show() \ No newline at end of file + plt.show() \ No newline at end of file From 0dcbddbf7f1eae72957f38149706e1854dc7bfc8 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Tue, 4 Nov 2025 09:53:28 -0500 Subject: [PATCH 125/133] Match FWHM 3deg Signed-off-by: Israel Martinez --- cosipy/response/ideal_response.py | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/cosipy/response/ideal_response.py b/cosipy/response/ideal_response.py index f76cbcd4..c1e4bf59 100644 --- a/cosipy/response/ideal_response.py +++ b/cosipy/response/ideal_response.py @@ -633,16 +633,17 @@ def cosi_like(cls, """ - max_area = 110 * u.cm * u.cm if max_area is None else max_area - max_area_energy = 1500 * u.keV if max_area_energy is None else max_area_energy - sigma_decades = 0.4 if sigma_decades is None else sigma_decades - energy_resolution = 0.01 if energy_resolution is None else energy_resolution - angres = 3 * u.deg if angres is None else angres - angres_fact = [1 / 3., 1, 3, 9] if angres_fact is None else angres_fact - angres_weights = [1, 4, 10, 20] if angres_weights is None else angres_weights - full_absorption_constant = 0.7 if full_absorption_constant is None else full_absorption_constant - full_absorption_exp_cutoff = 10 * u.MeV if full_absorption_exp_cutoff is None else full_absorption_exp_cutoff - energy_threshold = 20 * u.keV if energy_threshold is None else energy_threshold + # This angres_fact give a FWHM approx = angres, but with long tails + max_area = 110 * u.cm * u.cm if max_area is None else max_area + max_area_energy = 1500 * u.keV if max_area_energy is None else max_area_energy + sigma_decades = 0.4 if sigma_decades is None else sigma_decades + energy_resolution = 0.01 if energy_resolution is None else energy_resolution + angres = 3 * u.deg if angres is None else angres + angres_fact = np.asarray([1/3.,1,3,9,27])/3 if angres_fact is None else angres_fact + angres_weights = np.asarray([1,4,5,20,30]) if angres_weights is None else angres_weights + full_absorption_constant = 0.7 if 
full_absorption_constant is None else full_absorption_constant + full_absorption_exp_cutoff = 10 * u.MeV if full_absorption_exp_cutoff is None else full_absorption_exp_cutoff + energy_threshold = 20 * u.keV if energy_threshold is None else energy_threshold angres_fact = np.asarray(angres_fact) angres_weights = np.asarray(angres_weights) From 15ddd5e14b9848b0b06b862e6cb7a3bb8ffeb15a Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Tue, 4 Nov 2025 09:54:49 -0500 Subject: [PATCH 126/133] Clean example a little bit Signed-off-by: Israel Martinez --- .../ideal_irf/ideal_irf_line_fit_example.py | 247 ++++++++++++------ 1 file changed, 164 insertions(+), 83 deletions(-) diff --git a/docs/api/interfaces/examples/ideal_irf/ideal_irf_line_fit_example.py b/docs/api/interfaces/examples/ideal_irf/ideal_irf_line_fit_example.py index 51b4dcb1..c7583cad 100644 --- a/docs/api/interfaces/examples/ideal_irf/ideal_irf_line_fit_example.py +++ b/docs/api/interfaces/examples/ideal_irf/ideal_irf_line_fit_example.py @@ -1,3 +1,10 @@ +import logging +logging.basicConfig( + level=logging.INFO, # Set the logging level to INFO + format='%(asctime)s - %(levelname)s - %(filename)s:%(lineno)d - %(message)s' +) +logger = logging.getLogger(__name__) + import cProfile import time @@ -14,43 +21,79 @@ from scoords import SpacecraftFrame from mhealpy import HealpixMap +from tqdm import tqdm -irf_pol = IdealComptonIRF.cosi_like() -irf_unpol = UnpolarizedIdealComptonIRF.cosi_like() +#plt.ion() -ei = 500*u.keV +# ==== Initial setup ==== +# Simulated source parameters +source_energy = 500 * u.keV source_direction = SkyCoord(lon = 0, lat = 60, unit = 'deg', frame = SpacecraftFrame()) -flux0 = 1/u.cm/u.cm/u.s -duration = 10*u.s -pol_degree = .7 -pol_angle = 80*u.deg +source_flux = 1 / u.cm / u.cm / u.s +source_pd = .7 +source_pa = 80 * u.deg pol_convention = StereographicConvention() +# The integrated time of the observation. Increase it to get more statistics +# which is good for visualizing the data, but that will maka the analysis slower +duration = 10*u.s + +# Instrument Response Function (IRF) definitions +# The "unpolarized" response returns an average over all +# polarization angles. 
+irf_pol = IdealComptonIRF.cosi_like()
+irf_unpol = UnpolarizedIdealComptonIRF.cosi_like()
+
+# Simulate data sampling from the IRF itself
+# This simulates a monochromatic source at a fixed direction in the SC coordinate frame
+
 # profile = cProfile.Profile()
 # profile.enable()
 # tstart = time.perf_counter()
+logger.info("Simulating data...")
 data = RandomEventDataFromLineInSCFrame(irf = irf_unpol,
-                                        flux = flux0,
+                                        flux = source_flux,
                                         duration = duration,
-                                        energy=ei,
+                                        energy=source_energy,
                                         direction = source_direction,
                                         polarized_irf= irf_pol,
-                                        polarization_degree=pol_degree,
-                                        polarization_angle=pol_angle,
+                                        polarization_degree=source_pd,
+                                        polarization_angle=source_pa,
                                         polarization_convention=pol_convention)
 
+# Get the measured energy (Em) and the Compton Data Space (CDS) (CDS = Phi and PsiChi)
 measured_energy = data.energy
 phi = data.scattering_angle
 psichi = data.scattered_direction_sc
 
-fig,ax = plt.subplots(subplot_kw = {'projection':'mollview'})
-sc = ax.scatter(psichi.lon.deg, psichi.lat.deg, transform = ax.get_transform('world'), c = phi*180/np.pi, cmap = 'inferno',
-                s = 2, vmin = 0, vmax = 180)
-ax.scatter(source_direction.lon.deg, source_direction.lat.deg, transform = ax.get_transform('world'), marker = 'x', s = 100, c = 'red')
-fig.colorbar(sc, orientation="horizontal", fraction = .02, label = "$\phi$ [deg]")
+# ======= Data visualization ======
 
-eps = ((measured_energy - ei)/ei).to_value('')
-phi_geom,az = RelativeCDSCoordinates(source_direction, pol_convention).to_relative(psichi)
+fig,ax = plt.subplots(2, 3, figsize = [18,8])
+
+# This is a visualization of the Compton cone. Instead of drawing it in 3D space,
+# we'll use color to represent the scattering angle Phi, which is usually the z-axis of a Compton cone plot.
+# The location of the source is marked by an X, and the direction of each scattered photon (PsiChi) is represented
+# with a dot
+ax[0,0].set_axis_off() # Replace corner plot with axis suitable for spherical data
+sph_ax = fig.add_subplot(2,3,1, projection = 'mollview')
+
+sc = sph_ax.scatter(psichi.lon.deg, psichi.lat.deg, transform = sph_ax.get_transform('world'), c = phi.to_value('deg'), cmap = 'inferno',
+                    s = 2, vmin = 0, vmax = 180)
+sph_ax.scatter(source_direction.lon.deg, source_direction.lat.deg, transform = sph_ax.get_transform('world'), marker = 'x', s = 100, c = 'red')
+fig.colorbar(sc, orientation="horizontal", fraction = .05, label = "phi [deg]")
+
+sph_ax.set_title("Compton Data Space")
+
+# While the data live in this complex 4-D space (Em + CDS) it is useful to make some cuts and projections
+# to visualize it. For this we use the following coordinates, which are relative to a hypothetical source
+# (or, in this case, a known source, since we simulated it)
+# Epsilon: fractional difference in energy with respect to the energy of the source
+# Phi_geometric: angular distance between the source location and PsiChi
+# Theta_ARM = the difference between Phi (which is computed exclusively from kinematics) and Phi_geometric
+# Zeta: the azimuthal scattering direction, computed from PsiChi once a particular source direction is assumed.
+# The zeta=0 direction is arbitrary, and is defined by the polarization convention. 
+eps = ((measured_energy - source_energy) / source_energy).to_value('') +phi_geom,zeta = RelativeCDSCoordinates(source_direction, pol_convention).to_relative(psichi) theta_arm = phi_geom - phi rel_binned_data = Histogram([Axis(np.linspace(-1,1.1,200), scale = 'linear', label='eps'), @@ -58,90 +101,109 @@ Axis(np.linspace(-180, 180, 180)*u.deg, scale='linear', label='arm'), Axis(np.linspace(-180, 180, 180) * u.deg, scale='linear', label='az')]) -rel_binned_data.fill(eps, phi, theta_arm, az) +rel_binned_data.fill(eps, phi, theta_arm, zeta) -fig,ax = plt.subplots(2, 3, figsize = [18,8]) - -rel_binned_data.project('eps').plot(ax[0,0],errorbars = True) rel_binned_data.slice[{'phi':slice(30,120)}].project('az').rebin(5).plot(ax[1,0],errorbars = True) +ax[1,0].set_title("Azimuthal Scattering Angle Distribution (ASAD)") rel_binned_data.project(['arm','phi']).rebin(3,5).plot(ax[0,1]) +ax[0,1].set_title('Compton cone "wall"') + rel_binned_data.project('phi').rebin(5).plot(ax[0,2],errorbars = True) +ax[0,2].set_title("Polar Scattering Angle") + rel_binned_data.project('arm').rebin(3).plot(ax[1,1],errorbars = True) +ax[1,1].set_title("Angular Resolution Measure (ARM)") + +rel_binned_data.project('eps').plot(ax[1,2],errorbars = True) +ax[1,2].set_title("Energy dispersion") +fig.subplots_adjust(left=.05, right=.95, top=.95, bottom=.1, wspace=0.2, hspace=0.4) plt.show() +# ===== Likelihood setup ===== + +# In order to compute the likelihood we need to know how many counts we expect and, +# if they are detected, what is the probability density of having obtained a +# specific Em+CDS set of parameters. All of this is computed from the IRF's +# effective area and event probability density functions (PDFs). +# Since we used exactly the same effective area and PDFs to simulated our event, +# then we should get the "perfect" result. There will be statistical fluctuations +# resulting in a statistical error, but no systematic error. expectation = ExpectationFromLineInSCFrame(data, irf=irf_unpol, - flux=flux0, + flux=source_flux, duration=duration, - energy=ei, + energy=source_energy, direction=source_direction, polarized_irf=irf_pol, - polarization_degree=pol_degree, - polarization_angle=pol_angle, + polarization_degree=source_pd, + polarization_angle=source_pa, polarization_convention=pol_convention) +likelihood = UnbinnedLikelihood(expectation) +# ==== Fits ==== -likelihood = UnbinnedLikelihood(expectation) +# We'll use a brute-force maximum-likelihood estimation technique. That is, we'll compute +# likelihood as a function of all free parameters, get the combination that maximizes the likelihood, +# and use Wilks theorem to obtain an estimate of the errors. + +# We'll only free one parameter at a time, and set all others to known values. 
+# The flux will always be a "nuisance" parameter + +fit_energy = True +fit_direction = True +direction_nside = 64 # Increase it to get a better TS map, but it'll last longer +fit_pa_pd = True # ==== Free the source energy ==== -if False: +if fit_energy: # Set everything to the injection values - expectation.set_model(flux=flux0, - energy=ei, - direction=source_direction, - polarization_degree=pol_degree, - polarization_angle=pol_angle) - - loglike = Histogram([Axis(np.linspace(.5, 1.5, 11)/u.cm/u.cm/u.s, label = 'flux'), - Axis(np.linspace(498, 502, 10)*u.keV, label = 'Ei')]) - - profile = cProfile.Profile() - profile.enable() - tstart = time.perf_counter() - for j, ei_j in enumerate(loglike.axes['Ei'].centers): - print(j) + expectation.set_model(flux=source_flux, + energy=source_energy, + direction=source_direction, + polarization_degree=source_pd, + polarization_angle=source_pa) + + # Compute the likelihood on a grid + loglike = Histogram([Axis(np.linspace(.8, 1.2, 11)/u.cm/u.cm/u.s, label = 'flux'), + Axis(np.linspace(499, 501, 10)*u.keV, label = 'Ei')]) + + for j, ei_j in tqdm(list(enumerate(loglike.axes['Ei'].centers)), desc="Likelihood (free energy)"): for i,flux_i in enumerate(loglike.axes['flux'].centers): expectation.set_model(flux = flux_i, energy = ei_j) - loglike[i,j] = likelihood.get_log_like() - - print(time.perf_counter() - tstart) - profile.disable() - profile.dump_stats("/Users/imartin5/tmp/prof_eval.prof") - - (loglike - np.max(loglike)).plot(vmin = -25) + # Use Wilks theorem to get a 90% confidence interval + ts = 2 * (loglike - np.max(loglike)) + ax,_ = ts.plot(vmin = -4.61) + ax.scatter(source_flux.to_value(loglike.axes['flux'].unit), source_energy.to_value(loglike.axes['Ei'].unit), + color='red') + ax.get_figure().axes[-1].set_ylabel("TS") plt.show() # ==== Free the source direction ==== -if False: +if fit_direction: # Set everything to the injection values - expectation.set_model(flux=flux0, - energy=ei, - direction=source_direction, - polarization_degree=pol_degree, - polarization_angle=pol_angle) + expectation.set_model(flux=source_flux, + energy=source_energy, + direction=source_direction, + polarization_degree=source_pd, + polarization_angle=source_pa) - loglike = Histogram([Axis(np.linspace(.5, 1.5, 11)/u.cm/u.cm/u.s, label = 'flux'), - HealpixAxis(nside = 32, label = 'direction', coordsys=SpacecraftFrame())]) + loglike = Histogram([Axis(np.linspace(.8, 1.2, 11)/u.cm/u.cm/u.s, label = 'flux'), + HealpixAxis(nside = direction_nside, label = 'direction', coordsys=SpacecraftFrame())]) loglike[:] = np.nan - profile = cProfile.Profile() - profile.enable() - tstart = time.perf_counter() - sample_pixels = loglike.axes['direction'].query_disc(source_direction.cartesian.xyz, np.deg2rad(10)) - for j, pix in enumerate(sample_pixels): - - print(j,len(sample_pixels)) + sample_pixels = loglike.axes['direction'].query_disc(source_direction.cartesian.xyz, np.deg2rad(3)) + for j, pix in tqdm(list(enumerate(sample_pixels)), desc="Likelihood (direction)"): coord_pix = loglike.axes['direction'].pix2skycoord(pix) @@ -152,34 +214,46 @@ loglike[i,pix] = likelihood.get_log_like() - print(time.perf_counter() - tstart) - profile.disable() - profile.dump_stats("/Users/imartin5/tmp/prof_eval.prof") + fig,ax = plt.subplots(1,2, figsize = [10,4]) + + ax[0].set_axis_off() # Replace corner plot with axis suitable for spherical data + sph_ax = fig.add_subplot(1, 2, 1, projection='cartview', latra = source_direction.lat.deg + [-3,3], lonra = source_direction.lon.deg + 
np.asarray([-3,3])/np.cos(source_direction.lat.rad)) + sph_ax.coords[0].set_ticks_visible(True) + sph_ax.coords[1].set_ticks_visible(True) + sph_ax.coords[0].set_ticklabel_visible(True) + sph_ax.coords[1].set_ticklabel_visible(True) direction_profile_loglike = HealpixMap(np.nanmax(loglike, axis = 0)) - direction_profile_loglike.plot() + ts_direction = 2*(direction_profile_loglike - np.nanmax(direction_profile_loglike)) + ts_direction.plot(sph_ax, vmin = -4.61) + sph_ax.set_title("Location TS map") + sph_ax.get_figure().axes[-1].set_xlabel("TS") + sph_ax.scatter(source_direction.lon.deg, source_direction.lat.deg, transform=sph_ax.get_transform('world'), + marker='x', s=100, c='red') flux_prof_loglike = Histogram(loglike.axes['flux'], contents = np.nanmax(loglike, axis = 1)) - (flux_prof_loglike - np.nanmin(flux_prof_loglike)).plot() + ts_flux = 2*(flux_prof_loglike - np.nanmin(flux_prof_loglike)) + ts_flux.plot(ax[1]) + ax[1].axvline(source_flux.to_value(loglike.axes['flux'].unit), color = 'red', ls = ':') + ax[1].set_title("Flux TS profile") plt.show() # ==== Free PD and PA ==== -if True: +if fit_pa_pd: # Set everything to the injection values - expectation.set_model(flux=flux0, - energy=ei, + expectation.set_model(flux=source_flux, + energy=source_energy, direction=source_direction, - polarization_degree=pol_degree, - polarization_angle=pol_angle) + polarization_degree=source_pd, + polarization_angle=source_pa) - loglike = Histogram([Axis(np.linspace(.5, 1.5, 11) / u.cm / u.cm / u.s, label='flux'), + loglike = Histogram([Axis(np.linspace(.8, 1.2, 11) / u.cm / u.cm / u.s, label='flux'), Axis(np.linspace(40,120,10)*u.deg, label='PA'), Axis(np.linspace(0, 1, 11), label='PD'), ]) - for j, pa_j in enumerate(loglike.axes['PA'].centers): - print(j) + for j, pa_j in tqdm(list(enumerate(loglike.axes['PA'].centers)), desc="Likelihood (polarization)"): for k, pd_k in enumerate(loglike.axes['PD'].centers): for i,flux_i in enumerate(loglike.axes['flux'].centers): @@ -189,16 +263,23 @@ loglike[i,j,k] = likelihood.get_log_like() + fig,ax = plt.subplots(1,2, figaxis = [10,4]) + flux_prof_loglike = Histogram(loglike.axes['flux'], contents = np.nanmax(loglike, axis = (1,2))) ts_flux = 2 * (flux_prof_loglike - np.nanmin(flux_prof_loglike)) - ax,_ = ts_flux.plot() - ax.set_ylabel("TS") + ts_flux.plot(ax[0]) + ax[0].axvline(source_flux.to_value(loglike.axes['flux'].unit), color='red', ls=':') + ax[0].set_ylabel("TS") + ax[0].set_title("Flux TS profile") pol_prof_loglike = Histogram([loglike.axes['PA'], loglike.axes['PD']], contents=np.nanmax(loglike, axis=0)) ts_pol = 2 * (pol_prof_loglike - np.nanmax(pol_prof_loglike)) - ax, _ = ts_pol.plot(vmin=-4.61) - ax.scatter(pol_angle.to_value(u.deg), pol_degree, color='red') - ax.get_figure().axes[-1].set_ylabel("TS") + ts_pol.plot(ax[1], vmin=-4.61) + ax[1].scatter(source_pa.to_value(u.deg), source_pd, color='red') + ax[1].get_figure().axes[-1].set_ylabel("TS") + ax[1].set_title("PA-PD TS profile") + + plt.show() - plt.show() \ No newline at end of file +plt.show() \ No newline at end of file From 40d0de056121417e2b4b38060236eba292b9ce3a Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Tue, 4 Nov 2025 10:40:13 -0500 Subject: [PATCH 127/133] get nevents Signed-off-by: Israel Martinez --- cosipy/response/ideal_response.py | 4 ++++ .../examples/ideal_irf/ideal_irf_line_fit_example.py | 6 ++++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/cosipy/response/ideal_response.py b/cosipy/response/ideal_response.py index c1e4bf59..3ec3b08e 100644 --- 
a/cosipy/response/ideal_response.py +++ b/cosipy/response/ideal_response.py @@ -910,6 +910,10 @@ def __iter__(self) -> Iterator[EmCDSEventInSCFrameInterface]: """ yield from self._events + @property + def nevents(self) -> int: + return len(self._events) + class ExpectationFromLineInSCFrame(ExpectationDensityInterface): def __init__(self, diff --git a/docs/api/interfaces/examples/ideal_irf/ideal_irf_line_fit_example.py b/docs/api/interfaces/examples/ideal_irf/ideal_irf_line_fit_example.py index c7583cad..5e29b331 100644 --- a/docs/api/interfaces/examples/ideal_irf/ideal_irf_line_fit_example.py +++ b/docs/api/interfaces/examples/ideal_irf/ideal_irf_line_fit_example.py @@ -66,6 +66,8 @@ phi = data.scattering_angle psichi = data.scattered_direction_sc +logger.info(f"Got {data.nevents} events.") + # ======= Data visualization ====== fig,ax = plt.subplots(2, 3, figsize = [18,8]) @@ -156,7 +158,7 @@ fit_energy = True fit_direction = True -direction_nside = 64 # Increase it to get a better TS map, but it'll last longer +direction_nside = 128 # Decrease/increase it to get a better/worse TS map. It'll be faster/slower fit_pa_pd = True # ==== Free the source energy ==== @@ -263,7 +265,7 @@ loglike[i,j,k] = likelihood.get_log_like() - fig,ax = plt.subplots(1,2, figaxis = [10,4]) + fig,ax = plt.subplots(1,2, figsize = [10,4]) flux_prof_loglike = Histogram(loglike.axes['flux'], contents = np.nanmax(loglike, axis = (1,2))) ts_flux = 2 * (flux_prof_loglike - np.nanmin(flux_prof_loglike)) From dce3ee7c1ee53380d8801c3f0131a31203d6e221 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Wed, 14 Jan 2026 11:52:26 -0500 Subject: [PATCH 128/133] Add tests for SCHistory attitude and location interpolation Signed-off-by: Israel Martinez --- cosipy/test_data/20280301_first_10sec.ori | 22 ++++++++++----------- tests/spacecraftfile/test_spacecraftfile.py | 21 ++++++++++++++++++++ 2 files changed, 32 insertions(+), 11 deletions(-) diff --git a/cosipy/test_data/20280301_first_10sec.ori b/cosipy/test_data/20280301_first_10sec.ori index bd60cfd3..578095c6 100644 --- a/cosipy/test_data/20280301_first_10sec.ori +++ b/cosipy/test_data/20280301_first_10sec.ori @@ -1,13 +1,13 @@ Type OrientationsGalactic -OG 1835478000.0 73.14907746670937 41.85821768724895 16.85092253329064 221.85821768724895 0.0 0.0 0.0 1.0 -OG 1835478001.0 73.09517926980278 41.88225011209611 16.904820730197223 221.8822501120961 0.0 0.0 0.0 1.0 -OG 1835478002.0 73.04128380352786 41.90629597072256 16.95871619647214 221.90629597072257 0.0 0.0 0.0 1.0 -OG 1835478003.0 72.98739108131268 41.93035532675578 17.012608918687327 221.93035532675577 0.0 0.0 0.0 1.0 -OG 1835478004.0 72.9335011165853 41.954428243823145 17.066498883414702 221.95442824382317 0.0 0.0 0.0 1.0 -OG 1835478005.0 72.87961392277379 41.978514785552235 17.120386077226204 221.97851478555222 0.0 0.0 0.0 1.0 -OG 1835478006.0 72.82572951330626 42.002615015570285 17.174270486693747 222.0026150155703 0.0 0.0 0.0 1.0 -OG 1835478007.0 72.77184790161073 42.02672899750497 17.228152098389273 222.02672899750493 0.0 0.0 0.0 1.0 -OG 1835478008.0 72.7179691011153 42.05085679498347 17.282030898884702 222.05085679498347 0.0 0.0 0.0 1.0 -OG 1835478009.0 72.66409312524804 42.07499847163346 17.335906874751963 222.07499847163342 0.0 0.0 0.0 1.0 -OG 1835478010.0 72.61021998743702 42.09915409108222 17.38978001256298 222.09915409108223 0.0 0.0 0.0 0.0 +OG 1835478000.0 73.14907746670937 41.85821768724895 16.85092253329064 221.85821768724895 550.0 0.0 0.0 1.0 +OG 1835478001.0 73.09517926980278 41.88225011209611 
16.904820730197223 221.8822501120961 550.0 0.0 0.062 1.0 +OG 1835478002.0 73.04128380352786 41.90629597072256 16.95871619647214 221.90629597072257 550.0 0.0 0.124 1.0 +OG 1835478003.0 72.98739108131268 41.93035532675578 17.012608918687327 221.93035532675577 550.0 0.0 0.188 1.0 +OG 1835478004.0 72.9335011165853 41.954428243823145 17.066498883414702 221.95442824382317 550.0 0.0 0.250 1.0 +OG 1835478005.0 72.87961392277379 41.978514785552235 17.120386077226204 221.97851478555222 550.0 0.0 0.313 1.0 +OG 1835478006.0 72.82572951330626 42.002615015570285 17.174270486693747 222.0026150155703 550.0 0.0 0.376 1.0 +OG 1835478007.0 72.77184790161073 42.02672899750497 17.228152098389273 222.02672899750493 550.0 0.0 0.439 1.0 +OG 1835478008.0 72.7179691011153 42.05085679498347 17.282030898884702 222.05085679498347 550.0 0.0 0.501 1.0 +OG 1835478009.0 72.66409312524804 42.07499847163346 17.335906874751963 222.07499847163342 550.0 0.0 0.564 1.0 +OG 1835478010.0 72.61021998743702 42.09915409108222 17.38978001256298 222.09915409108223 550.0 0.0 0.627 0.0 EN diff --git a/tests/spacecraftfile/test_spacecraftfile.py b/tests/spacecraftfile/test_spacecraftfile.py index e0114576..88854056 100644 --- a/tests/spacecraftfile/test_spacecraftfile.py +++ b/tests/spacecraftfile/test_spacecraftfile.py @@ -114,6 +114,27 @@ def test_get_attitude(): assert np.allclose(attitude.as_matrix(), matrix) +def test_interp_attitude(): + ori_path = test_data.path / "20280301_first_10sec.ori" + ori = SpacecraftHistory.open(ori_path) + + assert np.allclose(ori.interp_attitude(Time(1835478000.5, format = 'unix')).as_quat(), [ 0.21284241, -0.55635578, 0.28699986, 0.75019826]) + + # Test edge cases + assert np.allclose(ori.interp_attitude(Time(1835478000.0, format='unix')).as_quat(), ori.attitude[0].as_quat()) + assert np.allclose(ori.interp_attitude(Time(1835478001.0, format='unix')).as_quat(), ori.attitude[1].as_quat()) + +def test_interp_location(): + ori_path = test_data.path / "20280301_first_10sec.ori" + ori = SpacecraftHistory.open(ori_path) + + assert np.allclose(ori.interp_location(Time(1835478000.5, format = 'unix')).cartesian.xyz.to_value(u.km), [ -378.74243194, -6048.59028196, -3346.84484112]) + + # Test edge cases + assert np.allclose(ori.interp_location(Time(1835478000.0, format='unix')).cartesian.xyz.to_value(u.km), ori.location[0].cartesian.xyz.to_value(u.km)) + assert np.allclose(ori.interp_location(Time(1835478001.0, format='unix')).cartesian.xyz.to_value(u.km), ori.location[1].cartesian.xyz.to_value(u.km)) + + def test_get_target_in_sc_frame(): ori_path = test_data.path / "20280301_first_10sec.ori" From 907f3eee326d1305e083d672e25c2cb8b1d3ec92 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Wed, 14 Jan 2026 14:10:26 -0500 Subject: [PATCH 129/133] Bring back improvements to SpacecraftHistory interpolations (back when it was called SpacecraftFile) by jdbuhler in PR #378 (using SLERP) Signed-off-by: Israel Martinez --- cosipy/spacecraftfile/spacecraft_file.py | 103 +++++++++++++------- tests/spacecraftfile/test_spacecraftfile.py | 4 +- 2 files changed, 70 insertions(+), 37 deletions(-) diff --git a/cosipy/spacecraftfile/spacecraft_file.py b/cosipy/spacecraftfile/spacecraft_file.py index 7acea318..fe55dd4a 100644 --- a/cosipy/spacecraftfile/spacecraft_file.py +++ b/cosipy/spacecraftfile/spacecraft_file.py @@ -6,7 +6,9 @@ import astropy.constants as c from astropy.time import Time -from astropy.coordinates import SkyCoord, EarthLocation, GCRS, SphericalRepresentation, CartesianRepresentation +from astropy.coordinates 
import SkyCoord, EarthLocation, GCRS, SphericalRepresentation, CartesianRepresentation, \ + UnitSphericalRepresentation +from astropy.units import Quantity from mhealpy import HealpixBase from histpy import Histogram, TimeAxis from mhealpy import HealpixMap @@ -32,10 +34,10 @@ def __init__(self, location: GCRS, livetime: u.Quantity = None): """ - Handles the spacecraft orientation. Calculates the dwell obstime - map and point source response over a certain orientation period. - Exports the point source response as RMF and ARF files that can be read by XSPEC. - + Handles the spacecraft orientation. Calculates the dwell time + map and point source response over a certain orientation + period. + Parameters ---------- obstime: @@ -240,68 +242,99 @@ def _parse_from_file(cls, file, tstart:Time = None, tstop:Time = None) -> "Space return cls(time, attitude, gcrs2, livetime) - def _interp_attitude(self, points, weights) -> Attitude: + @staticmethod + def _interp_location(t, d1, d2): """ + Compute a direction that linearly interpolates between + directions d1 and d2 using SLERP. + + The two directions are assumed to have the same frame, + which is also used for the interpolated result. Parameters ---------- - points - weights + t : float in [0, 1] + interpolation fraction + d1 : GCRS + 1st direction + d2 : GCRS + 2nd direction Returns ------- + SkyCoord: interpolated direction """ - # TODO: we could do a better interpolation using more points, or - # additional ACS data e.g. the rotation speed + if d1 == d2: + return d1 - rot_matrix = self._attitude.as_matrix() + v1 = d1.cartesian.xyz.value + v2 = d2.cartesian.xyz.value + unit = d1.cartesian.xyz.unit - # In case of multiple points - weights = np.expand_dims(weights, (weights.ndim, weights.ndim+1)) + # angle between v1, v2 + theta = np.arccos(np.dot(v1, v2)/d1.distance.value/d2.distance.value) - interp_attitude = Attitude.from_matrix(rot_matrix[points[0]]*weights[0] + rot_matrix[points[1]]*weights[1], frame = self._attitude.frame) + # SLERP interpolated vector + den = np.sin(theta) + vi = (np.sin((1 - t) * theta) * v1 + np.sin(t * theta) * v2) / den - return interp_attitude + dvi = GCRS(*Quantity(vi, unit = unit, copy = False), representation_type='cartesian') - def interp_attitude(self, time) -> Attitude: + return dvi + + @staticmethod + def _interp_attitude(t, att1, att2): """ + Compute an Attitude that linearly interpolates between + att1 and att2 using SLERP on their quaternion + representations. + + The two Attitudes are assumed to have the same frame, + which is also used for the interpolated result. 
+ + Parameters + ---------- + t : float in [0, 1] + interpolation fraction + att1 : Attitude + att2 : Attitude Returns ------- + Attitude : interpolated attitude """ - points, weights = self.interp_weights(time) + if att1 == att2: + return att1 - return self._interp_attitude(points, weights) + p1 = att1.as_quat() + p2 = att2.as_quat() - def _interp_location(self, points, weights) -> GCRS: - """ + # angle between quaternions p1, p2 (xyzw order) + theta = 2 * np.arccos(np.dot(p1, p2)) - Parameters - ---------- - points - weights + # SLERP interpolated quaternion + den = np.sin(theta) + pi = (np.sin((1 - t) * theta) * p1 + np.sin(t * theta) * p2) / den - Returns - ------- + return Attitude.from_quat(pi, frame=att1.frame) + def interp_attitude(self, time) -> Attitude: """ - # TODO: we could do a better interpolation using more points and orbital dynamics - x, y, z = self._gcrs.represent_as('cartesian').xyz + Returns + ------- - x_interp = x[points[0]] * weights[0] + x[points[1]] * weights[1] - y_interp = y[points[0]] * weights[0] + y[points[1]] * weights[1] - z_interp = z[points[0]] * weights[0] + z[points[1]] * weights[1] + """ - interp_gcrs = GCRS(x=x_interp, y=y_interp, z=z_interp, representation_type = 'cartesian') + points, weights = self.interp_weights(time) - return interp_gcrs + return self.__class__._interp_attitude(weights[1], self._attitude[points[0]], self._attitude[points[1]]) - def interp_location(self, time) -> EarthLocation: + def interp_location(self, time) -> GCRS: """ Returns @@ -310,7 +343,7 @@ def interp_location(self, time) -> EarthLocation: points, weights = self.interp_weights(time) - return self._interp_location(points, weights) + return self.__class__._interp_location(weights[1], self._gcrs[points[0]], self._gcrs[points[1]]) def _cumulative_livetime(self, points, weights) -> u.Quantity: diff --git a/tests/spacecraftfile/test_spacecraftfile.py b/tests/spacecraftfile/test_spacecraftfile.py index 88854056..f07ce91e 100644 --- a/tests/spacecraftfile/test_spacecraftfile.py +++ b/tests/spacecraftfile/test_spacecraftfile.py @@ -118,7 +118,7 @@ def test_interp_attitude(): ori_path = test_data.path / "20280301_first_10sec.ori" ori = SpacecraftHistory.open(ori_path) - assert np.allclose(ori.interp_attitude(Time(1835478000.5, format = 'unix')).as_quat(), [ 0.21284241, -0.55635578, 0.28699986, 0.75019826]) + assert np.allclose(ori.interp_attitude(Time(1835478000.5, format = 'unix')).as_quat(), [ 0.21284241, -0.55635581, 0.28699984, 0.75019825]) # Test edge cases assert np.allclose(ori.interp_attitude(Time(1835478000.0, format='unix')).as_quat(), ori.attitude[0].as_quat()) @@ -128,7 +128,7 @@ def test_interp_location(): ori_path = test_data.path / "20280301_first_10sec.ori" ori = SpacecraftHistory.open(ori_path) - assert np.allclose(ori.interp_location(Time(1835478000.5, format = 'unix')).cartesian.xyz.to_value(u.km), [ -378.74243194, -6048.59028196, -3346.84484112]) + assert np.allclose(ori.interp_location(Time(1835478000.5, format = 'unix')).cartesian.xyz.to_value(u.km), [ -378.74248737, -6048.59116724, -3346.84533097]) # Test edge cases assert np.allclose(ori.interp_location(Time(1835478000.0, format='unix')).cartesian.xyz.to_value(u.km), ori.location[0].cartesian.xyz.to_value(u.km)) From 363b256cfb2f23f5dfa15c00fa62228c879178cb Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Wed, 14 Jan 2026 16:48:39 -0500 Subject: [PATCH 130/133] Make interpolation work with multiple inputs Signed-off-by: Israel Martinez --- cosipy/spacecraftfile/spacecraft_file.py | 23 
+++++++++++++-------- tests/spacecraftfile/test_spacecraftfile.py | 12 +++++++++-- 2 files changed, 24 insertions(+), 11 deletions(-) diff --git a/cosipy/spacecraftfile/spacecraft_file.py b/cosipy/spacecraftfile/spacecraft_file.py index fe55dd4a..248b6377 100644 --- a/cosipy/spacecraftfile/spacecraft_file.py +++ b/cosipy/spacecraftfile/spacecraft_file.py @@ -12,6 +12,7 @@ from mhealpy import HealpixBase from histpy import Histogram, TimeAxis from mhealpy import HealpixMap +from ndindex.ndindex import newaxis from scoords import Attitude, SpacecraftFrame @@ -266,7 +267,7 @@ def _interp_location(t, d1, d2): """ - if d1 == d2: + if np.all(d1 == d2): return d1 v1 = d1.cartesian.xyz.value @@ -274,7 +275,7 @@ def _interp_location(t, d1, d2): unit = d1.cartesian.xyz.unit # angle between v1, v2 - theta = np.arccos(np.dot(v1, v2)/d1.distance.value/d2.distance.value) + theta = np.arccos(np.einsum('i...,i...->...',v1, v2)/d1.distance.value/d2.distance.value) # SLERP interpolated vector den = np.sin(theta) @@ -314,7 +315,11 @@ def _interp_attitude(t, att1, att2): p2 = att2.as_quat() # angle between quaternions p1, p2 (xyzw order) - theta = 2 * np.arccos(np.dot(p1, p2)) + theta = 2 * np.arccos(np.einsum('i...,i...->...',p1.transpose(), p2.transpose())) + + # Makes it work with scalars or any input shape + t = t[..., np.newaxis] + theta = theta[..., np.newaxis] # SLERP interpolated quaternion den = np.sin(theta) @@ -407,8 +412,8 @@ def interp(self, times: Time) -> 'SpacecraftHistory': points, weights = self.interp_weights(times) - interp_attitude = self._interp_attitude(points, weights) - interp_location = self._interp_location(points, weights) + interp_attitude = self._interp_attitude(weights[1], self._attitude[points[0]], self._attitude[points[1]]) + interp_location = self._interp_location(weights[1], self._gcrs[points[0]], self._gcrs[points[1]]) cum_livetime = self._cumulative_livetime(points, weights) diff_livetime = cum_livetime[1:] - cum_livetime[:-1] @@ -459,10 +464,10 @@ def select_interval(self, start:Time = None, stop:Time = None) -> "SpacecraftHis np.append(start.jd2, new_obstime.jd2), format = 'jd') - start_attitude = self._interp_attitude(start_points, start_weights) + start_attitude = self._interp_attitude(start_weights[1], self._attitude[start_points[0]], self._attitude[start_points[1]]) new_attitude = np.append(start_attitude.as_matrix()[None], new_attitude, axis=0) - start_location = self._interp_location(start_points, start_weights)[None].cartesian.xyz + start_location = self._interp_location(start_weights[1], self._gcrs[start_points[0]], self._gcrs[start_points[1]])[None].cartesian.xyz new_location = np.append(start_location, new_location, axis = 1) first_livetime = self.livetime[start_points[0]] * start_weights[0] @@ -475,11 +480,11 @@ def select_interval(self, start:Time = None, stop:Time = None) -> "SpacecraftHis np.append(new_obstime.jd2, stop.jd2), format='jd') - stop_attitude = self._interp_attitude(stop_points, stop_weights) + stop_attitude = self._interp_attitude(stop_weights[1], self._attitude[stop_points[0]], self._attitude[stop_points[1]]) new_attitude = np.append(new_attitude, stop_attitude.as_matrix()[None], axis=0) new_attitude = Attitude.from_matrix(new_attitude, frame=self._attitude.frame) - stop_location = self._interp_location(stop_points, stop_weights)[None].cartesian.xyz + stop_location = self._interp_location(stop_weights[1], self._gcrs[stop_points[0]], self._gcrs[stop_points[1]])[None].cartesian.xyz new_location = np.append(new_location, stop_location, axis=1) 
new_location = GCRS(x = new_location[0], y = new_location[1], z = new_location[2], diff --git a/tests/spacecraftfile/test_spacecraftfile.py b/tests/spacecraftfile/test_spacecraftfile.py index f07ce91e..b22120b9 100644 --- a/tests/spacecraftfile/test_spacecraftfile.py +++ b/tests/spacecraftfile/test_spacecraftfile.py @@ -120,7 +120,11 @@ def test_interp_attitude(): assert np.allclose(ori.interp_attitude(Time(1835478000.5, format = 'unix')).as_quat(), [ 0.21284241, -0.55635581, 0.28699984, 0.75019825]) - # Test edge cases + # Multiple + assert np.allclose(ori.interp_attitude(Time([1835478000.5, 1835478000.5], format='unix')).as_quat(), + [[0.21284241, -0.55635581, 0.28699984, 0.75019825],[0.21284241, -0.55635581, 0.28699984, 0.75019825]]) + + # Test edges assert np.allclose(ori.interp_attitude(Time(1835478000.0, format='unix')).as_quat(), ori.attitude[0].as_quat()) assert np.allclose(ori.interp_attitude(Time(1835478001.0, format='unix')).as_quat(), ori.attitude[1].as_quat()) @@ -130,7 +134,11 @@ def test_interp_location(): assert np.allclose(ori.interp_location(Time(1835478000.5, format = 'unix')).cartesian.xyz.to_value(u.km), [ -378.74248737, -6048.59116724, -3346.84533097]) - # Test edge cases + # Multiple + assert np.allclose(ori.interp_location(Time([1835478000.5,1835478000.5], format='unix')).cartesian.xyz.to_value(u.km), + np.transpose([[-378.74248737, -6048.59116724, -3346.84533097],[-378.74248737, -6048.59116724, -3346.84533097]])) + + # Test edges assert np.allclose(ori.interp_location(Time(1835478000.0, format='unix')).cartesian.xyz.to_value(u.km), ori.location[0].cartesian.xyz.to_value(u.km)) assert np.allclose(ori.interp_location(Time(1835478001.0, format='unix')).cartesian.xyz.to_value(u.km), ori.location[1].cartesian.xyz.to_value(u.km)) From 317ab82080d45b235c18eee0f5bec5a1217d0318 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Mon, 26 Jan 2026 16:45:21 -0500 Subject: [PATCH 131/133] Make crab binned fit work with latest develop Signed-off-by: Israel Martinez --- cosipy/response/PointSourceResponse.py | 8 +- cosipy/response/instrument_response.py | 7 +- .../response/threeml_point_source_response.py | 1 - cosipy/spacecraftfile/spacecraft_file.py | 162 ++++++++++++------ 4 files changed, 118 insertions(+), 60 deletions(-) diff --git a/cosipy/response/PointSourceResponse.py b/cosipy/response/PointSourceResponse.py index 60cc4118..1dee8a0c 100644 --- a/cosipy/response/PointSourceResponse.py +++ b/cosipy/response/PointSourceResponse.py @@ -196,13 +196,7 @@ def from_scatt_map(cls, psr = Quantity(np.empty(shape=axes.shape), unit = u.cm * u.cm * u.s) - for i, (pixels, exposure) in \ - enumerate(zip(scatt_map.contents.coords.transpose(), - scatt_map.contents.data * scatt_map.unit)): - - att = Attitude.from_axes(x=scatt_map.axes['x'].pix2skycoord(pixels[0]), - y=scatt_map.axes['y'].pix2skycoord(pixels[1])) - + for att, exposure in zip(scatt_map.attitudes, scatt_map.weights): response.differential_effective_area(data, coord, diff --git a/cosipy/response/instrument_response.py b/cosipy/response/instrument_response.py index 704fd95c..b69ba178 100644 --- a/cosipy/response/instrument_response.py +++ b/cosipy/response/instrument_response.py @@ -274,10 +274,9 @@ def rotate_coords(c, rot): else: weight_unit = None - self._dr._add_rot_psrs(out_axes, weight, - loc_psichi_pixels, - (loc_src_pixels,), (out.value,)) - + out.value[:] += self._dr._rot_psr(out_axes, weight, + loc_psichi_pixels, + (loc_src_pixels,)) if weight_unit is not None: out = u.Quantity(out.value, weight_unit*out.unit, copy 
= False) diff --git a/cosipy/response/threeml_point_source_response.py b/cosipy/response/threeml_point_source_response.py index 1fccabd4..d27ad1a8 100644 --- a/cosipy/response/threeml_point_source_response.py +++ b/cosipy/response/threeml_point_source_response.py @@ -181,7 +181,6 @@ def expectation(self, copy = True)-> Histogram: scatt_map = self._sc_ori.get_scatt_map(nside=self._nside, target_coord=coord, - coordsys=coordsys, earth_occ=True) self._psr = PointSourceResponse.from_scatt_map(coord, diff --git a/cosipy/spacecraftfile/spacecraft_file.py b/cosipy/spacecraftfile/spacecraft_file.py index 248b6377..7032533b 100644 --- a/cosipy/spacecraftfile/spacecraft_file.py +++ b/cosipy/spacecraftfile/spacecraft_file.py @@ -10,7 +10,7 @@ UnitSphericalRepresentation from astropy.units import Quantity from mhealpy import HealpixBase -from histpy import Histogram, TimeAxis +from histpy import Histogram, TimeAxis, HealpixAxis, Axis from mhealpy import HealpixMap from ndindex.ndindex import newaxis @@ -597,77 +597,143 @@ def get_dwell_map(self, target_coord:SkyCoord, nside:int = None, scheme = 'ring' return dwell_map def get_scatt_map(self, - nside, - target_coord=None, - scheme = 'ring', - coordsys = 'galactic', - earth_occ = True - ) -> SpacecraftAttitudeMap: + nside, + target_coord=None, + earth_occ=True, + angle_nbins=None) -> SpacecraftAttitudeMap: """ - Bin the spacecraft attitude history into a 4D histogram that - contains the accumulated obstime the axes of the spacecraft where - looking at a given direction. + Bin the spacecraft attitude history into a list of discretized + attitudes with associated time weights. Discretization is + performed on the rotation-vector representation of the + attitude; the supplied nside parameter describes a HEALPix + grid that discretizes the rotvec's direction, while a multiple + of nside defines the number of bins to discretize its angle. + + If a target coordinate is provided and earth_occ is True, + attitudes for which the view of the target is occluded by + the earth are excluded. Parameters ---------- - target_coord : astropy.coordinates.SkyCoord, optional - The coordinates of the target object. nside : int The nside of the scatt map. - scheme : str, optional - The scheme of the scatt map (the default is "ring") - coordsys : str, optional - The coordinate system used in the scatt map (the default is "galactic). + target_coord : astropy.coordinates.SkyCoord, optional + The coordinates of the target object. earth_occ : bool, optional Option to include Earth occultation in scatt map calculation. - Default is True. + Default is True. + angle_nbins : int (optional) + Number of bins used for the rotvec's angle. If none + specified, default is 8*nside Returns ------- - h_ori : cosipy.spacecraftfile.scatt_map.SpacecraftAttitudeMap + cosipy.spacecraftfile.scatt_map.SpacecraftAttitudeMap The spacecraft attitude map. + """ - # Check if target_coord is needed - if earth_occ and target_coord is None: - raise ValueError("target_coord is needed when earth_occ = True") + def _cart_to_polar(v): + """ + Convert Cartesian 3D unit direction vectors to polar coordinates. 
+ + Parameters + ---------- + v : np.ndarray(float) [N x 3] + array of N 3D unit vectors - # Get orientations - attitudes = self.attitude + Returns + ------- + lon, colat : np.ndarray(float) [N] + longitude and co-latitude corresponding to v in radians - # Altitude at each point in the orbit: - gcrs_cart = self._gcrs.represent_as(CartesianRepresentation) - dist_earth_center = gcrs_cart.norm() + """ - # Fill (only 2 axes needed to fully define the orientation) - h_ori = SpacecraftAttitudeMap(nside = nside, - scheme = scheme, - coordsys = coordsys) - - x,y,z = attitudes[:-1].as_axes() - - # Get max angle based on altitude: - max_angle = np.pi*u.rad - np.arcsin(c.R_earth/dist_earth_center) + lon = np.arctan2(v[:, 1], v[:, 0]) + colat = np.arccos(v[:, 2]) + return (lon, colat) - # Define weights and set to 0 if blocked by Earth: - weight = self.livetime + source = target_coord if earth_occ: - # Calculate angle between source direction and Earth zenith - # for each obstime stamp: - src_angle = target_coord.separation(self.earth_zenith) - # Get pointings that are occulted by Earth: - earth_occ_index = src_angle >= max_angle + # earth radius + r_earth = 6378.0 + + # Need a source location to compute earth occultation + if source is None: + raise ValueError("target_coord is needed when earth_occ is True") + + # calculate angle between source direction and Earth zenith + # for each time stamp + src_angle = source.separation(self.earth_zenith) + + # get max angle based on altitude + max_angle = np.pi - np.arcsin(r_earth/(r_earth + self.location.distance.km)) + + # get pointings that are occluded by Earth + is_occluded = src_angle.rad >= max_angle + + # zero out weights of time bins corresponding to occluded pointings + time_weights = np.where(is_occluded[:-1], 0, self.livetime.value) + + else: + source = None # w/o occultation, result is not dependent on source + time_weights = self.livetime.value + + # Get orientations as rotation vectors (center dir, angle around center) + + rot_vecs = self._attitude[:-1].as_rotvec() + rot_angles = np.linalg.norm(rot_vecs, axis=-1) + rot_dirs = rot_vecs / rot_angles[:,None] + + # discretize rotvecs for input Attitudes + + dir_axis = HealpixAxis(nside=nside, coordsys=self._attitude.frame) + + if angle_nbins is None: + angle_nbins = 8*nside + + angle_axis = Axis(np.linspace(0., 2*np.pi, num=angle_nbins+1), unit=u.rad) + + r_lon, r_colat = _cart_to_polar(rot_dirs.value) + + dir_bins = dir_axis.find_bin(theta=r_colat, + phi=r_lon) + angle_bins = angle_axis.find_bin(rot_angles) + + # compute list of unique rotvec bins occurring in input, + # along with mapping from time to rotvec bin + shape = (dir_axis.nbins, angle_axis.nbins) + + att_bins = np.ravel_multi_index((dir_bins, angle_bins), + shape) + + # compute an Attitude for each unique rotvec bin + + unique_atts, time_to_att_map = np.unique(att_bins, + return_inverse=True) + (unique_dirs, unique_angles) = np.unravel_index(unique_atts, + shape) + v = dir_axis.pix2vec(unique_dirs) + + binned_attitudes = Attitude.from_rotvec(np.column_stack(v) * + angle_axis.centers[unique_angles][:,None], + frame = self._attitude.frame) + + # sum weights for all attitudes mapping to each bin + binned_weights = np.zeros(len(unique_atts)) + np.add.at(binned_weights, time_to_att_map, time_weights) + + # remove any attitudes with zero weight + binned_attitudes = binned_attitudes[binned_weights > 0] + binned_weights = binned_weights[binned_weights > 0] - # Mask - weight[earth_occ_index[:-1]] = 0 - - # Fill histogram: - h_ori.fill(x, y, 
weight = weight) + return SpacecraftAttitudeMap(binned_attitudes, + u.Quantity(binned_weights, unit=self.livetime.unit, copy=False), + source = source) - return h_ori From 4dbfbee5e86151c19bb6fd96019290ee7d8892b1 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Fri, 30 Jan 2026 16:50:00 -0500 Subject: [PATCH 132/133] Merge develop nd fix some unit test (not everyone yet) Signed-off-by: Israel Martinez --- cosipy/data_io/UnBinnedData.py | 14 ++--- .../scatt_exposure_table.py | 31 +++++---- .../time_binned_exposure_table.py | 29 +++++---- cosipy/polarization/conventions.py | 8 ++- cosipy/spacecraftfile/__init__.py | 2 +- cosipy/spacecraftfile/spacecraft_file.py | 1 - .../test_exposure_table.py | 12 ++-- tests/interfaces/test_background_interface.py | 2 +- tests/polarization/test_polarization_asad.py | 4 +- .../spacecraftfile/test_arf_rmf_converter.py | 2 +- tests/threeml/test_spectral_fitting.py | 63 ++++++++++++++----- tests/ts_map/test_fast_ts_map.py | 2 +- 12 files changed, 108 insertions(+), 62 deletions(-) diff --git a/cosipy/data_io/UnBinnedData.py b/cosipy/data_io/UnBinnedData.py index e3a6190e..a1cbb0cf 100644 --- a/cosipy/data_io/UnBinnedData.py +++ b/cosipy/data_io/UnBinnedData.py @@ -19,8 +19,8 @@ from scoords import Attitude, SpacecraftFrame import cosipy +from cosipy.spacecraftfile import SpacecraftHistory from cosipy.data_io import DataIO -from cosipy.spacecraftfile import SpacecraftFile import logging logger = logging.getLogger(__name__) @@ -472,10 +472,10 @@ def instrument_pointing(self, ori_file): """ # Get ori info: - ori = SpacecraftFile.parse_from_file(ori_file) - time_tags = ori.get_time().to_value(format="unix") - x_pointings = ori.x_pointings - z_pointings = ori.z_pointings + ori = SpacecraftHistory.open(self.ori_file) + time_tags = ori.obstime.to_value(format="unix") + + x_pointings, _, z_pointings = ori.attitude.as_axes() # Interpolate: self.xl_interp = interpolate.interp1d(time_tags, x_pointings.l.rad, kind='linear') @@ -905,10 +905,10 @@ def cut_SAA_events(self, unbinned_data=None, output_name=None): self.cosi_dataset = self.get_dict(unbinned_data) # Get orientation info - ori = SpacecraftFile.parse_from_file(self.ori_file) + ori = SpacecraftHistory.open(self.ori_file) # Get bad time intervals - bti = self.find_bad_intervals(ori._time, ori.livetime) + bti = self.find_bad_intervals(ori.obstime, ori.livetime) # Get indices for good photons time_keep_index = self.filter_good_data(self.cosi_dataset['TimeTags'], bti) diff --git a/cosipy/image_deconvolution/scatt_exposure_table.py b/cosipy/image_deconvolution/scatt_exposure_table.py index 5a7caa5f..0869cd69 100644 --- a/cosipy/image_deconvolution/scatt_exposure_table.py +++ b/cosipy/image_deconvolution/scatt_exposure_table.py @@ -1,4 +1,7 @@ import logging + +from setuptools.command.easy_install import easy_install + logger = logging.getLogger(__name__) from tqdm.autonotebook import tqdm @@ -135,7 +138,7 @@ def analyze_orientation(cls, orientation, nside, scheme = 'ring', start = None, altitude_list = [] if start is not None and stop is not None: - orientation = orientation.source_interval(start, stop) + orientation = orientation.select_interval(start, stop) elif start is not None: logger.error("please specify the stop time") return @@ -143,24 +146,28 @@ def analyze_orientation(cls, orientation, nside, scheme = 'ring', start = None, logger.error("please specify the start time") return - ori_time = orientation.get_time() + ori_time = orientation.obstime logger.info(f'duration: {(ori_time[-1] - 
ori_time[0]).to("day")}') - attitude = orientation.get_attitude()[:-1] + attitude = orientation.attitude[:-1] pointing_list = attitude.transform_to("galactic").as_axes() n_pointing = len(pointing_list[0]) - - l_x = orientation.x_pointings.l.value[:-1] - b_x = orientation.x_pointings.b.value[:-1] - l_z = orientation.z_pointings.l.value[:-1] - b_z = orientation.z_pointings.b.value[:-1] + x_pointings, _, z_pointings = orientation.attitude.as_axes() + + l_x = x_pointings.l.value[:-1] + b_x = x_pointings.b.value[:-1] + + l_z = z_pointings.l.value[:-1] + b_z = z_pointings.b.value[:-1] + + earth_zenith_coord = orientation.earth_zenith.transform_to('galactic') - earth_zenith_l = orientation.earth_zenith.l.value[:-1] - earth_zenith_b = orientation.earth_zenith.b.value[:-1] + earth_zenith_l = earth_zenith_coord.l.value[:-1] + earth_zenith_b = earth_zenith_coord.b.value[:-1] if scheme == 'ring': nest = False @@ -173,8 +180,8 @@ def analyze_orientation(cls, orientation, nside, scheme = 'ring', start = None, idx_x = hp.ang2pix(nside, l_x, b_x, nest=nest, lonlat=True) idx_z = hp.ang2pix(nside, l_z, b_z, nest=nest, lonlat=True) - livetime = orientation.livetime - altitude = orientation.get_altitude()[:-1] + livetime = orientation.livetime.to_value(u.s) + altitude = orientation.location.spherical.distance[:-1].to_value(u.km) for i in tqdm(range(n_pointing)): diff --git a/cosipy/image_deconvolution/time_binned_exposure_table.py b/cosipy/image_deconvolution/time_binned_exposure_table.py index 60a3086c..f54dc075 100644 --- a/cosipy/image_deconvolution/time_binned_exposure_table.py +++ b/cosipy/image_deconvolution/time_binned_exposure_table.py @@ -1,4 +1,7 @@ import logging + +from astropy.time import Time + logger = logging.getLogger(__name__) from tqdm.autonotebook import tqdm @@ -97,27 +100,31 @@ def from_orientation(cls, orientation, tstart_list, tstop_list, **kwargs): for time_binning_index, (tstart, tstop) in enumerate(zip(tstart_list, tstop_list)): - this_orientation = orientation.source_interval(tstart, tstop) + this_orientation = orientation.select_interval(tstart, tstop) time_binning_indices.append(time_binning_index) - attitude = this_orientation.get_attitude()[:-1] + attitude = this_orientation.attitude[:-1] pointing_list = attitude.transform_to("galactic").as_axes() n_pointing = len(pointing_list[0]) - - l_x = this_orientation.x_pointings.l.value[:-1] - b_x = this_orientation.x_pointings.b.value[:-1] - l_z = this_orientation.z_pointings.l.value[:-1] - b_z = this_orientation.z_pointings.b.value[:-1] + x_pointings, _, z_pointings = this_orientation.attitude.as_axes() + + l_x = x_pointings.l.value[:-1] + b_x = x_pointings.b.value[:-1] + + l_z = z_pointings.l.value[:-1] + b_z = z_pointings.b.value[:-1] + + earth_zenith_coord = this_orientation.earth_zenith.transform_to('galactic') - earth_zenith_l = this_orientation.earth_zenith.l.value[:-1] - earth_zenith_b = this_orientation.earth_zenith.b.value[:-1] + earth_zenith_l = earth_zenith_coord.l.value[:-1] + earth_zenith_b = earth_zenith_coord.b.value[:-1] - livetime = this_orientation.livetime - altitude = this_orientation.get_altitude()[:-1] + livetime = this_orientation.livetime.to_value(u.s) + altitude = this_orientation.location.spherical.distance[:-1].to_value(u.km) # appending the value livetimes.append(livetime) diff --git a/cosipy/polarization/conventions.py b/cosipy/polarization/conventions.py index 9dce0729..80787097 100644 --- a/cosipy/polarization/conventions.py +++ b/cosipy/polarization/conventions.py @@ -1,7 +1,7 @@ from typing import 
Union import numpy as np -from astropy.coordinates import SkyCoord, Angle, BaseCoordinateFrame, frame_transform_graph +from astropy.coordinates import SkyCoord, Angle, BaseCoordinateFrame, frame_transform_graph, ICRS import astropy.units as u import inspect from scoords import Attitude, SpacecraftFrame @@ -141,12 +141,16 @@ def __init__(self, Set the reference vector, defaulting to celestial north if not provided (IAU convention). Alternatively, pass the cartesian representation and set a frame. frame : BaseCoordinateFrame - Only used if ref_vector is a bare cartesian vector. + Only used if ref_vector is a bare cartesian vector. Default: ICRS clockwise : bool Direction of increasing PA, when looking at the source. Default is false --i.e. counter-clockwise when looking outwards. """ + + if frame is None: + frame = ICRS + if ref_vector is None: self._ref_vector = np.asarray([0,0,1]) self._frame = frame diff --git a/cosipy/spacecraftfile/__init__.py b/cosipy/spacecraftfile/__init__.py index 9eb8444d..558f75d5 100644 --- a/cosipy/spacecraftfile/__init__.py +++ b/cosipy/spacecraftfile/__init__.py @@ -1,2 +1,2 @@ from .spacecraft_file import * -from .scatt_map import SpacecraftAttitudeMap +from .scatt_map import SpacecraftAttitudeMap, SpacecraftAxisMap diff --git a/cosipy/spacecraftfile/spacecraft_file.py b/cosipy/spacecraftfile/spacecraft_file.py index 7032533b..69bddc12 100644 --- a/cosipy/spacecraftfile/spacecraft_file.py +++ b/cosipy/spacecraftfile/spacecraft_file.py @@ -12,7 +12,6 @@ from mhealpy import HealpixBase from histpy import Histogram, TimeAxis, HealpixAxis, Axis from mhealpy import HealpixMap -from ndindex.ndindex import newaxis from scoords import Attitude, SpacecraftFrame diff --git a/tests/image_deconvolution/test_exposure_table.py b/tests/image_deconvolution/test_exposure_table.py index 146bb164..a7577769 100644 --- a/tests/image_deconvolution/test_exposure_table.py +++ b/tests/image_deconvolution/test_exposure_table.py @@ -15,20 +15,20 @@ def test_scatt_exposure_table(tmp_path): ori = SpacecraftHistory.open(test_data.path / "20280301_first_10sec.ori") - assert SpacecraftAttitudeExposureTable.analyze_orientation(ori, nside=nside, start=None, stop=ori.get_time()[-1], min_livetime=0, min_num_pointings=1) == None + assert SpacecraftAttitudeExposureTable.analyze_orientation(ori, nside=nside, start=None, stop=ori.obstime[-1], min_livetime=0, min_num_pointings=1) == None - assert SpacecraftAttitudeExposureTable.analyze_orientation(ori, nside=nside, start=ori.get_time()[0], stop=None, min_livetime=0, min_num_pointings=1) == None + assert SpacecraftAttitudeExposureTable.analyze_orientation(ori, nside=nside, start=ori.obstime[0], stop=None, min_livetime=0, min_num_pointings=1) == None exposure_table = SpacecraftAttitudeExposureTable.from_orientation(ori, nside=nside, - start=ori.get_time()[0], stop=ori.get_time()[-1], + start=ori.obstime[0], stop=ori.obstime[-1], min_livetime=0, min_num_pointings=1) exposure_table_nest = SpacecraftAttitudeExposureTable.from_orientation(ori, nside=nside, scheme = 'nested', - start=ori.get_time()[0], stop=ori.get_time()[-1], + start=ori.obstime[0], stop=ori.obstime[-1], min_livetime=0, min_num_pointings=1) exposure_table_badscheme = SpacecraftAttitudeExposureTable.from_orientation(ori, nside=nside, scheme = None, - start=ori.get_time()[0], stop=ori.get_time()[-1], + start=ori.obstime[0], stop=ori.obstime[-1], min_livetime=0, min_num_pointings=1) exposure_table.save_as_fits(tmp_path / "exposure_table_test_nside1_ring.fits") @@ -69,7 +69,7 @@ def 
test_scatt_exposure_table(tmp_path): assert np.all(binned_signal.contents == binned_signal_ref.contents) def test_time_binned_exposure_table(tmp_path): - ori = SpacecraftFile.parse_from_file(test_data.path / "20280301_first_10sec.ori") + ori = SpacecraftHistory.open(test_data.path / "20280301_first_10sec.ori") tstart_list = Time([1835478000.0], scale='utc', format='unix') tstop_list = Time([1835478005.0], scale='utc', format='unix') diff --git a/tests/interfaces/test_background_interface.py b/tests/interfaces/test_background_interface.py index cfc8227c..e59e049b 100644 --- a/tests/interfaces/test_background_interface.py +++ b/tests/interfaces/test_background_interface.py @@ -1,5 +1,5 @@ from cosipy.interfaces import (BackgroundInterface, BinnedBackgroundInterface, - UnbinnedBackgroundInterface + BackgroundDensityInterface ) diff --git a/tests/polarization/test_polarization_asad.py b/tests/polarization/test_polarization_asad.py index 590fc80b..b5527b0f 100644 --- a/tests/polarization/test_polarization_asad.py +++ b/tests/polarization/test_polarization_asad.py @@ -3,7 +3,7 @@ from astropy import units as u from scoords import SpacecraftFrame -from cosipy.polarization import PolarizationASAD +from cosipy.polarization_fitting import PolarizationASAD from cosipy.polarization.conventions import IAUPolarizationConvention, MEGAlibRelativeZ from cosipy.spacecraftfile import SpacecraftHistory from cosipy import BinnedData @@ -17,7 +17,7 @@ response_path = test_data.path / 'test_polarization_response.h5' sc_orientation = SpacecraftHistory.open(test_data.path / 'polarization_ori.ori') -attitude = sc_orientation.get_attitude()[0] +attitude = sc_orientation.attitude[0] a = 10. * u.keV b = 10000. * u.keV diff --git a/tests/spacecraftfile/test_arf_rmf_converter.py b/tests/spacecraftfile/test_arf_rmf_converter.py index ada23afb..72c5b73c 100644 --- a/tests/spacecraftfile/test_arf_rmf_converter.py +++ b/tests/spacecraftfile/test_arf_rmf_converter.py @@ -6,7 +6,7 @@ from astropy.io import fits from cosipy import test_data, SpacecraftHistory from cosipy.response import FullDetectorResponse -from cosipy.spacecraftfile import RspArfRmfConverter +from cosipy.response import RspArfRmfConverter from astropy import units as u diff --git a/tests/threeml/test_spectral_fitting.py b/tests/threeml/test_spectral_fitting.py index c67dc7ca..436ab001 100644 --- a/tests/threeml/test_spectral_fitting.py +++ b/tests/threeml/test_spectral_fitting.py @@ -1,4 +1,11 @@ -from cosipy import COSILike, test_data, BinnedData +import sys + +from cosipy import test_data, BinnedData +from cosipy.background_estimation import FreeNormBinnedBackground +from cosipy.data_io import EmCDSBinnedData +from cosipy.interfaces import ThreeMLPluginInterface +from cosipy.response import BinnedThreeMLModelFolding, FullDetectorResponse, BinnedInstrumentResponse, \ + BinnedThreeMLPointSourceResponse from cosipy.spacecraftfile import SpacecraftHistory import astropy.units as u import numpy as np @@ -6,16 +13,18 @@ from astromodels import Parameter from astropy.coordinates import SkyCoord +from cosipy.statistics import PoissonLikelihood + data_path = test_data.path sc_orientation = SpacecraftHistory.open(data_path / "20280301_2s.ori") -dr = str(data_path / "test_full_detector_response.h5") # path to detector response +dr_path = str(data_path / "test_full_detector_response.h5") # path to detector response -data = BinnedData(data_path / "test_spectral_fit.yaml") -background = BinnedData(data_path / "test_spectral_fit.yaml") +crab = BinnedData(data_path / 
"test_spectral_fit.yaml") +bkg_dist = BinnedData(data_path / "test_spectral_fit.yaml") -data.load_binned_data_from_hdf5(binned_data=data_path / "test_spectral_fit_data.h5") -background.load_binned_data_from_hdf5(binned_data=data_path / "test_spectral_fit_background.h5") +crab.load_binned_data_from_hdf5(binned_data=data_path / "test_spectral_fit_data.h5") +bkg_dist.load_binned_data_from_hdf5(binned_data=data_path / "test_spectral_fit_background.h5") bkg_par = Parameter("background_cosi", # background parameter 1, # initial value of parameter @@ -52,14 +61,38 @@ model = Model(source) -def test_point_source_spectral_fit(): +def test_point_source_spectral_fit(background=None): + + dr = FullDetectorResponse.open(dr_path) + instrument_response = BinnedInstrumentResponse(dr) + + # Workaround to avoid inf values. Out bkg should be smooth, but currently it's not. + # Reproduces results before refactoring. It's not _exactly_ the same, since this fudge value was 1e-12, and + # it was added to the expectation, not the normalized bkg + global bkg_dist # Was giving the error "UnboundLocalError: cannot access local variable 'bkg_dist' where it is not associated with a value" + bkg_dist = bkg_dist.binned_data.project('Em', 'Phi', 'PsiChi') + bkg_dist += sys.float_info.min + + data = EmCDSBinnedData(crab.binned_data.project('Em', 'Phi', 'PsiChi') + bkg_dist) + bkg = FreeNormBinnedBackground(bkg_dist, + sc_history=sc_orientation, + copy=False) - cosi = COSILike("cosi", # COSI 3ML plugin - dr = dr, # detector response - data = data.binned_data.project('Em', 'Phi', 'PsiChi'), # data (source+background) - bkg = background.binned_data.project('Em', 'Phi', 'PsiChi'), # background model - sc_orientation = sc_orientation, # spacecraft orientation - nuisance_param = bkg_par) # background parameter + psr = BinnedThreeMLPointSourceResponse(data=data, + instrument_response=instrument_response, + sc_history=sc_orientation, + energy_axis=dr.axes['Ei'], + polarization_axis=dr.axes['Pol'] if 'Pol' in dr.axes.labels else None, + nside=2 * data.axes['PsiChi'].nside) + + response = BinnedThreeMLModelFolding(data=data, point_source_response=psr) + + like_fun = PoissonLikelihood(data, response, bkg) + + cosi = ThreeMLPluginInterface('cosi', + like_fun, + response, + bkg) plugins = DataList(cosi) @@ -76,7 +109,3 @@ def test_point_source_spectral_fit(): assert np.allclose([cosi.get_log_like()], [213.14242014103897], atol=[1.0]) - - # Test scatt map method: - coord = SkyCoord(l=184.56*u.deg,b=-5.78*u.deg,frame="galactic") - cosi._get_scatt_map(coord) diff --git a/tests/ts_map/test_fast_ts_map.py b/tests/ts_map/test_fast_ts_map.py index a07185ec..52ee7662 100644 --- a/tests/ts_map/test_fast_ts_map.py +++ b/tests/ts_map/test_fast_ts_map.py @@ -19,7 +19,7 @@ from histpy import Histogram from cosipy import test_data -from cosipy import FastTSMap, MOCTSMap, SpacecraftFile +from cosipy import FastTSMap, MOCTSMap, SpacecraftHistory def test_ts_fit(): From e019541adf225e02e124d4146ecada1200eba254 Mon Sep 17 00:00:00 2001 From: Israel Martinez Date: Mon, 2 Feb 2026 14:15:30 -0500 Subject: [PATCH 133/133] Fix bug from develop-interfaces merge. GCRS only has "distance" in spherical coordinates. 
Signed-off-by: Israel Martinez --- cosipy/spacecraftfile/spacecraft_file.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cosipy/spacecraftfile/spacecraft_file.py b/cosipy/spacecraftfile/spacecraft_file.py index 69bddc12..71cda049 100644 --- a/cosipy/spacecraftfile/spacecraft_file.py +++ b/cosipy/spacecraftfile/spacecraft_file.py @@ -274,7 +274,7 @@ def _interp_location(t, d1, d2): unit = d1.cartesian.xyz.unit # angle between v1, v2 - theta = np.arccos(np.einsum('i...,i...->...',v1, v2)/d1.distance.value/d2.distance.value) + theta = np.arccos(np.einsum('i...,i...->...',v1, v2)/d1.spherical.distance.value/d2.spherical.distance.value) # SLERP interpolated vector den = np.sin(theta) @@ -669,7 +669,7 @@ def _cart_to_polar(v): src_angle = source.separation(self.earth_zenith) # get max angle based on altitude - max_angle = np.pi - np.arcsin(r_earth/(r_earth + self.location.distance.km)) + max_angle = np.pi - np.arcsin(r_earth/(r_earth + self.location.spherical.distance.km)) # get pointings that are occluded by Earth is_occluded = src_angle.rad >= max_angle
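The one-line fix above comes down to how astropy frames expose their radial coordinate: when the data are stored as Cartesian components, only the spherical representation carries a "distance". A minimal sketch of the two equivalent accessors used elsewhere in this patch series (astropy assumed available; the 7000 km radius is only an illustrative value):

import astropy.units as u
from astropy.coordinates import GCRS

# A GCRS position built from Cartesian components, as SpacecraftHistory does.
loc = GCRS(x=7000. * u.km, y=0. * u.km, z=0. * u.km, representation_type='cartesian')

# The radial distance comes from the spherical components, or equivalently from the Cartesian norm.
print(loc.spherical.distance)   # 7000.0 km
print(loc.cartesian.norm())     # 7000.0 km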